wrangler
Comparing version 0.0.0-f0a2b68 to 0.0.0-f19dde1
{
"name": "wrangler",
"version": "0.0.0-f0a2b68",
"version": "0.0.0-f19dde1",
"author": "wrangler@cloudflare.com",
@@ -40,5 +40,6 @@ "description": "Command-line interface for all things Cloudflare Workers",
"esbuild": "0.14.1",
"miniflare": "2.0.0",
"miniflare": "2.2.0",
"path-to-regexp": "^6.2.0",
"semiver": "^1.1.0"
"semiver": "^1.1.0",
"xxhash-addon": "^1.4.0"
},
@@ -69,2 +70,3 @@ "optionalDependencies": {
"formdata-node": "^4.3.1",
"ignore": "^5.2.0",
"ink": "^3.2.0",
@@ -102,3 +104,4 @@ "ink-select-input": "^4.2.1",
"start": "npm run bundle && NODE_OPTIONS=--enable-source-maps ./bin/wrangler.js",
"test": "CF_API_TOKEN=some-api-token CF_ACCOUNT_ID=some-account-id jest --silent=false --verbose=true"
"test": "CF_API_TOKEN=some-api-token CF_ACCOUNT_ID=some-account-id jest --silent=false --verbose=true",
"test-watch": "npm run test -- --runInBand --testTimeout=50000 --watch"
},
@@ -105,0 +108,0 @@ "engines": {
@@ -55,3 +55,3 @@ import path from "path";
// `export async function onRequest() {...}`
if (declaration.type === "FunctionDeclaration") {
if (declaration.type === "FunctionDeclaration" && declaration.id) {
exportNames.push(declaration.id.name);
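Note on the guard added above: a function declaration's `id` can be null, e.g. for `export default function () {...}`, so reading `declaration.id.name` unconditionally could throw. A minimal sketch of the two cases, assuming an ESTree-style AST shape (hypothetical fragment, not part of this diff):

// `export function onRequest() {}`  -> { type: "FunctionDeclaration", id: { name: "onRequest" } }
// `export default function () {}`   -> { type: "FunctionDeclaration", id: null }
type FunctionDeclarationNode = {
  type: "FunctionDeclaration";
  id: { name: string } | null;
};
function collectExportName(declaration: FunctionDeclarationNode): string[] {
  // Without the id check, `declaration.id.name` would throw for default exports.
  return declaration.id ? [declaration.id.name] : [];
}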
@@ -159,8 +159,8 @@ }
export function compareRoutes(a: string, b: string) {
function parseRoutePath(routePath: string) {
let [method, segmentedPath] = routePath.split(" ");
if (!segmentedPath) {
segmentedPath = method;
method = null;
}
function parseRoutePath(routePath: string): [string | null, string[]] {
const parts = routePath.split(" ", 2);
// split() will guarantee at least one element.
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const segmentedPath = parts.pop()!;
const method = parts.pop() ?? null;
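For context, the two input shapes the rewritten `parseRoutePath` handles (hypothetical values, not part of this diff):

// "GET /users/:id" -> parts ["GET", "/users/:id"]: the first pop() yields the
//                     path, the second yields the method "GET".
// "/users/:id"     -> parts ["/users/:id"]: the first pop() yields the path,
//                     the second returns undefined, so method becomes null.
const parts = "GET /users/:id".split(" ", 2);
const segmentedPath = parts.pop(); // "/users/:id"
const method = parts.pop() ?? null; // "GET"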
@@ -209,3 +209,3 @@ const segments = segmentedPath.slice(1).split("/").filter(Boolean);
while (searchPaths.length) {
while (isNotEmpty(searchPaths)) {
const cwd = searchPaths.shift();
@@ -225,1 +225,8 @@ const dir = await fs.readdir(cwd, { withFileTypes: true });
}
interface NonEmptyArray<T> extends Array<T> {
shift(): T;
}
function isNotEmpty<T>(array: T[]): array is NonEmptyArray<T> {
return array.length > 0;
}
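The `NonEmptyArray` interface narrows `shift()` so it can no longer return undefined. A minimal standalone sketch of how the guard is used (hypothetical values, not part of this diff):

declare const searchPaths: string[];
while (isNotEmpty(searchPaths)) {
  // Inside the guard, searchPaths is typed as NonEmptyArray<string>,
  // so shift() returns string rather than string | undefined.
  const cwd: string = searchPaths.shift();
  console.log(cwd);
}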
@@ -117,3 +117,3 @@ import path from "path";
for (const [route, props] of Object.entries(config.routes)) {
for (const [route, props] of Object.entries(config.routes ?? {})) {
let [_methods, routePath] = route.split(" ");
@@ -120,0 +120,0 @@ if (!routePath) {
@@ -36,5 +36,5 @@ import { match } from "path-to-regexp";
// expect an ASSETS fetcher binding pointing to the asset-server stage
type Env = {
[name: string]: unknown;
ASSETS: { fetch(url: string, init: RequestInit): Promise<Response> };
type FetchEnv = {
[name: string]: { fetch: typeof fetch };
ASSETS: { fetch: typeof fetch };
};
@@ -46,4 +46,3 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars -- `env` can be used by __FALLBACK_SERVICE_FETCH__
function* executeRequest(request: Request, env: Env) {
function* executeRequest(request: Request, _env: FetchEnv) {
const requestPath = new URL(request.url).pathname;
@@ -93,16 +92,6 @@
}
// Finally, yield to the fallback service (`env.ASSETS.fetch` in Pages' case)
return {
handler: () =>
__FALLBACK_SERVICE__
? // @ts-expect-error expecting __FALLBACK_SERVICE__ to be the name of a service binding, so fetch should be defined
env[__FALLBACK_SERVICE__].fetch(request)
: fetch(request),
params: {} as Params,
};
}
export default {
async fetch(request: Request, env: Env, workerContext: WorkerContext) {
async fetch(request: Request, env: FetchEnv, workerContext: WorkerContext) {
const handlerIterator = executeRequest(request, env);
@@ -115,10 +104,7 @@ const data = {}; // arbitrary data the user can set between functions
const { value } = handlerIterator.next();
if (value) {
const { handler, params } = value;
const context: EventContext<
unknown,
string,
Record<string, unknown>
> = {
const result = handlerIterator.next();
// Note we can't use `!result.done` because this doesn't narrow to the correct type
if (result.done == false) {
const { handler, params } = result.value;
const context = {
request: new Request(request.clone()),
@@ -139,2 +125,8 @@ next,
);
} else if (__FALLBACK_SERVICE__) {
// There are no more handlers so finish with the fallback service (`env.ASSETS.fetch` in Pages' case)
return env[__FALLBACK_SERVICE__].fetch(request);
} else {
// There was no fallback service, so actually make the request to the origin.
return fetch(request);
}
@@ -141,0 +133,0 @@ };
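A note on the `result.done == false` comparison above: TypeScript models an iterator result as a discriminated union, and the explicit comparison against `false` narrows it to the yielding variant so that `result.value` has the yielded handler type. A minimal sketch (hypothetical iterator, not part of this diff):

function nextHandler(iterator: Iterator<{ handler: () => Promise<Response> }>) {
  const result = iterator.next();
  if (result.done == false) {
    // result is an IteratorYieldResult here, so .value is well typed.
    return result.value.handler();
  }
  return undefined;
}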
import { writeFileSync } from "fs";
import type { KVNamespaceInfo } from "../kv";
import {
@@ -199,7 +200,7 @@ setMockResponse,
it("should list namespaces", async () => {
const KVNamespaces = [
const kvNamespaces: KVNamespaceInfo[] = [
{ title: "title-1", id: "id-1" },
{ title: "title-2", id: "id-2" },
];
mockListRequest(KVNamespaces);
mockListRequest(kvNamespaces);
const { error, stdout, stderr } = await runWrangler(
@@ -211,3 +212,3 @@ "kv:namespace list"
const namespaces = JSON.parse(stdout);
expect(namespaces).toEqual(KVNamespaces);
expect(namespaces).toEqual(kvNamespaces);
});
@@ -217,10 +218,10 @@
// Create a lot of mock namespaces, so that the fetch requests will be paginated
const KVNamespaces = [];
const kvNamespaces: KVNamespaceInfo[] = [];
for (let i = 0; i < 550; i++) {
KVNamespaces.push({ title: "title-" + i, id: "id-" + i });
kvNamespaces.push({ title: "title-" + i, id: "id-" + i });
}
const requests = mockListRequest(KVNamespaces);
const requests = mockListRequest(kvNamespaces);
const { stdout } = await runWrangler("kv:namespace list");
const namespaces = JSON.parse(stdout);
expect(namespaces).toEqual(KVNamespaces);
expect(namespaces).toEqual(kvNamespaces);
expect(requests.count).toEqual(6);
@@ -341,4 +342,12 @@ });
expect(body).toEqual(expectedValue);
expect(query.get("expiration")).toEqual(`${expiration}`);
expect(query.get("expiration_ttl")).toEqual(`${expirationTtl}`);
if (expiration !== undefined) {
expect(query.get("expiration")).toEqual(`${expiration}`);
} else {
expect(query.has("expiration")).toBe(false);
}
if (expirationTtl) {
expect(query.get("expiration_ttl")).toEqual(`${expirationTtl}`);
} else {
expect(query.has("expiration_ttl")).toBe(false);
}
return null;
@@ -643,5 +652,8 @@ }
expect(stdout).toMatchInlineSnapshot(`""`);
expect(stderr).toMatchInlineSnapshot(
`"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\"."`
);
expect(stderr).toMatchInlineSnapshot(`
"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\".
[32m%s[0m
If you think this is a bug then please create an issue at https://github.com/cloudflare/wrangler2/issues/new."
`);
expect(error).toMatchInlineSnapshot(
@@ -663,5 +675,8 @@ `[Error: A namespace with binding name "otherBinding" was not found in the configured "kv_namespaces".]`
expect(stdout).toMatchInlineSnapshot(`""`);
expect(stderr).toMatchInlineSnapshot(
`"someBinding has both a namespace ID and a preview ID. Specify \\"--preview\\" or \\"--preview false\\" to avoid writing data to the wrong namespace."`
);
expect(stderr).toMatchInlineSnapshot(`
"someBinding has both a namespace ID and a preview ID. Specify \\"--preview\\" or \\"--preview false\\" to avoid writing data to the wrong namespace.
[32m%s[0m
If you think this is a bug then please create an issue at https://github.com/cloudflare/wrangler2/issues/new."
`);
expect(error).toMatchInlineSnapshot(
@@ -675,33 +690,2 @@ `[Error: someBinding has both a namespace ID and a preview ID. Specify "--preview" or "--preview false" to avoid writing data to the wrong namespace.]`
describe("list", () => {
function mockKeyListRequest(
expectedNamespaceId: string,
expectedKeys: string[],
keysPerRequest = 1000
) {
const requests = { count: 0 };
setMockRawResponse(
"/accounts/:accountId/storage/kv/namespaces/:namespaceId/keys",
([_url, accountId, namespaceId], _init, query) => {
requests.count++;
expect(accountId).toEqual("some-account-id");
expect(namespaceId).toEqual(expectedNamespaceId);
if (expectedKeys.length <= keysPerRequest) {
return createFetchResult(expectedKeys);
} else {
const start = parseInt(query.get("cursor")) || 0;
const end = start + keysPerRequest;
const cursor = end < expectedKeys.length ? end : undefined;
return createFetchResult(
expectedKeys.slice(start, end),
true,
[],
[],
{ cursor }
);
}
}
);
return requests;
}
it("should list the keys of a namespace specified by namespace-id", async () => {
it("should list the keys of a namespace specified by namespace-id", async () => { | ||
@@ -716,5 +700,19 @@ const keys = ["key-1", "key-2", "key-3"]; | ||
expect(stdout).toMatchInlineSnapshot(` | ||
"key-1 | ||
key-2 | ||
key-3" | ||
"[ | ||
{ | ||
\\"name\\": \\"key-1\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-2\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-3\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
} | ||
]" | ||
`); | ||
@@ -733,5 +731,19 @@ }); | ||
expect(stdout).toMatchInlineSnapshot(` | ||
"key-1 | ||
key-2 | ||
key-3" | ||
"[ | ||
{ | ||
\\"name\\": \\"key-1\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-2\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-3\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
} | ||
]" | ||
`); | ||
@@ -750,5 +762,19 @@ }); | ||
expect(stdout).toMatchInlineSnapshot(` | ||
"key-1 | ||
key-2 | ||
key-3" | ||
"[ | ||
{ | ||
\\"name\\": \\"key-1\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-2\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-3\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
} | ||
]" | ||
`); | ||
@@ -767,5 +793,19 @@ }); | ||
expect(stdout).toMatchInlineSnapshot(` | ||
"key-1 | ||
key-2 | ||
key-3" | ||
"[ | ||
{ | ||
\\"name\\": \\"key-1\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-2\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-3\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
} | ||
]" | ||
`); | ||
@@ -784,5 +824,19 @@ }); | ||
expect(stdout).toMatchInlineSnapshot(` | ||
"key-1 | ||
key-2 | ||
key-3" | ||
"[ | ||
{ | ||
\\"name\\": \\"key-1\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-2\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
}, | ||
{ | ||
\\"name\\": \\"key-3\\", | ||
\\"expiration\\": 123456789, | ||
\\"metadata\\": {} | ||
} | ||
]" | ||
`); | ||
@@ -793,3 +847,3 @@ }); | ||
// Create a lot of mock keys, so that the fetch requests will be paginated | ||
const keys = []; | ||
const keys: string[] = []; | ||
for (let i = 0; i < 550; i++) { | ||
@@ -805,3 +859,3 @@ keys.push("key-" + i); | ||
expect(stderr).toMatchInlineSnapshot(`""`); | ||
expect(stdout).toEqual(keys.join("\n")); | ||
expect(JSON.parse(stdout).map((k) => k.name)).toEqual(keys); | ||
expect(requests.count).toEqual(6); | ||
@@ -818,5 +872,8 @@ });
);
expect(stderr).toMatchInlineSnapshot(
`"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\"."`
);
expect(stderr).toMatchInlineSnapshot(`
"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\".
[32m%s[0m
If you think this is a bug then please create an issue at https://github.com/cloudflare/wrangler2/issues/new."
`);
expect(stdout).toMatchInlineSnapshot(`""`);
@@ -1006,5 +1063,8 @@ });
expect(stdout).toMatchInlineSnapshot(`""`);
expect(stderr).toMatchInlineSnapshot(
`"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\"."`
);
expect(stderr).toMatchInlineSnapshot(`
"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\".
[32m%s[0m
If you think this is a bug then please create an issue at https://github.com/cloudflare/wrangler2/issues/new."
`);
expect(error).toMatchInlineSnapshot(
@@ -1067,5 +1127,8 @@ `[Error: A namespace with binding name "otherBinding" was not found in the configured "kv_namespaces".]`
);
expect(stderr).toMatchInlineSnapshot(
`"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\"."`
);
expect(stderr).toMatchInlineSnapshot(`
"A namespace with binding name \\"otherBinding\\" was not found in the configured \\"kv_namespaces\\".
[32m%s[0m
If you think this is a bug then please create an issue at https://github.com/cloudflare/wrangler2/issues/new."
`);
});
@@ -1116,1 +1179,39 @@
}
export function mockKeyListRequest(
expectedNamespaceId: string,
expectedKeys: string[],
keysPerRequest = 1000
) {
const requests = { count: 0 };
// See https://api.cloudflare.com/#workers-kv-namespace-list-a-namespace-s-keys
const expectedKeyObjects = expectedKeys.map((name) => ({
name,
expiration: 123456789,
metadata: {},
}));
setMockRawResponse(
"/accounts/:accountId/storage/kv/namespaces/:namespaceId/keys",
"GET",
([_url, accountId, namespaceId], _init, query) => {
requests.count++;
expect(accountId).toEqual("some-account-id");
expect(namespaceId).toEqual(expectedNamespaceId);
if (expectedKeyObjects.length <= keysPerRequest) {
return createFetchResult(expectedKeyObjects);
} else {
const start = parseInt(query.get("cursor") ?? "0") || 0;
const end = start + keysPerRequest;
const cursor = end < expectedKeyObjects.length ? end : undefined;
return createFetchResult(
expectedKeyObjects.slice(start, end),
true,
[],
[],
{ cursor }
);
}
}
);
return requests;
}
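For context, a minimal sketch of the cursor-based pagination this mock simulates, as a client would consume it (the fetchPage helper and response shape are hypothetical, not part of this diff):

async function listAllKeys(
  fetchPage: (cursor?: number) => Promise<{
    result: { name: string }[];
    result_info?: { cursor?: number };
  }>
): Promise<{ name: string }[]> {
  const keys: { name: string }[] = [];
  let cursor: number | undefined;
  do {
    const page = await fetchPage(cursor);
    keys.push(...page.result);
    // An undefined cursor marks the last page and ends the loop.
    cursor = page.result_info?.cursor;
  } while (cursor !== undefined);
  return keys;
}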
import type { RequestInit } from "node-fetch";
import type { URLSearchParams } from "node:url";
import { URLSearchParams } from "node:url";
import { pathToRegexp } from "path-to-regexp";
@@ -12,4 +12,4 @@ import { CF_API_BASE_URL } from "../cfetch";
uri: RegExpExecArray,
init?: RequestInit,
queryParams?: URLSearchParams
init: RequestInit,
queryParams: URLSearchParams
) => ResponseType;
@@ -36,3 +36,3 @@
init: RequestInit = {},
queryParams?: URLSearchParams
queryParams: URLSearchParams = new URLSearchParams()
) {
@@ -43,3 +43,3 @@ for (const { regexp, method, handler } of mocks) {
// Do the resource path and (if specified) the HTTP method match?
if (uri !== null && (!method || method === init.method)) {
if (uri !== null && (!method || method === (init.method ?? "GET"))) {
// The `resource` regular expression will extract the labelled groups from the URL.
@@ -50,3 +50,5 @@ // These are passed through to the `handler` call, to allow it to do additional checks or behaviour.
}
throw new Error(`no mocks found for ${init.method}: ${resource}`);
throw new Error(
`no mocks found for ${init.method ?? "any HTTP"} request to ${resource}`
);
}
@@ -53,0 +55,0 @@
@@ -26,3 +26,3 @@ import type {
function toModule(module: CfModule, entryType?: CfModuleType): Blob {
function toModule(module: CfModule, entryType: CfModuleType): Blob {
const { type: moduleType, content } = module;
@@ -29,0 +29,0 @@ const type = toMimeType(moduleType ?? entryType);
@@ -115,3 +115,3 @@ import type { CfPreviewToken } from "./preview";
*/
name: string | void;
name: string | undefined;
/**
@@ -124,3 +124,3 @@ * The entrypoint module.
*/
modules: void | CfModule[];
modules: undefined | CfModule[];
/**
@@ -135,6 +135,6 @@ * All the bindings
};
migrations: void | CfDurableObjectMigrations;
compatibility_date: string | void;
compatibility_flags: void | string[];
usage_model: void | "bundled" | "unbound";
migrations: undefined | CfDurableObjectMigrations;
compatibility_date: string | undefined;
compatibility_flags: undefined | string[];
usage_model: undefined | "bundled" | "unbound";
}
@@ -141,0 +141,0 @@
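The `void` -> `undefined` swap above is a type-level cleanup: `void` is intended for function return types, and `undefined` is the idiomatic way to mark an absent value in a property union, where it narrows cleanly. A minimal sketch (hypothetical type, not part of this diff):

type WorkerMeta = { name: string | undefined };
function describe(meta: WorkerMeta): string {
  // An ordinary equality check narrows `name` to string here.
  return meta.name !== undefined ? meta.name : "<unnamed>";
}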
@@ -43,2 +43,6 @@ import fetch from "node-fetch";
function cloneHeaders(headers: HeadersInit | undefined): HeadersInit {
return { ...headers };
}
async function requireLoggedIn(): Promise<void> {
@@ -59,6 +63,2 @@ const loggedIn = await loginOrRefreshIfRequired();
function cloneHeaders(headers: HeadersInit): HeadersInit {
return { ...headers };
}
function addAuthorizationHeader(headers: HeadersInit, apiToken: string): void {
@@ -65,0 +65,0 @@ if (headers["Authorization"]) {
@@ -1,131 +0,646 @@
// we're going to manually write both the type definition AND
// the validator for the config, so that we can give better error messages
import assert from "node:assert";
type DurableObjectMigration = {
tag: string;
new_classes?: string[];
renamed_classes?: string[];
deleted_classes?: string[];
};
/**
* This is the static type definition for the configuration object.
* It reflects the configuration that you can write in wrangler.toml,
* and optionally augment with arguments passed directly to wrangler.
* The type definition doesn't fully reflect the constraints applied
* to the configuration, but it is a good starting point. Later, we
* also define a validator function that will validate the configuration
* with the same rules as the type definition, as well as the extra
* constraints. The type definition is good for asserting correctness
* in the wrangler codebase, whereas the validator function is useful
* for signalling errors in the configuration to a user of wrangler.
*
* For more information about the configuration object, see the
* documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration
*
* Legend for the annotations:
*
* *:optional means providing a value isn't mandatory
* *:deprecated means the field itself isn't necessary anymore in wrangler.toml
* *:breaking means the deprecation/optionality is a breaking change from wrangler 1
* *:todo means there's more work to be done (with details attached)
* *:inherited means the field is copied to all environments
*/
export type Config = {
/**
* The name of your worker. Alphanumeric + dashes only.
*
* @optional
* @inherited
*/
name?: string;
type Project = "webpack" | "javascript" | "rust";
/**
* The entrypoint/path to the JavaScript file that will be executed.
*
* @optional
* @inherited
* @todo this needs to be implemented!
*/
entry?: string;
type Site = {
// inherited
bucket: string;
"entry-point": string;
include?: string[];
exclude?: string[];
};
/**
* This is the ID of the account associated with your zone.
* You might have more than one account, so make sure to use
* the ID of the account associated with the zone/route you
* provide, if you provide one. It can also be specified through
* the CF_ACCOUNT_ID environment variable.
*
* @optional
* @inherited
*/
account_id?: string;
type Dev = {
ip?: string;
port?: number;
local_protocol?: string;
upstream_protocol?: string;
};
/**
* The project "type". A holdover from wrangler 1.x.
* Valid values were "webpack", "javascript", and "rust".
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @optional
* @inherited
* @breaking
*/
type?: "webpack" | "javascript" | "rust";
export type Vars = { [key: string]: string };
/**
* A date in the form yyyy-mm-dd, which will be used to determine
* which version of the Workers runtime is used. More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
* @optional true for `dev`, false for `publish`
* @inherited
*/
compatibility_date?: string;
type Cron = string; // TODO: we should be able to parse a cron pattern with ts
/**
* A list of flags that enable features from upcoming features of
* the Workers runtime, usually used together with compatibility_flags.
* More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
*
* @optional
* @inherited
* @todo This could be an enum!
*/
compatibility_flags?: string[];
type KVNamespace = {
binding: string;
preview_id?: string;
id: string;
};
/**
* Whether we use <name>.<subdomain>.workers.dev to
* test and deploy your worker.
*
* @default `true` (This is a breaking change from wrangler 1)
* @optional
* @inherited
* @breaking
*/
workers_dev?: boolean;
type DurableObject = {
name: string;
class_name: string;
script_name?: string;
};
/**
* The zone ID of the zone you want to deploy to. You can find this
* in your domain page on the dashboard.
*
* @deprecated This is unnecessary since we can deduce this from routes directly.
* @optional
* @inherited
*/
zone_id?: string;
type Service = {
name: string;
service: string;
environment: string;
/**
* A list of routes that your worker should be deployed to.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
routes?: string[];
/**
* A route that your worker should be deployed to. Literally
* the same as routes, but only one.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
route?: string;
/**
* Path to the webpack config to use when building your worker.
* A holdover from wrangler 1.x, used with `type: "webpack"`.
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @inherited
* @breaking
*/
webpack_config?: string;
/**
* The function to use to replace jsx syntax.
*
* @default `"React.createElement"`
* @optional
* @inherited
*/
jsx_factory?: string;
/**
* The function to use to replace jsx fragment syntax.
*
* @default `"React.Fragment"`
* @optional
* @inherited
*/
jsx_fragment?: string;
/**
* A map of environment variables to set when deploying your worker.
* Of note, they can only be strings. Which is unfortunate, really.
* (TODO: verify that they can only be strings?)
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
vars?: { [key: string]: string };
/**
* A list of durable objects that your worker should be bound to.
* For more information about Durable Objects, see the documentation at
* https://developers.cloudflare.com/workers/learning/using-durable-objects
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{ bindings: [] }`
* @optional
* @inherited false
*/
durable_objects?: {
bindings: {
/** The name of the binding used to refer to the Durable Object */
name: string;
/** The exported class name of the Durable Object */
class_name: string;
/** The script where the Durable Object is defined (if it's external to this worker) */
script_name?: string;
}[];
};
/**
* These specify any Workers KV Namespaces you want to
* access from inside your Worker. To learn more about KV Namespaces,
* see the documentation at https://developers.cloudflare.com/workers/learning/how-kv-works
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
kv_namespaces?: {
/** The binding name used to refer to the KV Namespace */
binding: string;
/** The ID of the KV namespace */
id: string;
/** The ID of the KV namespace used during `wrangler dev` */
preview_id?: string;
}[];
/**
* A list of services that your worker should be bound to.
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
experimental_services?: {
/** The binding name used to refer to the Service */
name: string;
/** The name of the Service being bound */
service: string;
/** The Service's environment */
environment: string;
}[];
/**
* A list of migrations that should be uploaded with your Worker.
* These define changes in your Durable Object declarations.
* More details at https://developers.cloudflare.com/workers/learning/using-durable-objects#configuring-durable-object-classes-with-migrations
* NB: these ARE inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited true
*/
migrations?: {
/** A unique identifier for this migration. */
tag: string;
/** The new Durable Objects being defined. */
new_classes?: string[];
/** The Durable Objects being renamed. */
renamed_classes?: {
from: string;
to: string;
}[];
/** The Durable Objects being removed. */
deleted_classes?: string[];
}[];
/**
* The definition of a Worker Site, a feature that lets you upload
* static assets with your Worker.
* More details at https://developers.cloudflare.com/workers/platform/sites
* NB: This IS inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `undefined`
* @optional
* @inherited true
*/
site?: {
/**
* The directory containing your static assets. It must be
* a path relative to your wrangler.toml file.
* Example: bucket = "./public"
*
* optional false
*/
bucket: string;
/**
* The location of your Worker script.
*
* @deprecated DO NOT use this (it's a holdover from wrangler 1.x). Either use the top level `entry` field, or pass the path to your entry file as a command line argument.
* @todo we should use a top level "entry" property instead
* @breaking
*/
"entry-point": string;
/**
* An exclusive list of .gitignore-style patterns that match file
* or directory names from your bucket location. Only matched
* items will be uploaded. Example: include = ["upload_dir"]
*
* @optional
* @default `[]`
* @todo this needs to be implemented!
*/
include?: string[];
/**
* A list of .gitignore-style patterns that match files or
* directories in your bucket that should be excluded from
* uploads. Example: exclude = ["ignore_dir"]
*
* @optional
* @default `[]`
* @todo this needs to be implemented!
*/
exclude?: string[];
};
/**
* "Cron" definitions to trigger a worker's "scheduled" function.
* Lets you call workers periodically, much like a cron job.
* More details here https://developers.cloudflare.com/workers/platform/cron-triggers
*
* @inherited
* @default `{ crons: [] }`
* @optional
* @todo can we use typescript for cron patterns?
*/
triggers?: { crons: string[] };
/**
* Options to configure the development server that your worker will use.
* NB: This is NOT inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
dev?: {
/**
* IP address for the local dev server to listen on,
*
* @default `127.0.0.1`
* @todo this needs to be implemented
*/
ip?: string;
/**
* Port for the local dev server to listen on
*
* @default `8787`
*/
port?: number;
/**
* Protocol that local wrangler dev server listens to requests on.
*
* @default `http`
* @todo this needs to be implemented
*/
local_protocol?: string;
/**
* Protocol that wrangler dev forwards requests on
*
* @default `https`
* @todo this needs to be implemented
*/
upstream_protocol?: string;
};
/**
* Specifies the Usage Model for your Worker. There are two options -
* [bundled](https://developers.cloudflare.com/workers/platform/limits#bundled-usage-model) and
* [unbound](https://developers.cloudflare.com/workers/platform/limits#unbound-usage-model).
* For newly created Workers, if the Usage Model is omitted
* it will be set to the [default Usage Model set on the account](https://dash.cloudflare.com/?account=workers/default-usage-model).
* For existing Workers, if the Usage Model is omitted, it will be
* set to the Usage Model configured in the dashboard for that Worker.
*/
usage_model?: undefined | "bundled" | "unbound";
/**
* Configures a custom build step to be run by Wrangler when
* building your Worker. Refer to the [custom builds documentation](https://developers.cloudflare.com/workers/cli-wrangler/configuration#build)
* for more details.
*
* @default `undefined`
* @optional
* @inherited false
*/
build?: {
/** The command used to build your Worker. On Linux and macOS, the command is executed in the `sh` shell and the `cmd` shell for Windows. The `&&` and `||` shell operators may be used. */
command?: string;
/** The directory in which the command is executed. */
cwd?: string;
/** The directory to watch for changes while using wrangler dev, defaults to the current working directory */
watch_dir?: string;
} & /**
* Much of the rest of this configuration isn't necessary anymore
* in wrangler2. We infer the format automatically, and we can pass
* the path to the script either in the CLI (or, @todo, as the top level
* `entry` property).
*/ (
| {
upload?: {
/**
* The format of the Worker script, must be "service-worker".
*
* @deprecated We infer the format automatically now.
*/
format: "service-worker";
/**
* The path to the Worker script. This should be replaced
* by the top level `entry` property.
*
* @deprecated This will be replaced by the top level `entry` property.
*/
main: string;
};
}
| {
/**
* When we use the module format, we only really
* need to specify the entry point. The format is deduced
* automatically in wrangler2.
*/
upload?: {
/**
* The format of the Worker script, must be "modules".
*
* @deprecated We infer the format automatically now.
*/
format: "modules";
/**
* The directory you wish to upload your modules from,
* defaults to `dist` relative to the project root directory.
*
* @deprecated
* @breaking
*/
dir?: string;
/**
* The path to the Worker script. This should be replaced
* by the top level `entry` property.
*
* @deprecated This will be replaced by the top level `entry` property.
*/
main?: string;
/**
* An ordered list of rules that define which modules to import,
* and what type to import them as. You will need to specify rules
* to use Text, Data, and CompiledWasm modules, or when you wish to
* have a .js file be treated as an ESModule instead of CommonJS.
*
* @deprecated These are now inferred automatically for major file types, but you can still specify them manually.
* @todo this needs to be implemented!
* @breaking
*/
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[];
fallthrough?: boolean;
};
};
}
);
/**
* The `env` section defines overrides for the configuration for
* different environments. Most fields can be overridden, while
* some have to be specifically duplicated in every environment.
* For more information, see the documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration#environments
*/
env?: {
[envName: string]:
| undefined
| Omit<Config, "env" | "migrations" | "site" | "dev">;
};
};
type Build = {
command?: string;
cwd?: string;
watch_dir?: string;
} & (
| {
upload?: {
format: "service-worker";
main: string;
};
type ValidationResults = (
| { key: string; info: string }
| { key: string; error: string }
| { key: string; warning: string }
)[];
/**
* We also define a validation function that manually validates
* every field in the configuration as per the type definitions,
* along with the extra constraints we apply to some fields and
* constraints on combinations of fields. This is useful for
* presenting errors and messages to the user. Eventually, we will
* combine this with some automatic config rewriting tools.
*/
export async function validateConfig(
_config: Partial<Config>
): Promise<ValidationResults> {
const results: ValidationResults = [];
return results;
}
/**
* Process the environments (`env`) specified in the `config`.
*
* The environments configuration is complicated since each environment is a customized version of the main config.
* Some of the configuration can be inherited from the main config, while other configuration must replace what is in the main config.
*
* This function ensures that each environment is set up correctly with inherited configuration, as necessary.
* It will log a warning if an environment is missing required configuration.
*/
export function normaliseAndValidateEnvironmentsConfig(config: Config) {
if (config.env == undefined) {
// There are no environments specified so there is nothing to do here.
return;
}
const environments = config.env;
for (const envKey of Object.keys(environments)) {
const environment = environments[envKey];
// Given how TOML works, there should never be an environment containing nothing.
// I.e. if there is a section in a TOML file, then the parser will create an object for it.
// But it may be possible in the future if we change how the configuration is stored.
assert(
environment,
`Environment ${envKey} is specified in the config but not defined.`
);
// Fall back on "inherited fields" from the config, if not specified in the environment.
const inheritedFields = [
"name",
"account_id",
"workers_dev",
"compatibility_date",
"compatibility_flags",
"zone_id",
"routes",
"route",
"jsx_factory",
"jsx_fragment",
"site",
"triggers",
"usage_model",
];
for (const inheritedField of inheritedFields) {
if (config[inheritedField] !== undefined) {
if (environment[inheritedField] === undefined) {
environment[inheritedField] = config[inheritedField]; // TODO: - shallow or deep copy?
}
}
}
| {
upload?: {
format: "modules";
dir?: string;
main?: string;
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[]; // can we use typescript for these patterns?
fallthrough?: boolean;
};
};
// Warn if there is a "required" field in the top level config that has not been specified in the environment.
// These required fields are `vars`, `durable_objects`, `kv_namespaces` and `experimental_services`.
// Each of them has different characteristics that need to be checked.
// `vars` is just an object
if (config.vars !== undefined) {
if (environment.vars === undefined) {
console.warn(
`In your configuration, "vars" exists at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "vars" is not inherited by environments.\n` +
`Please add "vars" to "env.${envKey}".`
);
} else {
for (const varField of Object.keys(config.vars)) {
if (!(varField in environment.vars)) {
console.warn(
`In your configuration, "vars.${varField}" exists at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "vars" is not inherited by environments.\n` +
`Please add "vars.${varField}" to "env.${envKey}".`
);
}
}
}
}
);
type UsageModel = "bundled" | "unbound";
// `durable_objects` is an object containing a `bindings` array
if (config.durable_objects !== undefined) {
if (environment.durable_objects === undefined) {
console.warn(
`In your configuration, "durable_objects.bindings" exists at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "durable_objects" is not inherited by environments.\n` +
`Please add "durable_objects.bindings" to "env.${envKey}".`
);
} else {
const envBindingNames = new Set(
environment.durable_objects.bindings.map((b) => b.name)
);
for (const bindingName of config.durable_objects.bindings.map(
(b) => b.name
)) {
if (!envBindingNames.has(bindingName)) {
console.warn(
`In your configuration, there is a durable_objects binding with name "${bindingName}" at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "durable_objects" is not inherited by environments.\n` +
`Please add a binding for "${bindingName}" to "env.${envKey}.durable_objects.bindings".`
);
}
}
}
}
type Env = {
name?: string; // inherited
account_id?: string; // inherited
workers_dev?: boolean; // inherited
compatibility_date?: string; // inherited
compatibility_flags?: string[]; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
webpack_config?: string; // inherited
site?: Site;
jsx_factory?: string; // inherited
jsx_fragment?: string; // inherited
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
vars?: Vars;
durable_objects?: { bindings: DurableObject[] };
kv_namespaces?: KVNamespace[];
experimental_services?: Service[];
migrations?: DurableObjectMigration[];
usage_model?: UsageModel; // inherited
};
// `kv_namespaces` contains an array of namespace bindings
if (config.kv_namespaces !== undefined) {
if (environment.kv_namespaces === undefined) {
console.warn(
`In your configuration, "kv_namespaces" exists at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "kv_namespaces" is not inherited by environments.\n` +
`Please add "kv_namespaces" to "env.${envKey}".`
);
} else {
const envBindings = new Set(
environment.kv_namespaces.map((kvNamespace) => kvNamespace.binding)
);
for (const bindingName of config.kv_namespaces.map(
(kvNamespace) => kvNamespace.binding
)) {
if (!envBindings.has(bindingName)) {
console.warn(
`In your configuration, there is a kv_namespaces entry with binding "${bindingName}" at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "kv_namespaces" is not inherited by environments.\n` +
`Please add a binding for "${bindingName}" to "env.${envKey}.kv_namespaces".`
);
}
}
}
}
export type Config = {
name?: string; // inherited
account_id?: string; // inherited
// @deprecated Don't use this
type?: Project; // top level
compatibility_date?: string; // inherited
compatibility_flags?: string[]; // inherited
// -- there's some mutually exclusive logic for this next block,
// but I didn't bother for now
workers_dev?: boolean; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
// -- end mutually exclusive stuff
// @deprecated Don't use this
webpack_config?: string; // inherited
jsx_factory?: string; // inherited
jsx_fragment?: string; // inherited
vars?: Vars;
durable_objects?: { bindings: DurableObject[] };
kv_namespaces?: KVNamespace[];
experimental_services?: Service[];
migrations?: DurableObjectMigration[];
site?: Site; // inherited
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
dev?: Dev;
usage_model?: UsageModel; // inherited
// top level
build?: Build;
env?: { [envName: string]: void | Env };
};
// `experimental_services` contains an array of namespace bindings
if (config.experimental_services !== undefined) {
if (environment.experimental_services === undefined) {
console.warn(
`In your configuration, "experimental_services" exists at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "experimental_services" is not inherited by environments.\n` +
`Please add "experimental_services" to "env.${envKey}".`
);
} else {
const envBindingNames = new Set(
environment.experimental_services.map((service) => service.name)
);
for (const bindingName of config.experimental_services.map(
(service) => service.name
)) {
if (!envBindingNames.has(bindingName)) {
console.warn(
`In your configuration, there is an experimental_services entry with binding name "${bindingName}" at the top level, but not on "env.${envKey}".\n` +
`This is probably not what you want, since "experimental_services" is not inherited by environments.\n` +
`Please add a service for "${bindingName}" to "env.${envKey}.experimental_services".`
);
}
}
}
}
}
}
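To illustrate the inheritance rules implemented above, a minimal sketch of a config before and after normalisation (hypothetical values, not part of this diff):

const config: Config = {
  name: "my-worker",
  compatibility_date: "2022-01-01",
  vars: { GREETING: "hello" },
  env: {
    staging: { name: "my-worker-staging" },
  },
};
normaliseAndValidateEnvironmentsConfig(config);
// env.staging now inherits compatibility_date ("2022-01-01") because it is in
// the inheritedFields list, but a warning is logged for `vars`, which is NOT
// inherited and is missing from env.staging.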
@@ -0,1 +1,2 @@
import assert from "assert";
import type { MessageEvent } from "ws";
@@ -54,2 +55,8 @@ import WebSocket, { WebSocketServer } from "ws";
const inspectorIdRef = useRef(randomId());
/** The websocket from the devtools instance. */
const [localWebSocket, setLocalWebSocket] = useState<WebSocket>();
/** The websocket from the edge */
const [remoteWebSocket, setRemoteWebSocket] = useState<WebSocket>();
/**
@@ -60,14 +67,3 @@ * The local proxy server that acts as the bridge between
const serverRef = useRef<Server>();
/** The websocket server that runs on top of the proxy server. */
const wsServerRef = useRef<WebSocketServer>();
/** The websocket from the devtools instance. */
const [localWebSocket, setLocalWebSocket] = useState<WebSocket | undefined>();
/** The websocket from the edge */
const [remoteWebSocket, setRemoteWebSocket] = useState<
WebSocket | undefined
>();
if (!serverRef.current) {
// Let's create the proxy server!
if (serverRef.current === undefined) {
serverRef.current = createServer(
@@ -83,3 +79,3 @@ (req: IncomingMessage, res: ServerResponse) => {
Browser: `wrangler/v${version}`,
// TODO: (someday): The DevTools protocol should match that of edgeworker.
// TODO: (someday): The DevTools protocol should match that of Edge Worker.
// This could be exposed by the preview API.
@@ -124,27 +120,35 @@ "Protocol-Version": "1.3",
);
}
const server = serverRef.current;
// Let's create the websocket server on top of the proxy server
/**
* The websocket server that runs on top of the proxy server.
*/
const wsServerRef = useRef<WebSocketServer>();
if (wsServerRef.current === undefined) {
wsServerRef.current = new WebSocketServer({
server: serverRef.current,
server,
clientTracking: true,
});
wsServerRef.current.on("connection", (ws: WebSocket) => {
if (wsServerRef.current.clients.size > 1) {
/** We only want to have one active Devtools instance at a time. */
console.error(
"Tried to open a new devtools window when a previous one was already open."
);
ws.close(1013, "Too many clients; only one can be connected at a time");
} else {
// As promised, save the created websocket in a state hook
setLocalWebSocket(ws);
ws.addEventListener("close", () => {
// And clean up when devtools closes
setLocalWebSocket(undefined);
});
}
});
}
const wsServer = wsServerRef.current;
wsServer.on("connection", (ws: WebSocket) => {
if (wsServer.clients.size > 1) {
/** We only want to have one active Devtools instance at a time. */
console.error(
"Tried to open a new devtools window when a previous one was already open."
);
ws.close(1013, "Too many clients; only one can be connected at a time");
} else {
// As promised, save the created websocket in a state hook
setLocalWebSocket(ws);
ws.addEventListener("close", () => {
// And clean up when devtools closes
setLocalWebSocket(undefined);
});
}
});
/**
@@ -155,10 +159,10 @@ * We start and stop the server in an effect to take advantage
useEffect(() => {
serverRef.current.listen(props.port);
server.listen(props.port);
return () => {
serverRef.current.close();
server.close();
// Also disconnect any open websockets/devtools connections
wsServerRef.current.clients.forEach((ws) => ws.close());
wsServerRef.current.close();
wsServer.clients.forEach((ws) => ws.close());
wsServer.close();
};
}, [props.port]);
}, [props.port, server, wsServer]);
@@ -246,3 +250,3 @@ /**
params.exceptionDetails.text,
params.exceptionDetails.exception.description
params.exceptionDetails.exception?.description ?? ""
);
@@ -293,3 +297,3 @@ }
// tell it to clear the console.
wsServerRef.current.clients.forEach((client) => {
wsServer.clients.forEach((client) => {
// We could've used `localSocket` here, but
@@ -323,2 +327,3 @@ // then we would have had to add it to the effect
props.logToTerminal,
wsServer,
]);
@@ -361,2 +366,6 @@
try {
assert(
remoteWebSocket,
"Trying to send a message to an undefined `remoteWebSocket`"
);
remoteWebSocket.send(event.data);
@@ -379,2 +388,6 @@ } catch (e) {
function sendMessageToLocalWebSocket(event: MessageEvent) {
assert(
localWebSocket,
"Trying to send a message to an undefined `localWebSocket`"
);
localWebSocket.send(event.data);
@@ -433,3 +446,3 @@ }
function logConsoleMessage(evt: Protocol.Runtime.ConsoleAPICalledEvent): void {
const args = [];
const args: string[] = [];
for (const ro of evt.args) {
@@ -446,9 +459,9 @@ switch (ro.type) {
case "function":
args.push(`[Function: ${ro.description}]`);
args.push(`[Function: ${ro.description ?? "<no-description>"}]`);
break;
case "object":
if (!ro.preview) {
args.push(ro.description);
args.push(ro.description ?? "<no-description>");
} else {
args.push(ro.preview.description);
args.push(ro.preview.description ?? "<no-description>");
@@ -472,5 +485,9 @@ switch (ro.preview.subtype) {
"{\n" +
ro.preview.entries
.map(({ key, value }) => {
return ` ${key.description} => ${value.description}`;
// Maps always have entries
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
ro.preview
.entries!.map(({ key, value }) => {
return ` ${key?.description ?? "<unknown>"} => ${
value.description
}`;
})
@@ -486,4 +503,6 @@ .join(",\n") +
"{ " +
ro.preview.entries
.map(({ value }) => {
// Sets always have entries
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
ro.preview
.entries!.map(({ value }) => {
return `${value.description}`;
@@ -490,0 +509,0 @@ })
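A side note on the refactor above: the `serverRef`/`wsServerRef` pattern is a lazy, render-safe singleton, created synchronously on first render and reused afterwards. A minimal sketch of the pattern in isolation (hypothetical helper, not part of this diff):

import { useRef } from "react";

function useLazySingleton<T>(create: () => T): T {
  const ref = useRef<T>();
  if (ref.current === undefined) {
    ref.current = create(); // runs once per component instance
  }
  return ref.current;
}
// e.g. const server = useLazySingleton(() => createServer(handler));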
src/proxy.ts
import { connect } from "node:http2";
import type { ClientHttp2Session, ServerHttp2Stream } from "node:http2";
import { createServer } from "node:http";
import type { Server, IncomingHttpHeaders, RequestListener } from "node:http";
import type {
IncomingHttpHeaders,
RequestListener,
IncomingMessage,
ServerResponse,
Server,
} from "node:http";
import WebSocket from "faye-websocket";
import serveStatic from "serve-static";
import type { CfPreviewToken } from "./api/preview";
import { useEffect, useRef } from "react";
export interface HttpProxyInit {
host: string;
assetPath?: string | null;
onRequest?: (headers: IncomingHttpHeaders) => void;
onResponse?: (headers: IncomingHttpHeaders) => void;
/**
* `usePreviewServer` is a React hook that creates a local development
* server that can be used to develop a Worker.
*
* When we run `wrangler dev`, we start by uploading the compiled worker
* to the preview service, which responds with a preview token.
* (see `useWorker`/`createWorker` for details.)
* We can then use that token to connect to the preview server for a
* great local development experience. Further, as we change the worker,
* we can update the preview token transparently without having to restart
* the development server.
*/
/** Rewrite request headers to add the preview token. */
function addCfPreviewTokenHeader(
headers: IncomingHttpHeaders,
previewTokenValue: string
) {
headers["cf-workers-preview-token"] = previewTokenValue;
}
/**
* Creates a HTTP/1 proxy that sends requests over HTTP/2.
* Rewrite references in request headers
* from the preview host to the local host.
*/
export function createHttpProxy(init: HttpProxyInit): Server {
const { host, assetPath, onRequest = () => {}, onResponse = () => {} } = init;
const remote = connect(`https://${host}`);
const local = createServer();
// HTTP/2 -> HTTP/2
local.on("stream", (stream, headers: IncomingHttpHeaders) => {
onRequest(headers);
headers[":authority"] = host;
const request = stream.pipe(remote.request(headers));
request.on("response", (responseHeaders: IncomingHttpHeaders) => {
onResponse(responseHeaders);
stream.respond(responseHeaders);
request.pipe(stream, { end: true });
});
});
// HTTP/1 -> HTTP/2
const handleRequest: RequestListener = (message, response) => {
const { httpVersionMajor, headers, method, url } = message;
if (httpVersionMajor >= 2) {
return; // Already handled by the "stream" event.
function rewriteRemoteHostToLocalHostInHeaders(
headers: IncomingHttpHeaders,
remoteHost: string,
localPort: number
) {
for (const [name, value] of Object.entries(headers)) {
// Rewrite the remote host to the local host.
if (typeof value === "string" && value.includes(remoteHost)) {
headers[name] = value
.replaceAll(`https://${remoteHost}`, `http://localhost:${localPort}`)
.replaceAll(remoteHost, `localhost:${localPort}`);
}
onRequest(headers);
headers[":method"] = method;
headers[":path"] = url;
headers[":authority"] = host;
headers[":scheme"] = "https";
for (const name of Object.keys(headers)) {
if (HTTP1_HEADERS.has(name.toLowerCase())) {
delete headers[name];
}
}
export function usePreviewServer({
previewToken,
publicRoot,
port,
}: {
previewToken: CfPreviewToken | undefined;
publicRoot: undefined | string;
port: number;
}) {
/** Creates an HTTP/1 proxy that sends requests over HTTP/2. */
const proxyServer = useRef<Server>();
const proxy = (proxyServer.current ??= createProxyServer());
/**
* When we're not connected / getting a fresh token on changes,
* we'd like to buffer streams/requests until we're connected.
* Once connected, we can flush the buffered streams/requests.
* streamBufferRef is used to buffer http/2 streams, while
* requestResponseBufferRef is used to buffer http/1 requests.
*/
const streamBufferRef = useRef<
{ stream: ServerHttp2Stream; headers: IncomingHttpHeaders }[]
>([]);
const requestResponseBufferRef = useRef<
{ request: IncomingMessage; response: ServerResponse }[]
>([]);
useEffect(() => {
// If we don't have a token, that means either we're just starting up,
// or we're refreshing the token.
if (!previewToken) {
const cleanupListeners: (() => void)[] = [];
const bufferStream = (
stream: ServerHttp2Stream,
headers: IncomingHttpHeaders
) => {
// store the stream in a buffer so we can replay it later
streamBufferRef.current.push({ stream, headers });
};
proxy.on("stream", bufferStream);
cleanupListeners.push(() => proxy.off("stream", bufferStream));
const bufferRequestResponse = (
request: IncomingMessage,
response: ServerResponse
) => {
// store the request and response in a buffer so we can replay it later
requestResponseBufferRef.current.push({ request, response });
};
proxy.on("request", bufferRequestResponse);
cleanupListeners.push(() => proxy.off("request", bufferRequestResponse));
return () => {
cleanupListeners.forEach((cleanup) => cleanup());
};
}
// We have a token. Let's proxy requests to the preview end point.
const cleanupListeners: (() => void)[] = [];
const assetPath = typeof publicRoot === "string" ? publicRoot : null;
// create a ClientHttp2Session
const remote = connect(`https://${previewToken.host}`);
cleanupListeners.push(() => remote.destroy());
/** HTTP/2 -> HTTP/2 */
const handleStream = createStreamHandler(previewToken, remote, port);
proxy.on("stream", handleStream);
cleanupListeners.push(() => proxy.off("stream", handleStream));
// flush and replay buffered streams
streamBufferRef.current.forEach((buffer) =>
handleStream(buffer.stream, buffer.headers)
);
streamBufferRef.current = [];
/** HTTP/1 -> HTTP/2 */
const handleRequest: RequestListener = (
message: IncomingMessage,
response: ServerResponse
) => {
const { httpVersionMajor, headers, method, url } = message;
if (httpVersionMajor >= 2) {
return; // Already handled by the "stream" event.
}
}
const request = message.pipe(remote.request(headers)); | ||
request.on("response", (responseHeaders) => { | ||
const status = responseHeaders[":status"]; | ||
onResponse(responseHeaders); | ||
for (const name of Object.keys(responseHeaders)) { | ||
if (name.startsWith(":")) { | ||
delete responseHeaders[name]; | ||
addCfPreviewTokenHeader(headers, previewToken.value); | ||
headers[":method"] = method; | ||
headers[":path"] = url; | ||
headers[":authority"] = previewToken.host; | ||
headers[":scheme"] = "https"; | ||
for (const name of Object.keys(headers)) { | ||
if (HTTP1_HEADERS.has(name.toLowerCase())) { | ||
delete headers[name]; | ||
} | ||
} | ||
response.writeHead(status, responseHeaders); | ||
request.pipe(response, { end: true }); | ||
}); | ||
}; | ||
// If an asset path is defined, check the file system | ||
// for a file first and serve if it exists. | ||
if (assetPath) { | ||
const handleAsset = serveStatic(assetPath, { | ||
cacheControl: false, | ||
}); | ||
local.on("request", (request, response) => { | ||
handleAsset(request, response, () => { | ||
handleRequest(request, response); | ||
const request = message.pipe(remote.request(headers)); | ||
request.on("response", (responseHeaders) => { | ||
const status = responseHeaders[":status"] ?? 500; | ||
rewriteRemoteHostToLocalHostInHeaders( | ||
responseHeaders, | ||
previewToken.host, | ||
port | ||
); | ||
for (const name of Object.keys(responseHeaders)) { | ||
if (name.startsWith(":")) { | ||
delete responseHeaders[name]; | ||
} | ||
} | ||
response.writeHead(status, responseHeaders); | ||
request.pipe(response, { end: true }); | ||
}); | ||
}); | ||
} else { | ||
local.on("request", handleRequest); | ||
} | ||
// HTTP/1 -> WebSocket (over HTTP/1) | ||
local.on("upgrade", (message, socket, body) => { | ||
const { headers, url } = message; | ||
onRequest(headers); | ||
headers["host"] = host; | ||
const localWebsocket = new WebSocket(message, socket, body); | ||
// TODO(soon): Custom WebSocket protocol is not working? | ||
const remoteWebsocketClient = new WebSocket.Client( | ||
`wss://${host}${url}`, | ||
[], | ||
{ headers } | ||
}; | ||
// If an asset path is defined, check the file system | ||
// for a file first and serve if it exists. | ||
const actualHandleRequest = assetPath | ||
? createHandleAssetsRequest(assetPath, handleRequest) | ||
: handleRequest; | ||
proxy.on("request", actualHandleRequest); | ||
cleanupListeners.push(() => proxy.off("request", actualHandleRequest)); | ||
// flush and replay buffered requests | ||
requestResponseBufferRef.current.forEach(({ request, response }) => | ||
actualHandleRequest(request, response) | ||
); | ||
localWebsocket.pipe(remoteWebsocketClient).pipe(localWebsocket); | ||
requestResponseBufferRef.current = []; | ||
/** HTTP/1 -> WebSocket (over HTTP/1) */ | ||
const handleUpgrade = ( | ||
message: IncomingMessage, | ||
socket: WebSocket, | ||
body: Buffer | ||
) => { | ||
const { headers, url } = message; | ||
addCfPreviewTokenHeader(headers, previewToken.value); | ||
headers["host"] = previewToken.host; | ||
const localWebsocket = new WebSocket(message, socket, body); | ||
// TODO(soon): Custom WebSocket protocol is not working? | ||
const remoteWebsocketClient = new WebSocket.Client( | ||
`wss://${previewToken.host}${url}`, | ||
[], | ||
{ headers } | ||
); | ||
localWebsocket.pipe(remoteWebsocketClient).pipe(localWebsocket); | ||
// We close down websockets whenever we refresh the token. | ||
cleanupListeners.push(() => { | ||
localWebsocket.destroy(); | ||
remoteWebsocketClient.destroy(); | ||
}); | ||
}; | ||
proxy.on("upgrade", handleUpgrade); | ||
cleanupListeners.push(() => proxy.off("upgrade", handleUpgrade)); | ||
return () => { | ||
cleanupListeners.forEach((d) => d()); | ||
}; | ||
}, [previewToken, publicRoot, port, proxy]); | ||
// Start/stop the server whenever the | ||
// containing component is mounted/unmounted. | ||
useEffect(() => { | ||
proxy.listen(port); | ||
console.log(`⬣ Listening at http://localhost:${port}`); | ||
return () => { | ||
proxy.close(); | ||
}; | ||
}, [port, proxy]); | ||
} | ||
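// --- Editor's illustrative sketch (not part of the diff). The core trick in
// `handleRequest` above: an incoming HTTP/1 request is re-expressed as HTTP/2
// by moving the method/path/host/scheme into the `:method`/`:path`/
// `:authority`/`:scheme` pseudo-headers before sending it over the
// ClientHttp2Session. A standalone, hedged version (origin and path are
// hypothetical):
import { connect as http2Connect } from "node:http2";
function http1ToHttp2Sketch(remoteOrigin: string, requestPath: string) {
  const session = http2Connect(remoteOrigin);
  const request = session.request({
    ":method": "GET",
    ":path": requestPath,
    ":authority": new URL(remoteOrigin).host,
    ":scheme": "https",
  });
  request.on("response", (responseHeaders) => {
    // Pseudo-headers like ":status" must be stripped before they are
    // written back onto an HTTP/1 response.
    console.log("status:", responseHeaders[":status"]);
  });
  request.end();
  return request;
}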
  remote.on("close", () => { | ||
    local.close(); | ||
  }); | ||
  return local; | ||
function createHandleAssetsRequest( | ||
  assetPath: string, | ||
  handleRequest: RequestListener | ||
) { | ||
  const handleAsset = serveStatic(assetPath, { | ||
    cacheControl: false, | ||
  }); | ||
  return (request: IncomingMessage, response: ServerResponse) => { | ||
    handleAsset(request, response, () => { | ||
      handleRequest(request, response); | ||
    }); | ||
  }; | ||
} | ||
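// --- Editor's usage note (not part of the diff), hedged: serve-static calls
// its third argument (`next`) when no file matches, so the returned listener
// tries the filesystem first and only falls through to the worker proxy on a
// miss. For example:
//
//   const listener = createHandleAssetsRequest("./public", handleRequest);
//   proxy.on("request", listener);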
/** A Set of headers we want to remove from HTTP/1 requests. */ | ||
const HTTP1_HEADERS = new Set([ | ||
@@ -104,1 +252,52 @@ "host", | ||
]); | ||
function createProxyServer() { | ||
return createServer() | ||
.on("request", function (req, res) { | ||
// log all requests | ||
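      // (Note: at "request" time the response has not been written yet, so | ||
      // res.statusCode below is the default value, not the final status.) | ||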
console.log( | ||
new Date().toLocaleTimeString(), | ||
req.method, | ||
req.url, | ||
res.statusCode | ||
); | ||
}) | ||
.on("upgrade", (req) => { | ||
// log all websocket connections | ||
console.log( | ||
new Date().toLocaleTimeString(), | ||
req.method, | ||
req.url, | ||
101, | ||
"(WebSocket)" | ||
); | ||
}) | ||
.on("error", (err) => { | ||
// log all connection errors | ||
console.error(new Date().toLocaleTimeString(), err); | ||
}); | ||
} | ||
function createStreamHandler( | ||
previewToken: CfPreviewToken, | ||
remote: ClientHttp2Session, | ||
port: number | ||
) { | ||
return function handleStream( | ||
stream: ServerHttp2Stream, | ||
headers: IncomingHttpHeaders | ||
) { | ||
addCfPreviewTokenHeader(headers, previewToken.value); | ||
headers[":authority"] = previewToken.host; | ||
const request = stream.pipe(remote.request(headers)); | ||
request.on("response", (responseHeaders: IncomingHttpHeaders) => { | ||
rewriteRemoteHostToLocalHostInHeaders( | ||
responseHeaders, | ||
previewToken.host, | ||
port | ||
); | ||
stream.respond(responseHeaders); | ||
request.pipe(stream, { end: true }); | ||
}); | ||
}; | ||
} |
import assert from "node:assert"; | ||
import path from "node:path"; | ||
import { readFile } from "node:fs/promises"; | ||
import esbuild from "esbuild"; | ||
import * as esbuild from "esbuild"; | ||
import type { Metafile } from "esbuild"; | ||
import { execa } from "execa"; | ||
@@ -12,21 +13,22 @@ import tmp from "tmp-promise"; | ||
import makeModuleCollector from "./module-collection"; | ||
import type { AssetPaths } from "./sites"; | ||
import { syncAssets } from "./sites"; | ||
type CfScriptFormat = void | "modules" | "service-worker"; | ||
type CfScriptFormat = undefined | "modules" | "service-worker"; | ||
type Props = { | ||
config: Config; | ||
format?: CfScriptFormat; | ||
script?: string; | ||
name?: string; | ||
env?: string; | ||
compatibilityDate?: string; | ||
compatibilityFlags?: string[]; | ||
public?: string; | ||
site?: string; | ||
triggers?: (string | number)[]; | ||
routes?: (string | number)[]; | ||
legacyEnv?: boolean; | ||
jsxFactory: void | string; | ||
jsxFragment: void | string; | ||
format: CfScriptFormat | undefined; | ||
script: string | undefined; | ||
name: string | undefined; | ||
env: string | undefined; | ||
compatibilityDate: string | undefined; | ||
compatibilityFlags: string[] | undefined; | ||
assetPaths: AssetPaths | undefined; | ||
triggers: (string | number)[] | undefined; | ||
routes: (string | number)[] | undefined; | ||
legacyEnv: boolean | undefined; | ||
jsxFactory: undefined | string; | ||
jsxFragment: undefined | string; | ||
experimentalPublic: boolean; | ||
}; | ||
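// --- Editor's note (not part of the diff): changing `format?: CfScriptFormat`
// to `format: CfScriptFormat | undefined` keeps the same values but makes the
// key itself required, so every call site must spell out each prop and a
// forgotten one becomes a compile error instead of a silent undefined:
//
//   const loose: { name?: string } = {};                            // compiles
//   const strict: { name: string | undefined } = {};                // error: 'name' is missing
//   const ok: { name: string | undefined } = { name: undefined };   // compiles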
@@ -39,6 +41,6 @@ | ||
export default async function publish(props: Props): Promise<void> { | ||
if (props.public && props.format === "service-worker") { | ||
if (props.experimentalPublic && props.format === "service-worker") { | ||
// TODO: check config too | ||
throw new Error( | ||
"You cannot use the service worker format with a public directory." | ||
"You cannot publish in the service worker format with a public directory." | ||
); | ||
@@ -55,9 +57,14 @@ } | ||
const envRootObj = props.env ? config.env[props.env] || {} : config; | ||
const envRootObj = | ||
props.env && config.env ? config.env[props.env] || {} : config; | ||
assert( | ||
envRootObj.compatibility_date || props["compatibility-date"], | ||
envRootObj.compatibility_date || props.compatibilityDate, | ||
"A compatibility_date is required when publishing. Add one to your wrangler.toml file, or pass it in your terminal as --compatibility_date. See https://developers.cloudflare.com/workers/platform/compatibility-dates for more information." | ||
); | ||
if (accountId === undefined) { | ||
throw new Error("No account_id provided."); | ||
} | ||
const triggers = props.triggers || config.triggers?.crons; | ||
@@ -79,6 +86,8 @@ const routes = props.routes || config.routes; | ||
if (props.script) { | ||
file = props.script; | ||
// If the script name comes from the command line it is relative to the current working directory. | ||
file = path.resolve(props.script); | ||
} else { | ||
// If the script name comes from the config, then it is relative to the wrangler.toml file. | ||
assert(build?.upload?.main, "missing main file"); | ||
file = path.join(path.dirname(__path__), build.upload.main); | ||
file = path.resolve(path.dirname(__path__), build.upload.main); | ||
} | ||
@@ -92,3 +101,2 @@ | ||
const destination = await tmp.dir({ unsafeCleanup: true }); | ||
if (props.config.build?.command) { | ||
@@ -107,3 +115,3 @@ // TODO: add a deprecation message here? | ||
const result = await esbuild.build({ | ||
...(props.public | ||
...(props.experimentalPublic | ||
? { | ||
@@ -116,10 +124,10 @@ stdin: { | ||
) | ||
).replace("__ENTRY_POINT__", path.join(process.cwd(), file)), | ||
).replace("__ENTRY_POINT__", file), | ||
sourcefile: "static-asset-facade.js", | ||
resolveDir: path.dirname(file), | ||
}, | ||
nodePaths: [path.join(__dirname, "../vendor")], | ||
} | ||
: { entryPoints: [file] }), | ||
bundle: true, | ||
nodePaths: props.public ? [path.join(__dirname, "../vendor")] : undefined, | ||
outdir: destination.path, | ||
@@ -139,14 +147,29 @@ external: ["__STATIC_CONTENT_MANIFEST"], | ||
const chunks = Object.entries(result.metafile.outputs).find( | ||
([_path, { entryPoint }]) => | ||
entryPoint === | ||
(props.public | ||
? path.join(path.dirname(file), "static-asset-facade.js") | ||
: file) | ||
// result.metafile is defined because of the `metafile: true` option above. | ||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||
const metafile = result.metafile!; | ||
const entryPoints = Object.values(metafile.outputs).filter( | ||
(output) => output.entryPoint !== undefined | ||
); | ||
assert( | ||
entryPoints.length > 0, | ||
`Cannot find entry-point "${file}" in generated bundle.` + | ||
listEntryPoints(entryPoints) | ||
); | ||
assert( | ||
entryPoints.length < 2, | ||
"More than one entry-point found for generated bundle." + | ||
listEntryPoints(entryPoints) | ||
); | ||
const entryPointExports = entryPoints[0].exports; | ||
const resolvedEntryPointPath = path.resolve( | ||
destination.path, | ||
// We know that entryPoint is not null because we filtered out those without above. | ||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||
entryPoints[0].entryPoint! | ||
); | ||
const { format } = props; | ||
const bundle = { | ||
type: chunks[1].exports.length > 0 ? "esm" : "commonjs", | ||
exports: chunks[1].exports, | ||
type: entryPointExports.length > 0 ? "esm" : "commonjs", | ||
exports: entryPointExports, | ||
}; | ||
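// --- Editor's illustrative sketch (not part of the diff). How the entry-point
// detection above works in isolation: with `metafile: true`, esbuild describes
// every output file, and outputs produced from an entry point carry an
// `entryPoint` field. The input/output paths below are hypothetical.
async function findEntryPointSketch(): Promise<string> {
  const result = await esbuild.build({
    entryPoints: ["src/index.ts"], // hypothetical entry file
    bundle: true,
    outdir: "dist",
    metafile: true,
  });
  // metafile is defined because of the `metafile: true` option above.
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  const outputs = Object.entries(result.metafile!.outputs);
  const entries = outputs.filter(([, output]) => output.entryPoint !== undefined);
  assert(entries.length === 1, "expected exactly one entry point");
  return entries[0][0]; // e.g. "dist/index.js"
}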
@@ -168,3 +191,3 @@ | ||
const content = await readFile(chunks[0], { encoding: "utf-8" }); | ||
const content = await readFile(resolvedEntryPointPath, { encoding: "utf-8" }); | ||
await destination.cleanup(); | ||
@@ -175,3 +198,3 @@ | ||
let migrations; | ||
if ("migrations" in config) { | ||
if (config.migrations !== undefined) { | ||
const scripts = await fetchResult<{ id: string; migration_tag: string }[]>( | ||
@@ -212,11 +235,8 @@ `/accounts/${accountId}/workers/scripts` | ||
const assets = | ||
props.public || props.site || props.config.site?.bucket // TODO: allow both | ||
? await syncAssets( | ||
accountId, | ||
scriptName, | ||
props.public || props.site || props.config.site?.bucket, | ||
false | ||
) | ||
: { manifest: undefined, namespace: undefined }; | ||
const assets = await syncAssets( | ||
accountId, | ||
scriptName, | ||
props.assetPaths, | ||
false | ||
); | ||
@@ -237,3 +257,3 @@ const bindings: CfWorkerInit["bindings"] = { | ||
main: { | ||
name: path.basename(chunks[0]), | ||
name: path.basename(resolvedEntryPointPath), | ||
content: content, | ||
@@ -271,3 +291,3 @@ type: bundle.type === "esm" ? "esm" : "commonjs", | ||
const { available_on_subdomain } = await fetchResult( | ||
`${workerUrl}?available_on_subdomain=true`, | ||
workerUrl, | ||
{ | ||
@@ -277,3 +297,4 @@ method: "PUT", | ||
body: toFormData(worker), | ||
} | ||
}, | ||
new URLSearchParams({ available_on_subdomains: "true" }) | ||
); | ||
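// --- Editor's note (not part of the diff), hedged: the change above passes
// the query string as a URLSearchParams argument instead of hand-concatenating
// it into the URL, so encoding is handled for free. Assuming fetchResult
// appends the params internally, the request is equivalent to:
//
//   await fetch(`${workerUrl}?${new URLSearchParams({
//     available_on_subdomains: "true",
//   })}`, { method: "PUT", body: toFormData(worker) });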
@@ -373,1 +394,7 @@ | ||
} | ||
function listEntryPoints(outputs: ValueOf<Metafile["outputs"]>[]): string { | ||
return outputs.map((output) => output.entryPoint).join("\n"); | ||
} | ||
type ValueOf<T> = T[keyof T]; |
Sorry, the diff of 8 files is not supported yet
Sorry, the diff of 1 file is too big to display
Major refactor
Supply chain risk: this package has recently undergone a major refactor. It may be unstable, or the refactor may signal significant internal changes. Use caution when updating to versions that include such changes.
Found 1 instance in 1 package
+ Added xxhash-addon@^1.4.0
+ Added @miniflare/cache@2.2.0 (transitive)
+ Added @miniflare/cli-parser@2.2.0 (transitive)
+ Added @miniflare/core@2.2.0 (transitive)
+ Added @miniflare/durable-objects@2.2.0 (transitive)
+ Added @miniflare/html-rewriter@2.2.0 (transitive)
+ Added @miniflare/http-server@2.2.0 (transitive)
+ Added @miniflare/kv@2.2.0 (transitive)
+ Added @miniflare/runner-vm@2.2.0 (transitive)
+ Added @miniflare/scheduler@2.2.0 (transitive)
+ Added @miniflare/shared@2.2.0 (transitive)
+ Added @miniflare/sites@2.2.0 (transitive)
+ Added @miniflare/storage-file@2.2.0 (transitive)
+ Added @miniflare/storage-memory@2.2.0 (transitive)
+ Added @miniflare/watcher@2.2.0 (transitive)
+ Added @miniflare/web-sockets@2.2.0 (transitive)
+ Added @types/node@22.13.1 (transitive)
+ Added @types/node-forge@1.3.11 (transitive)
+ Added html-rewriter-wasm@0.4.1 (transitive)
+ Added miniflare@2.2.0 (transitive)
+ Added node-forge@1.3.1 (transitive)
+ Added selfsigned@2.4.1 (transitive)
+ Added undici-types@6.20.0 (transitive)
+ Added xxhash-addon@1.5.0 (transitive)
- Removed @miniflare/cache@2.0.0 (transitive)
- Removed @miniflare/cli-parser@2.0.0 (transitive)
- Removed @miniflare/core@2.0.0 (transitive)
- Removed @miniflare/durable-objects@2.0.0 (transitive)
- Removed @miniflare/html-rewriter@2.0.0 (transitive)
- Removed @miniflare/http-server@2.0.0 (transitive)
- Removed @miniflare/kv@2.0.0 (transitive)
- Removed @miniflare/runner-vm@2.0.0 (transitive)
- Removed @miniflare/scheduler@2.0.0 (transitive)
- Removed @miniflare/shared@2.0.0 (transitive)
- Removed @miniflare/sites@2.0.0 (transitive)
- Removed @miniflare/storage-file@2.0.0 (transitive)
- Removed @miniflare/storage-memory@2.0.0 (transitive)
- Removed @miniflare/watcher@2.0.0 (transitive)
- Removed @miniflare/web-sockets@2.0.0 (transitive)
- Removed html-rewriter-wasm@0.3.2 (transitive)
- Removed miniflare@2.0.0 (transitive)
- Removed node-forge@0.10.0 (transitive)
- Removed selfsigned@1.10.14 (transitive)
Updated miniflare@2.2.0