New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

wrangler

Package Overview
Dependencies
Maintainers
4
Versions
4199
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

wrangler - npm Package Compare versions

Comparing version 0.0.0-5d2c177 to 0.0.0-5d423e9

src/inspect.ts

5

package.json
{
"name": "wrangler",
"version": "0.0.0-5d2c177",
"version": "0.0.0-5d423e9",
"author": "wrangler@cloudflare.com",

@@ -40,3 +40,3 @@ "description": "Command-line interface for all things Cloudflare Workers",

"esbuild": "0.14.1",
"miniflare": "2.0.0-rc.5",
"miniflare": "2.1.0",
"path-to-regexp": "^6.2.0",

@@ -63,2 +63,3 @@ "semiver": "^1.1.0"

"command-exists": "^1.2.9",
"devtools-protocol": "^0.0.955664",
"execa": "^6.0.0",

@@ -65,0 +66,0 @@ "faye-websocket": "^0.11.4",

@@ -9,2 +9,3 @@ import path from "path";

sourcemap?: boolean;
fallbackService?: string;
watch?: boolean;

@@ -19,2 +20,3 @@ onEnd?: () => void;

sourcemap = false,
fallbackService = "ASSETS",
watch = false,

@@ -36,2 +38,5 @@ onEnd = () => {},

allowOverwrite: true,
define: {
__FALLBACK_SERVICE__: JSON.stringify(fallbackService),
},
plugins: [

@@ -38,0 +43,0 @@ {

9

pages/functions/filepath-routing.test.ts
import { compareRoutes } from "./filepath-routing";
describe("compareRoutes()", () => {
test("routes / last", () => {
expect(compareRoutes("/", "/foo")).toBeGreaterThanOrEqual(1);
expect(compareRoutes("/", "/:foo")).toBeGreaterThanOrEqual(1);
expect(compareRoutes("/", "/:foo*")).toBeGreaterThanOrEqual(1);
});
test("routes with fewer segments come after those with more segments", () => {
expect(compareRoutes("/foo", "/foo/bar")).toBe(1);
expect(compareRoutes("/foo", "/foo/bar")).toBeGreaterThanOrEqual(1);
expect(compareRoutes("/foo", "/foo/bar/cat")).toBeGreaterThanOrEqual(1);
});

@@ -7,0 +14,0 @@

@@ -1,3 +0,1 @@

/* eslint-disable @typescript-eslint/no-explicit-any */
import path from "path";

@@ -8,5 +6,4 @@ import fs from "fs/promises";

import * as acornWalk from "acorn-walk";
import type { Config } from "./routes";
import type { Identifier } from "estree";
import type { ExportNamedDeclaration } from "@babel/types";
import type { Config, RouteConfig } from "./routes";
import type { ExportNamedDeclaration, Identifier } from "estree";

@@ -22,6 +19,3 @@ type Arguments = {

}: Arguments) {
let routeEntries: [
string,
{ [key in "module" | "middleware"]?: string[] }
][] = [] as any;
let routeEntries: [string, RouteConfig][] = [];

@@ -38,3 +32,3 @@ if (!baseURL.startsWith("/")) {

const ext = path.extname(filepath);
if (/\.(mjs|js|ts)/.test(ext)) {
if (/^\.(mjs|js|ts|tsx|jsx)$/.test(ext)) {
// transform the code to ensure we're working with vanilla JS + ESM

@@ -51,4 +45,6 @@ const { code } = await transform(await fs.readFile(filepath, "utf-8"), {

acornWalk.simple(ast, {
ExportNamedDeclaration(_node) {
const node: ExportNamedDeclaration = _node as any;
ExportNamedDeclaration(_node: unknown) {
// This dynamic cast assumes that the AST generated by acornWalk will generate nodes that
// are compatible with the eslint AST nodes.
const node = _node as ExportNamedDeclaration;

@@ -173,3 +169,3 @@ // this is an array because multiple things can be exported from a single statement

const segments = segmentedPath.slice(1).split("/");
const segments = segmentedPath.slice(1).split("/").filter(Boolean);
return [method, segments];

@@ -176,0 +172,0 @@ }

@@ -71,3 +71,3 @@ export const RESERVED_KEYWORDS = [

export const isValidIdentifer = (identifier: string) =>
export const isValidIdentifier = (identifier: string) =>
validIdentifierRegex.test(identifier);

@@ -74,0 +74,0 @@

@@ -1,6 +0,4 @@

/* eslint-disable @typescript-eslint/no-explicit-any */
import path from "path";
import fs from "fs/promises";
import { isValidIdentifer, normalizeIdentifier } from "./identifiers";
import { isValidIdentifier, normalizeIdentifier } from "./identifiers";

@@ -20,3 +18,3 @@ export const HTTP_METHODS = [

): maybeHTTPMethod is HTTPMethod {
return HTTP_METHODS.includes(maybeHTTPMethod as any);
return (HTTP_METHODS as readonly string[]).includes(maybeHTTPMethod);
}

@@ -33,12 +31,14 @@

routes?: RoutesConfig;
schedules?: any;
schedules?: unknown;
};
export type RoutesConfig = {
[route: string]: {
middleware?: string | string[];
module?: string | string[];
};
[route: string]: RouteConfig;
};
export type RouteConfig = {
middleware?: string | string[];
module?: string | string[];
};
type ImportMap = Map<

@@ -98,3 +98,3 @@ string,

// ensure the module name (if provided) is a valid identifier to guard against injection attacks
if (name !== "default" && !isValidIdentifer(name)) {
if (name !== "default" && !isValidIdentifier(name)) {
throw new Error(`Invalid module identifier "${name}"`);

@@ -101,0 +101,0 @@ }

@@ -1,3 +0,1 @@

/* eslint-disable @typescript-eslint/no-explicit-any */
import { match } from "path-to-regexp";

@@ -7,7 +5,7 @@ import type { HTTPMethod } from "./routes";

/* TODO: Grab these from @cloudflare/workers-types instead */
type Params<P extends string = any> = Record<P, string | string[]>;
type Params<P extends string = string> = Record<P, string | string[]>;
type EventContext<Env, P extends string, Data> = {
request: Request;
waitUntil: (promise: Promise<any>) => void;
waitUntil: (promise: Promise<unknown>) => void;
next: (input?: Request | string, init?: RequestInit) => Promise<Response>;

@@ -21,3 +19,3 @@ env: Env & { ASSETS: { fetch: typeof fetch } };

Env = unknown,
P extends string = any,
P extends string = string,
Data extends Record<string, unknown> = Record<string, unknown>

@@ -36,6 +34,8 @@ > = (context: EventContext<Env, P, Data>) => Response | Promise<Response>;

declare const routes: RouteHandler[];
// define `__FALLBACK_SERVICE__` via ESBuild
declare const __FALLBACK_SERVICE__: string;
// expect an ASSETS fetcher binding pointing to the asset-server stage
type Env = {
[name: string]: any;
[name: string]: unknown;
ASSETS: { fetch(url: string, init: RequestInit): Promise<Response> };

@@ -45,10 +45,11 @@ };

type WorkerContext = {
waitUntil: (promise: Promise<any>) => void;
waitUntil: (promise: Promise<unknown>) => void;
};
// eslint-disable-next-line @typescript-eslint/no-unused-vars -- `env` can be used by __FALLBACK_SERVICE_FETCH__
function* executeRequest(request: Request, env: Env) {
const requestPath = new URL(request.url).pathname;
// First, iterate through the routes and execute "middlewares" on partial route matches
for (const route of routes) {
// First, iterate through the routes (backwards) and execute "middlewares" on partial route matches
for (const route of [...routes].reverse()) {
if (

@@ -95,5 +96,9 @@ route.methods.length &&

// Finally, yield to the asset-server
// Finally, yield to the fallback service (`env.ASSETS.fetch` in Pages' case)
return {
handler: () => env.ASSETS.fetch(request.url, request),
handler: () =>
__FALLBACK_SERVICE__
? // @ts-expect-error expecting __FALLBACK_SERVICE__ to be the name of a service binding, so fetch should be defined
env[__FALLBACK_SERVICE__].fetch(request)
: fetch(request),
params: {} as Params,

@@ -115,3 +120,7 @@ };

const { handler, params } = value;
const context: EventContext<unknown, any, any> = {
const context: EventContext<
unknown,
string,
Record<string, unknown>
> = {
request: new Request(request.clone()),

@@ -118,0 +127,0 @@ next,

@@ -115,3 +115,3 @@ import type { CfPreviewToken } from "./preview";

*/
name: string | void;
name: string | undefined;
/**

@@ -124,3 +124,3 @@ * The entrypoint module.

*/
modules: void | CfModule[];
modules: undefined | CfModule[];
/**

@@ -135,6 +135,6 @@ * All the bindings

};
migrations: void | CfDurableObjectMigrations;
compatibility_date: string | void;
compatibility_flags: void | string[];
usage_model: void | "bundled" | "unbound";
migrations: undefined | CfDurableObjectMigrations;
compatibility_date: string | undefined;
compatibility_flags: undefined | string[];
usage_model: undefined | "bundled" | "unbound";
}

@@ -141,0 +141,0 @@

@@ -101,4 +101,3 @@ import type { RequestInit } from "node-fetch";

function hasCursor(result_info: unknown): result_info is { cursor: string } {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
return (result_info as any)?.cursor !== undefined;
return (result_info as { cursor } | undefined)?.cursor !== undefined;
}

@@ -1,131 +0,487 @@

// we're going to manually write both the type definition AND
// the validator for the config, so that we can give better error messages
/**
* This is the static type definition for the configuration object.
* It reflects the configuration that you can write in wrangler.toml,
* and optionally augment with arguments passed directly to wrangler.
* The type definition doesn't fully reflect the constraints applied
* to the configuration, but it is a good starting point. Later, we
* also defined a validator function that will validate the configuration
* with the same rules as the type definition, as well as the extra
* constraints. The type definition is good for asserting correctness
* in the wrangler codebase, whereas the validator function is useful
* for signalling errors in the configuration to a user of wrangler.
*
* For more information about the configuration object, see the
* documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration
*
* Legend for the annotations:
*
* *:optional means providing a value isn't mandatory
* *:deprecated means the field itself isn't necessary anymore in wrangler.toml
* *:breaking means the deprecation/optionality is a breaking change from wrangler 1
* *:todo means there's more work to be done (with details attached)
* *:inherited means the field is copied to all environments
*/
export type Config = {
/**
* The name of your worker. Alphanumeric + dashes only.
*
* @optional
* @inherited
*/
name?: string;
type DurableObjectMigration = {
tag: string;
new_classes?: string[];
renamed_classes?: string[];
deleted_classes?: string[];
};
/**
* The entrypoint/path to the JavaScript file that will be executed.
*
* @optional
* @inherited
* @todo this needs to be implemented!
*/
entry?: string;
type Project = "webpack" | "javascript" | "rust";
/**
* This is the ID of the account associated with your zone.
* You might have more than one account, so make sure to use
* the ID of the account associated with the zone/route you
* provide, if you provide one. It can also be specified through
* the CF_ACCOUNT_ID environment variable.
*
* @optional
* @inherited
*/
account_id?: string;
type Site = {
// inherited
bucket: string;
"entry-point": string;
include?: string[];
exclude?: string[];
};
/**
* The project "type". A holdover from wrangler 1.x.
* Valid values were "webpack", "javascript", and "rust".
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @optional
* @inherited
* @breaking
*/
type?: "webpack" | "javascript" | "rust";
type Dev = {
ip?: string;
port?: number;
local_protocol?: string;
upstream_protocol?: string;
};
/**
* A date in the form yyyy-mm-dd, which will be used to determine
* which version of the Workers runtime is used. More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
* @optional true for `dev`, false for `publish`
* @inherited
*/
compatibility_date?: string;
export type Vars = { [key: string]: string };
/**
* A list of flags that enable features from upcoming features of
* the Workers runtime, usually used together with compatibility_flags.
* More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
*
* @optional
* @inherited
* @todo This could be an enum!
*/
compatibility_flags?: string[];
type Cron = string; // TODO: we should be able to parse a cron pattern with ts
/**
* Whether we use <name>.<subdomain>.workers.dev to
* test and deploy your worker.
*
* @default `true` (This is a breaking change from wrangler 1)
* @optional
* @inherited
* @breaking
*/
workers_dev?: boolean;
type KVNamespace = {
binding: string;
preview_id?: string;
id: string;
};
/**
* The zone ID of the zone you want to deploy to. You can find this
* in your domain page on the dashboard.
*
* @deprecated This is unnecessary since we can deduce this from routes directly.
* @optional
* @inherited
*/
zone_id?: string;
type DurableObject = {
name: string;
class_name: string;
script_name?: string;
};
/**
* A list of routes that your worker should be deployed to.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
routes?: string[];
type Service = {
name: string;
service: string;
environment: string;
};
/**
* A route that your worker should be deployed to. Literally
* the same as routes, but only one.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
route?: string;
type Build = {
command?: string;
cwd?: string;
watch_dir?: string;
} & (
| {
upload?: {
format: "service-worker";
main: string;
};
}
| {
upload?: {
format: "modules";
dir?: string;
main?: string;
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[]; // can we use typescript for these patterns?
fallthrough?: boolean;
/**
* Path to the webpack config to use when building your worker.
* A holdover from wrangler 1.x, used with `type: "webpack"`.
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @inherited
* @breaking
*/
webpack_config?: string;
/**
* The function to use to replace jsx syntax.
*
* @default `"React.createElement"`
* @optional
* @inherited
*/
jsx_factory?: string;
/**
* The function to use to replace jsx fragment syntax.
*
* @default `"React.Fragment"`
* @optional
* @inherited
*/
jsx_fragment?: string;
/**
* A map of environment variables to set when deploying your worker.
* Of note, they can only be strings. Which is unfortunate, really.
* (TODO: verify that they can only be strings?)
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
vars?: { [key: string]: string };
/**
* A list of durable objects that your worker should be bound to.
* For more information about Durable Objects, see the documentation at
* https://developers.cloudflare.com/workers/learning/using-durable-objects
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{ bindings: [] }`
* @optional
* @inherited false
*/
durable_objects?: {
bindings: {
/** The name of the binding used to refer to the Durable Object */
name: string;
/** The exported class name of the Durable Object */
class_name: string;
/** The script where the Durable Object is defined (if it's external to this worker) */
script_name?: string;
}[];
};
/**
* These specify any Workers KV Namespaces you want to
* access from inside your Worker. To learn more about KV Namespaces,
* see the documentation at https://developers.cloudflare.com/workers/learning/how-kv-works
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
kv_namespaces?: {
/** The binding name used to refer to the KV Namespace */
binding: string;
/** The ID of the KV namespace */
id: string;
/** The ID of the KV namespace used during `wrangler dev` */
preview_id?: string;
}[];
/**
* A list of services that your worker should be bound to.
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
experimental_services?: {
/** The binding name used to refer to the Service */
name: string;
/** The name of the Service being bound */
service: string;
/** The Service's environment */
environment: string;
}[];
/**
* A list of migrations that should be uploaded with your Worker.
* These define changes in your Durable Object declarations.
* More details at https://developers.cloudflare.com/workers/learning/using-durable-objects#configuring-durable-object-classes-with-migrations
* NB: these ARE inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited true
*/
migrations?: {
/** A unique identifier for this migration. */
tag: string;
/** The new Durable Objects being defined. */
new_classes?: string[];
/** The Durable Objects being renamed. */
renamed_classes?: {
from: string;
to: string;
}[];
/** The Durable Objects being removed. */
deleted_classes?: string[];
}[];
/**
* The definition of a Worker Site, a feature that lets you upload
* static assets with your Worker.
* More details at https://developers.cloudflare.com/workers/platform/sites
* NB: This IS inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `undefined`
* @optional
* @inherited true
*/
site?: {
/**
* The directory containing your static assets. It must be
* a path relative to your wrangler.toml file.
* Example: bucket = "./public"
*
* optional false
*/
bucket: string;
/**
* The location of your Worker script.
*
* @deprecated DO NOT use this (it's a holdover from wrangler 1.x). Either use the top level `entry` field, or pass the path to your entry file as a command line argument.
* @todo we should use a top level "entry" property instead
* @breaking
*/
"entry-point": string;
/**
* An exclusive list of .gitignore-style patterns that match file
* or directory names from your bucket location. Only matched
* items will be uploaded. Example: include = ["upload_dir"]
*
* @optional
* @default `[]`
* @todo this needs to be implemented!
*/
include?: string[];
/**
* A list of .gitignore-style patterns that match files or
* directories in your bucket that should be excluded from
* uploads. Example: exclude = ["ignore_dir"]
*
* @optional
* @default `[]`
* @todo this needs to be implemented!
*/
exclude?: string[];
};
/**
* "Cron" definitions to trigger a worker's "scheduled" function.
* Lets you call workers periodically, much like a cron job.
* More details here https://developers.cloudflare.com/workers/platform/cron-triggers
*
* @inherited
* @default `{ crons: [] }`
* @optional
* @todo can we use typescript for cron patterns?
*/
triggers?: { crons: string[] };
/**
* Options to configure the development server that your worker will use.
* NB: This is NOT inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
dev?: {
/**
* IP address for the local dev server to listen on,
*
* @default `127.0.0.1`
* @todo this needs to be implemented
*/
ip?: string;
/**
* Port for the local dev server to listen on
*
* @default `8787`
*/
port?: number;
/**
* Protocol that local wrangler dev server listens to requests on.
*
* @default `http`
* @todo this needs to be implemented
*/
local_protocol?: string;
/**
* Protocol that wrangler dev forwards requests on
*
* @default `https`
* @todo this needs to be implemented
*/
upstream_protocol?: string;
};
/**
* Specifies the Usage Model for your Worker. There are two options -
* [bundled](https://developers.cloudflare.com/workers/platform/limits#bundled-usage-model) and
* [unbound](https://developers.cloudflare.com/workers/platform/limits#unbound-usage-model).
* For newly created Workers, if the Usage Model is omitted
* it will be set to the [default Usage Model set on the account](https://dash.cloudflare.com/?account=workers/default-usage-model).
* For existing Workers, if the Usage Model is omitted, it will be
* set to the Usage Model configured in the dashboard for that Worker.
*/
usage_model?: undefined | "bundled" | "unbound";
/**
* Configures a custom build step to be run by Wrangler when
* building your Worker. Refer to the [custom builds documentation](https://developers.cloudflare.com/workers/cli-wrangler/configuration#build)
* for more details.
*
* @default `undefined`
* @optional
* @inherited false
*/
build?: {
/** The command used to build your Worker. On Linux and macOS, the command is executed in the `sh` shell and the `cmd` shell for Windows. The `&&` and `||` shell operators may be used. */
command?: string;
/** The directory in which the command is executed. */
cwd?: string;
/** The directory to watch for changes while using wrangler dev, defaults to the current working directory */
watch_dir?: string;
} & /**
* Much of the rest of this configuration isn't necessary anymore
* in wrangler2. We infer the format automatically, and we can pass
* the path to the script either in the CLI (or, @todo, as the top level
* `entry` property).
*/ (
| {
upload?: {
/**
* The format of the Worker script, must be "service-worker".
*
* @deprecated We infer the format automatically now.
*/
format: "service-worker";
/**
* The path to the Worker script. This should be replaced
* by the top level `entry' property.
*
* @deprecated This will be replaced by the top level `entry' property.
*/
main: string;
};
};
}
);
}
| {
/**
* When we use the module format, we only really
* need to specify the entry point. The format is deduced
* automatically in wrangler2.
*/
upload?: {
/**
* The format of the Worker script, must be "modules".
*
* @deprecated We infer the format automatically now.
*/
format: "modules";
type UsageModel = "bundled" | "unbound";
/**
* The directory you wish to upload your modules from,
* defaults to the dist relative to the project root directory.
*
* @deprecated
* @breaking
*/
dir?: string;
type Env = {
name?: string; // inherited
account_id?: string; // inherited
workers_dev?: boolean; // inherited
compatibility_date?: string; // inherited
compatibility_flags?: string[]; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
webpack_config?: string; // inherited
site?: Site;
jsx_factory?: string; // inherited
jsx_fragment?: string; // inherited
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
vars?: Vars;
durable_objects?: { bindings: DurableObject[] };
kv_namespaces?: KVNamespace[];
experimental_services?: Service[];
migrations?: DurableObjectMigration[];
usage_model?: UsageModel; // inherited
/**
* The path to the Worker script. This should be replaced
* by the top level `entry' property.
*
* @deprecated This will be replaced by the top level `entry' property.
*/
main?: string;
/**
* An ordered list of rules that define which modules to import,
* and what type to import them as. You will need to specify rules
* to use Text, Data, and CompiledWasm modules, or when you wish to
* have a .js file be treated as an ESModule instead of CommonJS.
*
* @deprecated These are now inferred automatically for major file types, but you can still specify them manually.
* @todo this needs to be implemented!
* @breaking
*/
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[];
fallthrough?: boolean;
};
};
}
);
/**
* The `env` section defines overrides for the configuration for
* different environments. Most fields can be overridden, while
* some have to be specifically duplicated in every environment.
* For more information, see the documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration#environments
*/
env?: {
[envName: string]:
| undefined
| Omit<Config, "env" | "migrations" | "site" | "dev">;
};
};
export type Config = {
name?: string; // inherited
account_id?: string; // inherited
// @deprecated Don't use this
type?: Project; // top level
compatibility_date?: string; // inherited
compatibility_flags?: string[]; // inherited
// -- there's some mutually exclusive logic for this next block,
// but I didn't bother for now
workers_dev?: boolean; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
// -- end mutually exclusive stuff
// @deprecated Don't use this
webpack_config?: string; // inherited
jsx_factory?: string; // inherited
jsx_fragment?: string; // inherited
vars?: Vars;
durable_objects?: { bindings: DurableObject[] };
kv_namespaces?: KVNamespace[];
experimental_services?: Service[];
migrations?: DurableObjectMigration[];
site?: Site; // inherited
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
dev?: Dev;
usage_model?: UsageModel; // inherited
// top level
build?: Build;
env?: { [envName: string]: void | Env };
};
type ValidationResults = (
| { key: string; info: string }
| { key: string; error: string }
| { key: string; warning: string }
)[];
/**
* We also define a validation function that manually validates
* every field in the configuration as per the type definitions,
* as well as extra constraints we apply to some fields, as well
* as some constraints on combinations of fields. This is useful for
* presenting errors and messages to the user. Eventually, we will
* combine this with some automatic config rewriting tools.
*
*/
export async function validateConfig(
_config: Partial<Config>
): Promise<ValidationResults> {
const results: ValidationResults = [];
return results;
}
import { connect } from "node:http2";
import type { ServerHttp2Stream } from "node:http2";
import { createServer } from "node:http";
import type { Server, IncomingHttpHeaders, RequestListener } from "node:http";
import type {
IncomingHttpHeaders,
RequestListener,
IncomingMessage,
ServerResponse,
Server,
} from "node:http";
import WebSocket from "faye-websocket";
import serveStatic from "serve-static";
import type { CfPreviewToken } from "./api/preview";
import { useEffect, useRef } from "react";
export interface HttpProxyInit {
host: string;
assetPath?: string | null;
onRequest?: (headers: IncomingHttpHeaders) => void;
onResponse?: (headers: IncomingHttpHeaders) => void;
/**
* `usePreviewServer` is a React hook that creates a local development
* server that can be used to develop a Worker.
*
* When we run `wrangler dev`, we start by uploading the compiled worker
* to the preview service, which responds with a preview token.
* (see `useWorker`/`createWorker` for details.)
* We can then use that token to connect to the preview server for a
* great local development experience. Further, as we change the worker,
* we can update the preview token transparently without having to restart
* the development server.
*/
/** Rewrite request headers to add the preview token. */
function addCfPreviewTokenHeader(
headers: IncomingHttpHeaders,
previewTokenValue: string
) {
headers["cf-workers-preview-token"] = previewTokenValue;
}
/**
* Creates a HTTP/1 proxy that sends requests over HTTP/2.
* Rewrite references in request headers
* from the preview host to the local host.
*/
export function createHttpProxy(init: HttpProxyInit): Server {
const { host, assetPath, onRequest = () => {}, onResponse = () => {} } = init;
const remote = connect(`https://${host}`);
const local = createServer();
// HTTP/2 -> HTTP/2
local.on("stream", (stream, headers: IncomingHttpHeaders) => {
onRequest(headers);
headers[":authority"] = host;
const request = stream.pipe(remote.request(headers));
request.on("response", (responseHeaders: IncomingHttpHeaders) => {
onResponse(responseHeaders);
stream.respond(responseHeaders);
request.pipe(stream, { end: true });
});
});
// HTTP/1 -> HTTP/2
const handleRequest: RequestListener = (message, response) => {
const { httpVersionMajor, headers, method, url } = message;
if (httpVersionMajor >= 2) {
return; // Already handled by the "stream" event.
function rewriteRemoteHostToLocalHostInHeaders(
headers: IncomingHttpHeaders,
remoteHost: string,
localPort: number
) {
for (const [name, value] of Object.entries(headers)) {
// Rewrite the remote host to the local host.
if (typeof value === "string" && value.includes(remoteHost)) {
headers[name] = value
.replaceAll(`https://${remoteHost}`, `http://localhost:${localPort}`)
.replaceAll(remoteHost, `localhost:${localPort}`);
}
onRequest(headers);
headers[":method"] = method;
headers[":path"] = url;
headers[":authority"] = host;
headers[":scheme"] = "https";
for (const name of Object.keys(headers)) {
if (HTTP1_HEADERS.has(name.toLowerCase())) {
delete headers[name];
}
}
export function usePreviewServer({
previewToken,
publicRoot,
port,
}: {
previewToken: CfPreviewToken | undefined;
publicRoot: undefined | string;
port: number;
}) {
/** Creates an HTTP/1 proxy that sends requests over HTTP/2. */
const proxyServer = useRef<Server>();
if (!proxyServer.current) {
proxyServer.current = createServer()
.on("request", function (req, res) {
// log all requests
console.log(
new Date().toLocaleTimeString(),
req.method,
req.url,
res.statusCode
);
})
.on("upgrade", (req) => {
// log all websocket connections
console.log(
new Date().toLocaleTimeString(),
req.method,
req.url,
101,
"(WebSocket)"
);
})
.on("error", (err) => {
// log all connection errors
console.error(new Date().toLocaleTimeString(), err);
});
}
/**
* When we're not connected / getting a fresh token on changes,
* we'd like to buffer streams/requests until we're connected.
* Once connected, we can flush the buffered streams/requests.
* streamBufferRef is used to buffer http/2 streams, while
* requestResponseBufferRef is used to buffer http/1 requests.
*/
const streamBufferRef = useRef<
{ stream: ServerHttp2Stream; headers: IncomingHttpHeaders }[]
>([]);
const requestResponseBufferRef = useRef<
{ request: IncomingMessage; response: ServerResponse }[]
>([]);
useEffect(() => {
const proxy = proxyServer.current;
// If we don't have a token, that means either we're just starting up,
// or we're refreshing the token.
if (!previewToken) {
const cleanupListeners: (() => void)[] = [];
const bufferStream = (
stream: ServerHttp2Stream,
headers: IncomingHttpHeaders
) => {
// store the stream in a buffer so we can replay it later
streamBufferRef.current.push({ stream, headers });
};
proxy.on("stream", bufferStream);
cleanupListeners.push(() => proxy.off("stream", bufferStream));
const bufferRequestResponse = (
request: IncomingMessage,
response: ServerResponse
) => {
// store the request and response in a buffer so we can replay it later
requestResponseBufferRef.current.push({ request, response });
};
proxy.on("request", bufferRequestResponse);
cleanupListeners.push(() => proxy.off("request", bufferRequestResponse));
return () => {
cleanupListeners.forEach((cleanup) => cleanup());
};
}
// We have a token. Let's proxy requests to the preview end point.
const cleanupListeners: (() => void)[] = [];
const assetPath = typeof publicRoot === "string" ? publicRoot : null;
// create a ClientHttp2Session
const remote = connect(`https://${previewToken.host}`);
cleanupListeners.push(() => remote.destroy());
/** HTTP/2 -> HTTP/2 */
function handleStream(
stream: ServerHttp2Stream,
headers: IncomingHttpHeaders
) {
addCfPreviewTokenHeader(headers, previewToken.value);
headers[":authority"] = previewToken.host;
const request = stream.pipe(remote.request(headers));
request.on("response", (responseHeaders: IncomingHttpHeaders) => {
rewriteRemoteHostToLocalHostInHeaders(
responseHeaders,
previewToken.host,
port
);
stream.respond(responseHeaders);
request.pipe(stream, { end: true });
});
}
proxy.on("stream", handleStream);
cleanupListeners.push(() => proxy.off("stream", handleStream));
// flush and replay buffered streams
streamBufferRef.current.forEach((buffer) =>
handleStream(buffer.stream, buffer.headers)
);
streamBufferRef.current = [];
/** HTTP/1 -> HTTP/2 */
const handleRequest: RequestListener = (
message: IncomingMessage,
response: ServerResponse
) => {
const { httpVersionMajor, headers, method, url } = message;
if (httpVersionMajor >= 2) {
return; // Already handled by the "stream" event.
}
}
const request = message.pipe(remote.request(headers));
request.on("response", (responseHeaders) => {
const status = responseHeaders[":status"];
onResponse(responseHeaders);
for (const name of Object.keys(responseHeaders)) {
if (name.startsWith(":")) {
delete responseHeaders[name];
addCfPreviewTokenHeader(headers, previewToken.value);
headers[":method"] = method;
headers[":path"] = url;
headers[":authority"] = previewToken.host;
headers[":scheme"] = "https";
for (const name of Object.keys(headers)) {
if (HTTP1_HEADERS.has(name.toLowerCase())) {
delete headers[name];
}
}
response.writeHead(status, responseHeaders);
request.pipe(response, { end: true });
});
};
// If an asset path is defined, check the file system
// for a file first and serve if it exists.
if (assetPath) {
const handleAsset = serveStatic(assetPath, {
cacheControl: false,
});
local.on("request", (request, response) => {
handleAsset(request, response, () => {
handleRequest(request, response);
const request = message.pipe(remote.request(headers));
request.on("response", (responseHeaders) => {
const status = responseHeaders[":status"];
rewriteRemoteHostToLocalHostInHeaders(
responseHeaders,
previewToken.host,
port
);
for (const name of Object.keys(responseHeaders)) {
if (name.startsWith(":")) {
delete responseHeaders[name];
}
}
response.writeHead(status, responseHeaders);
request.pipe(response, { end: true });
});
});
} else {
local.on("request", handleRequest);
}
// HTTP/1 -> WebSocket (over HTTP/1)
local.on("upgrade", (message, socket, body) => {
const { headers, url } = message;
onRequest(headers);
headers["host"] = host;
const localWebsocket = new WebSocket(message, socket, body);
// TODO(soon): Custom WebSocket protocol is not working?
const remoteWebsocketClient = new WebSocket.Client(
`wss://${host}${url}`,
[],
{ headers }
};
// If an asset path is defined, check the file system
// for a file first and serve if it exists.
const actualHandleRequest = assetPath
? createHandleAssetsRequest(assetPath, handleRequest)
: handleRequest;
proxy.on("request", actualHandleRequest);
cleanupListeners.push(() => proxy.off("request", actualHandleRequest));
// flush and replay buffered requests
requestResponseBufferRef.current.forEach(({ request, response }) =>
actualHandleRequest(request, response)
);
localWebsocket.pipe(remoteWebsocketClient).pipe(localWebsocket);
requestResponseBufferRef.current = [];
/**
 * HTTP/1 -> WebSocket (over HTTP/1): bridge a local WebSocket upgrade
 * request to a remote WebSocket connection against the preview host.
 * NOTE(review): relies on closure state (`previewToken`, `cleanupListeners`)
 * from the enclosing effect — confirm against full file.
 */
const handleUpgrade = (
  message: IncomingMessage,
  socket: WebSocket,
  body: Buffer
) => {
  const { headers, url } = message;
  // Authenticate against the preview session and point the handshake
  // at the preview host.
  addCfPreviewTokenHeader(headers, previewToken.value);
  headers["host"] = previewToken.host;
  // Wrap the raw upgrade request + socket as a server-side WebSocket.
  const localWebsocket = new WebSocket(message, socket, body);
  // TODO(soon): Custom WebSocket protocol is not working?
  const remoteWebsocketClient = new WebSocket.Client(
    `wss://${previewToken.host}${url}`,
    [],
    { headers }
  );
  // Relay frames in both directions between the local and remote sockets.
  localWebsocket.pipe(remoteWebsocketClient).pipe(localWebsocket);
  // We close down websockets whenever we refresh the token.
  cleanupListeners.push(() => {
    localWebsocket.destroy();
    remoteWebsocketClient.destroy();
  });
};
proxy.on("upgrade", handleUpgrade);
cleanupListeners.push(() => proxy.off("upgrade", handleUpgrade));
return () => {
cleanupListeners.forEach((d) => d());
};
}, [previewToken, publicRoot, port]);
// Start/stop the server whenever the
// containing component is mounted/unmounted.
useEffect(() => {
proxyServer.current.listen(port);
console.log(`⬣ Listening at http://localhost:${port}`);
return () => {
proxyServer.current.close();
};
}, [port]);
}
function createHandleAssetsRequest(
assetPath: string,
handleRequest: RequestListener
) {
const handleAsset = serveStatic(assetPath, {
cacheControl: false,
});
remote.on("close", () => {
local.close();
});
return local;
return (request: IncomingMessage, response: ServerResponse) => {
handleAsset(request, response, () => {
handleRequest(request, response);
});
};
}
/** A Set of headers we want to remove from HTTP/1 requests. */
const HTTP1_HEADERS = new Set([

@@ -96,0 +287,0 @@ "host",

@@ -14,3 +14,3 @@ import assert from "node:assert";

type CfScriptFormat = void | "modules" | "service-worker";
type CfScriptFormat = undefined | "modules" | "service-worker";

@@ -23,2 +23,4 @@ type Props = {

env?: string;
compatibilityDate?: string;
compatibilityFlags?: string[];
public?: string;

@@ -29,4 +31,4 @@ site?: string;

legacyEnv?: boolean;
jsxFactory: void | string;
jsxFragment: void | string;
jsxFactory: undefined | string;
jsxFragment: undefined | string;
};

@@ -54,2 +56,9 @@

const envRootObj = props.env ? config.env[props.env] || {} : config;
assert(
envRootObj.compatibility_date || props["compatibility-date"],
"A compatibility_date is required when publishing. Add one to your wrangler.toml file, or pass it in your terminal as --compatibility_date. See https://developers.cloudflare.com/workers/platform/compatibility-dates for more information."
);
const triggers = props.triggers || config.triggers?.crons;

@@ -132,3 +141,3 @@ const routes = props.routes || config.routes;

? path.join(path.dirname(file), "static-asset-facade.js")
: file)
: Object.keys(result.metafile.inputs)[0])
);

@@ -208,3 +217,2 @@

const envRootObj = props.env ? config.env[props.env] || {} : config;
const bindings: CfWorkerInit["bindings"] = {

@@ -211,0 +219,0 @@ kv_namespaces: envRootObj.kv_namespaces?.concat(

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc