New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign in · Demo · Install
Socket

wrangler

Package Overview
Dependencies
Maintainers
4
Versions
4005
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

wrangler - npm Package Compare versions

Comparing version 0.0.0-3cec4f8 to 0.0.0-402c77d

pages/functions/buildWorker.ts

32

package.json
{
"name": "wrangler",
"version": "0.0.0-3cec4f8",
"version": "0.0.0-402c77d",
"author": "wrangler@cloudflare.com",

@@ -39,5 +39,5 @@ "description": "Command-line interface for all things Cloudflare Workers",

"dependencies": {
"@cloudflare/pages-functions-compiler": "0.3.8",
"esbuild": "0.14.1",
"miniflare": "2.0.0-rc.3",
"miniflare": "2.2.0",
"path-to-regexp": "^6.2.0",
"semiver": "^1.1.0"

@@ -49,25 +49,32 @@ },

"devDependencies": {
"@babel/types": "^7.16.0",
"@iarna/toml": "^2.2.5",
"@types/cloudflare": "^2.7.6",
"@types/express": "^4.17.13",
"@types/estree": "^0.0.50",
"@types/mime": "^2.0.3",
"@types/react": "^17.0.37",
"@types/serve-static": "^1.13.10",
"@types/signal-exit": "^3.0.1",
"@types/ws": "^8.2.1",
"@types/yargs": "^17.0.7",
"acorn": "^8.6.0",
"acorn-walk": "^8.2.0",
"chokidar": "^3.5.2",
"clipboardy": "^3.0.0",
"command-exists": "^1.2.9",
"devtools-protocol": "^0.0.955664",
"execa": "^6.0.0",
"faye-websocket": "^0.11.4",
"finalhandler": "^1.1.2",
"find-up": "^6.2.0",
"formdata-node": "^4.3.1",
"http-proxy": "^1.18.1",
"ink": "^3.2.0",
"ink-select-input": "^4.2.1",
"ink-table": "^3.0.0",
"ink-testing-library": "^2.1.0",
"ink-text-input": "^4.0.2",
"mime": "^3.0.0",
"node-fetch": "^3.1.0",
"open": "^8.4.0",
"path-to-regexp": "^6.2.0",
"react": "^17.0.2",
"react-error-boundary": "^3.1.4",
"serve-static": "^1.14.1",

@@ -78,4 +85,3 @@ "signal-exit": "^3.0.6",

"ws": "^8.3.0",
"yargs": "^17.3.0",
"react-error-boundary": "^3.1.4"
"yargs": "^17.3.0"
},

@@ -85,2 +91,3 @@ "files": [

"bin",
"pages",
"miniflare-config-stubs",

@@ -118,4 +125,7 @@ "wrangler-dist",

]
}
},
"setupFilesAfterEnv": [
"<rootDir>/src/__tests__/jest.setup.ts"
]
}
}
}

@@ -1,59 +0,14 @@

import * as fs from "node:fs";
import * as fsp from "node:fs/promises";
import * as path from "node:path";
import * as TOML from "@iarna/toml";
import { main } from "../index";
import { setMock, unsetAllMocks } from "./mock-cfetch";
import { existsSync } from "node:fs";
import { confirm } from "../dialogs";
import { mockConfirm } from "./mock-dialogs";
import { runWrangler } from "./run-wrangler";
import { runInTempDir } from "./run-in-tmp";
import * as fs from "node:fs";
jest.mock("../cfetch", () => jest.requireActual("./mock-cfetch"));
describe("wrangler", () => {
runInTempDir();
jest.mock("../dialogs", () => {
return {
...jest.requireActual<object>("../dialogs"),
confirm: jest.fn().mockName("confirmMock"),
};
});
/**
 * Queue up a one-shot mock implementation of `confirm()` that answers
 * configured confirmation prompts with configured results.
 *
 * Any call to `confirm()` whose prompt text matches none of the given
 * expectations causes an error to be thrown, so unexpected prompts fail
 * the test loudly instead of silently returning `undefined`.
 */
function mockConfirm(...expectations: { text: string; result: boolean }[]) {
  (confirm as jest.Mock).mockImplementationOnce((text: string) => {
    const match = expectations.find((expectation) => expectation.text === text);
    if (match === undefined) {
      throw new Error(`Unexpected confirmation message: ${text}`);
    }
    return match.result;
  });
}
/**
 * Run wrangler's `main()` entry point with the given space-separated command
 * line, capturing everything written to `console.log`, `console.error`, and
 * `console.warn` while it runs.
 *
 * The console spies are always restored, even when `main()` throws.
 *
 * @returns the captured output as `{ stdout, stderr, warnings }` strings,
 *          each joined with newlines.
 */
async function w(cmd?: string) {
  const spies = {
    log: jest.spyOn(console, "log").mockImplementation(),
    error: jest.spyOn(console, "error").mockImplementation(),
    warn: jest.spyOn(console, "warn").mockImplementation(),
  };
  try {
    await main(cmd == null ? [] : cmd.split(" "));
    return {
      stdout: spies.log.mock.calls.flat(2).join("\n"),
      stderr: spies.error.mock.calls.flat(2).join("\n"),
      warnings: spies.warn.mock.calls.flat(2).join("\n"),
    };
  } finally {
    spies.log.mockRestore();
    spies.error.mockRestore();
    spies.warn.mockRestore();
  }
}
describe("wrangler", () => {
describe("no command", () => {
it("should display a list of available commands", async () => {
const { stdout, stderr } = await w();
const { stdout, stderr } = await runWrangler();

@@ -89,5 +44,6 @@ expect(stdout).toMatchInlineSnapshot(`

it("should display an error", async () => {
const { stdout, stderr } = await w("invalid-command");
const { error, stdout, stderr } = await runWrangler("invalid-command");
expect(stdout).toMatchInlineSnapshot(`
expect(stdout).toMatchInlineSnapshot(`""`);
expect(stderr).toMatchInlineSnapshot(`
"wrangler

@@ -112,9 +68,9 @@

Options:
-l, --local Run on my machine [boolean] [default: false]"
`);
-l, --local Run on my machine [boolean] [default: false]
expect(stderr).toMatchInlineSnapshot(`
"
Unknown command: invalid-command."
`);
expect(error).toMatchInlineSnapshot(
`[Error: Unknown command: invalid-command.]`
);
});

@@ -124,23 +80,16 @@ });

describe("init", () => {
const ogcwd = process.cwd();
beforeEach(() => {
process.chdir(path.join(__dirname, "fixtures", "init"));
});
afterEach(async () => {
if (existsSync("./wrangler.toml")) {
await fsp.rm("./wrangler.toml");
}
process.chdir(ogcwd);
});
it("should create a wrangler.toml", async () => {
await w("init");
mockConfirm({
text: "No package.json found. Would you like to create one?",
result: false,
});
await runWrangler("init");
const parsed = TOML.parse(await fsp.readFile("./wrangler.toml", "utf-8"));
expect(typeof parsed.compatibility_date).toBe("string");
expect(fs.existsSync("./package.json")).toBe(false);
expect(fs.existsSync("./tsconfig.json")).toBe(false);
});
it("should display warning when wrangler.toml already exists, and exit if user does not want to carry on", async () => {
fs.closeSync(fs.openSync("./wrangler.toml", "w"));
fs.writeFileSync("./wrangler.toml", "", "utf-8");
mockConfirm({

@@ -150,4 +99,4 @@ text: "Do you want to continue initializing this project?",

});
const { stderr } = await w("init");
expect(stderr).toContain("wrangler.toml file already exists!");
const { warnings } = await runWrangler("init");
expect(warnings).toContain("wrangler.toml file already exists!");
const parsed = TOML.parse(await fsp.readFile("./wrangler.toml", "utf-8"));

@@ -158,9 +107,15 @@ expect(typeof parsed.compatibility_date).toBe("undefined");

it("should display warning when wrangler.toml already exists, but continue if user does want to carry on", async () => {
fs.closeSync(fs.openSync("./wrangler.toml", "w"));
mockConfirm({
text: "Do you want to continue initializing this project?",
result: true,
});
const { stderr } = await w("init");
expect(stderr).toContain("wrangler.toml file already exists!");
fs.writeFileSync("./wrangler.toml", "", "utf-8");
mockConfirm(
{
text: "Do you want to continue initializing this project?",
result: true,
},
{
text: "No package.json found. Would you like to create one?",
result: false,
}
);
const { warnings } = await runWrangler("init");
expect(warnings).toContain("wrangler.toml file already exists!");
const parsed = TOML.parse(await fsp.readFile("./wrangler.toml", "utf-8"));

@@ -170,86 +125,169 @@ expect(typeof parsed.compatibility_date).toBe("string");

it("should error if `--type` is used", async () => {
const noValue = await w("init --type");
expect(noValue.stderr).toMatchInlineSnapshot(
`"The --type option is no longer supported."`
it("should create a package.json if none is found and user confirms", async () => {
mockConfirm(
{
text: "No package.json found. Would you like to create one?",
result: true,
},
{
text: "Would you like to use typescript?",
result: false,
}
);
await runWrangler("init");
expect(fs.existsSync("./package.json")).toBe(true);
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.name).toEqual("worker"); // TODO: should we infer the name from the directory?
expect(packageJson.version).toEqual("0.0.1");
expect(fs.existsSync("./tsconfig.json")).toBe(false);
});
it("should error if `--type javascript` is used", async () => {
const javascriptValue = await w("init --type javascript");
expect(javascriptValue.stderr).toMatchInlineSnapshot(
`"The --type option is no longer supported."`
it("should not touch an existing package.json in the same directory", async () => {
mockConfirm({
text: "Would you like to use typescript?",
result: false,
});
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
});
it("should error if `--type rust` is used", async () => {
const rustValue = await w("init --type rust");
expect(rustValue.stderr).toMatchInlineSnapshot(
`"The --type option is no longer supported."`
await runWrangler("init");
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.name).toEqual("test");
expect(packageJson.version).toEqual("1.0.0");
});
it("should error if `--type webpack` is used", async () => {
const webpackValue = await w("init --type webpack");
expect(webpackValue.stderr).toMatchInlineSnapshot(`
"The --type option is no longer supported.
If you wish to use webpack then you will need to create a custom build."
`);
it("should not touch an existing package.json in an ancestor directory", async () => {
mockConfirm({
text: "Would you like to use typescript?",
result: false,
});
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
fs.mkdirSync("./sub-1/sub-2", { recursive: true });
process.chdir("./sub-1/sub-2");
await runWrangler("init");
expect(fs.existsSync("./package.json")).toBe(false);
expect(fs.existsSync("../../package.json")).toBe(true);
const packageJson = JSON.parse(
fs.readFileSync("../../package.json", "utf-8")
);
expect(packageJson.name).toEqual("test");
expect(packageJson.version).toEqual("1.0.0");
});
});
describe("kv:namespace", () => {
afterAll(() => {
unsetAllMocks();
it("should create a tsconfig.json and install `workers-types` if none is found and user confirms", async () => {
mockConfirm(
{
text: "No package.json found. Would you like to create one?",
result: true,
},
{
text: "Would you like to use typescript?",
result: true,
}
);
await runWrangler("init");
expect(fs.existsSync("./tsconfig.json")).toBe(true);
const tsconfigJson = JSON.parse(
fs.readFileSync("./tsconfig.json", "utf-8")
);
expect(tsconfigJson.compilerOptions.types).toEqual([
"@cloudflare/workers-types",
]);
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.devDependencies).toEqual({
"@cloudflare/workers-types": expect.any(String),
});
});
let KVNamespaces: { title: string; id: string }[] = [];
it("can create a namespace", async () => {
setMock("/accounts/:accountId/storage/kv/namespaces", (uri, init) => {
expect(init.method === "POST");
const body = JSON.parse(init.body);
expect(body.title).toBe("worker-UnitTestNamespace");
KVNamespaces.push({ title: body.title, id: "some-namespace-id" });
return { id: "some-namespace-id" };
});
await w("kv:namespace create UnitTestNamespace");
expect(
KVNamespaces.find((ns) => ns.title === `worker-UnitTestNamespace`)
).toBeTruthy();
it("should not touch an existing tsconfig.json in the same directory", async () => {
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
fs.writeFileSync(
"./tsconfig.json",
JSON.stringify({ compilerOptions: {} }),
"utf-8"
);
await runWrangler("init");
const tsconfigJson = JSON.parse(
fs.readFileSync("./tsconfig.json", "utf-8")
);
expect(tsconfigJson.compilerOptions).toEqual({});
});
let createdNamespace: { id: string; title: string };
it("can list namespaces", async () => {
setMock(
"/accounts/:accountId/storage/kv/namespaces\\?:qs",
(uri, init) => {
expect(init).toBe(undefined);
return KVNamespaces;
}
it("should not touch an existing package.json in an ancestor directory", async () => {
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
const { stdout } = await w("kv:namespace list");
const namespaces = JSON.parse(stdout);
createdNamespace = namespaces.find(
(ns) => ns.title === "worker-UnitTestNamespace"
fs.writeFileSync(
"./tsconfig.json",
JSON.stringify({ compilerOptions: {} }),
"utf-8"
);
expect(createdNamespace.title).toBe("worker-UnitTestNamespace");
fs.mkdirSync("./sub-1/sub-2", { recursive: true });
process.chdir("./sub-1/sub-2");
await runWrangler("init");
expect(fs.existsSync("./tsconfig.json")).toBe(false);
expect(fs.existsSync("../../tsconfig.json")).toBe(true);
const tsconfigJson = JSON.parse(
fs.readFileSync("../../tsconfig.json", "utf-8")
);
expect(tsconfigJson.compilerOptions).toEqual({});
});
it("can delete a namespace", async () => {
const namespaceIdToDelete = createdNamespace.id;
setMock(
"/accounts/:accountId/storage/kv/namespaces/:namespaceId",
(uri, init) => {
expect(init.method).toBe("DELETE");
KVNamespaces = KVNamespaces.filter(
(ns) => ns.id !== namespaceIdToDelete
);
}
it("should error if `--type` is used", async () => {
const { error } = await runWrangler("init --type");
expect(error).toMatchInlineSnapshot(
`[Error: The --type option is no longer supported.]`
);
await w(`kv:namespace delete --namespace-id ${namespaceIdToDelete}`);
expect(KVNamespaces.find((ns) => ns.id === namespaceIdToDelete)).toBe(
undefined
});
it("should error if `--type javascript` is used", async () => {
const { error } = await runWrangler("init --type javascript");
expect(error).toMatchInlineSnapshot(
`[Error: The --type option is no longer supported.]`
);
});
it("should error if `--type rust` is used", async () => {
const { error } = await runWrangler("init --type rust");
expect(error).toMatchInlineSnapshot(
`[Error: The --type option is no longer supported.]`
);
});
it("should error if `--type webpack` is used", async () => {
const { error } = await runWrangler("init --type webpack");
expect(error).toMatchInlineSnapshot(`
[Error: The --type option is no longer supported.
If you wish to use webpack then you will need to create a custom build.]
`);
});
});
});
import type {
CfWorkerInit,
CfModuleType,
CfVariable,
CfModule,
CfDurableObjectMigrations,
} from "./worker.js";
import { FormData, Blob } from "formdata-node";
// Credit: https://stackoverflow.com/a/9458996
/**
 * Encodes a `BufferSource` as a base64 string.
 *
 * Handles both raw `ArrayBuffer`s and typed-array views. For a view, only
 * the bytes within the view are encoded (honouring `byteOffset` and
 * `byteLength`) — reading `source.buffer` directly, as the previous
 * implementation did, encodes the *entire* backing buffer and so leaks
 * bytes outside the view.
 */
function toBase64(source: BufferSource): string {
  const bytes =
    source instanceof ArrayBuffer
      ? new Uint8Array(source)
      : new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
  let result = "";
  for (let i = 0; i < bytes.byteLength; i++) {
    result += String.fromCharCode(bytes[i]);
  }
  return btoa(result);
}
/**
 * Converts a `(name, variable)` pair into the binding object shape expected
 * by the Workers upload API.
 *
 * Supported variable shapes, checked in order:
 *  - `string`                              → `plain_text` binding
 *  - `{ namespaceId }`                     → `kv_namespace` binding
 *  - `{ class_name, script_name? }`        → `durable_object_namespace` binding
 *  - `{ format, algorithm, usages, data }` → `secret_key` binding
 *
 * @throws TypeError if the variable matches none of the supported shapes.
 */
function toBinding(
  name: string,
  variable: CfVariable
): Record<string, unknown> {
  if (typeof variable === "string") {
    return { name, type: "plain_text", text: variable };
  }
  if ("namespaceId" in variable) {
    return {
      name,
      type: "kv_namespace",
      namespace_id: variable.namespaceId,
    };
  }
  if ("class_name" in variable) {
    return {
      name,
      type: "durable_object_namespace",
      class_name: variable.class_name,
      // script_name is only included when the Durable Object lives in
      // another worker's script.
      ...(variable.script_name && {
        script_name: variable.script_name,
      }),
    };
  }
  const { format, algorithm, usages, data } = variable;
  if (format) {
    let key_base64;
    let key_jwk;
    // Binary key material is base64-encoded; JWK objects pass through as-is.
    if (data instanceof ArrayBuffer || ArrayBuffer.isView(data)) {
      key_base64 = toBase64(data);
    } else {
      key_jwk = data;
    }
    return {
      name,
      type: "secret_key",
      format,
      algorithm,
      usages,
      key_base64,
      key_jwk,
    };
  }
  // Use JSON.stringify so the offending value is readable; plain string
  // concatenation would render objects as "[object Object]".
  throw new TypeError("Unsupported variable: " + JSON.stringify(variable));
}
export function toMimeType(type: CfModuleType): string {

@@ -87,3 +26,3 @@ switch (type) {

function toModule(module: CfModule, entryType?: CfModuleType): Blob {
function toModule(module: CfModule, entryType: CfModuleType): Blob {
const { type: moduleType, content } = module;

@@ -95,2 +34,25 @@ const type = toMimeType(moduleType ?? entryType);

/**
 * The shape of the `metadata` part of the multipart form upload sent to the
 * Workers API when publishing a script.
 */
interface WorkerMetadata {
  // Runtime compatibility date (yyyy-mm-dd); omitted when not configured.
  compatibility_date?: string;
  // Compatibility flags enabling upcoming runtime features.
  compatibility_flags?: string[];
  usage_model?: "bundled" | "unbound";
  // Durable Object class migrations to apply with this upload.
  migrations?: CfDurableObjectMigrations;
  // Discriminated union on `type`; mirrors the binding objects the
  // surrounding code builds (kv_namespace, plain_text,
  // durable_object_namespace, service).
  bindings: (
    | { type: "kv_namespace"; name: string; namespace_id: string }
    | { type: "plain_text"; name: string; text: string }
    | {
        type: "durable_object_namespace";
        name: string;
        class_name: string;
        // Only set when the Durable Object is defined in another script.
        script_name?: string;
      }
    | {
        type: "service";
        name: string;
        service: string;
        environment: string;
      }
  )[];
}
/**

@@ -104,3 +66,3 @@ * Creates a `FormData` upload from a `CfWorkerInit`.

modules,
variables,
bindings,
migrations,

@@ -111,14 +73,42 @@ usage_model,

} = worker;
const { name, type: mainType } = main;
const bindings = [];
for (const [name, variable] of Object.entries(variables ?? {})) {
const binding = toBinding(name, variable);
bindings.push(binding);
}
const metadataBindings: WorkerMetadata["bindings"] = [];
// TODO: this object should be typed
const metadata = {
...(mainType !== "commonjs" ? { main_module: name } : { body_part: name }),
bindings,
bindings.kv_namespaces?.forEach(({ id, binding }) => {
metadataBindings.push({
name: binding,
type: "kv_namespace",
namespace_id: id,
});
});
bindings.durable_objects?.bindings.forEach(
({ name, class_name, script_name }) => {
metadataBindings.push({
name,
type: "durable_object_namespace",
class_name: class_name,
...(script_name && { script_name }),
});
}
);
Object.entries(bindings.vars || {})?.forEach(([key, value]) => {
metadataBindings.push({ name: key, type: "plain_text", text: value });
});
bindings.services?.forEach(({ name, service, environment }) => {
metadataBindings.push({
name,
type: "service",
service,
environment,
});
});
const metadata: WorkerMetadata = {
...(main.type !== "commonjs"
? { main_module: main.name }
: { body_part: main.name }),
bindings: metadataBindings,
...(compatibility_date && { compatibility_date }),

@@ -132,3 +122,3 @@ ...(compatibility_flags && { compatibility_flags }),

if (mainType === "commonjs" && modules && modules.length > 0) {
if (main.type === "commonjs" && modules && modules.length > 0) {
throw new TypeError(

@@ -141,3 +131,3 @@ "More than one module can only be specified when type = 'esm'"

const { name } = module;
const blob = toModule(module, mainType ?? "esm");
const blob = toModule(module, main.type ?? "esm");
formData.set(name, blob, name);

@@ -144,0 +134,0 @@ }

@@ -1,5 +0,5 @@

import cfetch from "../cfetch";
import fetch from "node-fetch";
import { fetchResult } from "../cfetch";
import { toFormData } from "./form_data";
import type { CfAccount, CfWorkerInit } from "./worker";
import fetch from "node-fetch";

@@ -63,3 +63,3 @@ /**

const { exchange_url } = await cfetch<{ exchange_url: string }>(initUrl);
const { exchange_url } = await fetchResult<{ exchange_url: string }>(initUrl);
const { inspector_websocket, token } = (await (

@@ -110,3 +110,3 @@ await fetch(exchange_url)

const { preview_token } = await cfetch<{ preview_token: string }>(url, {
const { preview_token } = await fetchResult<{ preview_token: string }>(url, {
method: "POST",

@@ -113,0 +113,0 @@ // @ts-expect-error TODO: fix this

@@ -66,12 +66,21 @@ import type { CfPreviewToken } from "./preview";

/**
* A map of variable names to values.
*/
interface CfVars {
[key: string]: string;
}
/**
* A KV namespace.
*/
export interface CfKvNamespace {
/**
* The namespace ID.
*/
namespaceId: string;
interface CfKvNamespace {
binding: string;
id: string;
}
export interface CfDurableObject {
/**
* A Durable Object.
*/
interface CfDurableObject {
name: string;
class_name: string;

@@ -81,3 +90,12 @@ script_name?: string;

interface CfDOMigrations {
/**
* A Service.
*/
interface CfService {
name: string;
service: string;
environment: string;
}
export interface CfDurableObjectMigrations {
old_tag?: string;

@@ -93,31 +111,2 @@ new_tag: string;

/**
* A `WebCrypto` key.
*
* @link https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/importKey
*/
export interface CfCryptoKey {
/**
* The format.
*/
format: string;
/**
* The algorithm.
*/
algorithm: string;
/**
* The usages.
*/
usages: string[];
/**
* The data.
*/
data: BufferSource | JsonWebKey;
}
/**
* A variable (aka. environment variable).
*/
export type CfVariable = string | CfKvNamespace | CfCryptoKey | CfDurableObject;
/**
* Options for creating a `CfWorker`.

@@ -129,3 +118,3 @@ */

*/
name: string | void;
name: string | undefined;
/**

@@ -138,11 +127,16 @@ * The entrypoint module.

*/
modules: void | CfModule[];
modules: undefined | CfModule[];
/**
* The map of names to variables. (aka. environment variables)
* All the bindings
*/
variables?: { [name: string]: CfVariable };
migrations: void | CfDOMigrations;
compatibility_date: string | void;
compatibility_flags: void | string[];
usage_model: void | "bundled" | "unbound";
bindings: {
kv_namespaces?: CfKvNamespace[];
durable_objects?: { bindings: CfDurableObject[] };
vars?: CfVars;
services?: CfService[];
};
migrations: undefined | CfDurableObjectMigrations;
compatibility_date: string | undefined;
compatibility_flags: undefined | string[];
usage_model: undefined | "bundled" | "unbound";
}

@@ -149,0 +143,0 @@

@@ -0,10 +1,9 @@

import { hideBin } from "yargs/helpers";
import { main } from ".";
main(process.argv.slice(2)).catch((cause) => {
const { name, message } = cause;
if (name === "CloudflareError") {
console.error("\x1b[31m", message);
return;
}
throw cause;
main(hideBin(process.argv)).catch(() => {
// The logging of any error that was thrown from `main()` is handled in the `yargs.fail()` handler.
// Here we just want to ensure that the process exits with a non-zero code.
// We don't want to do this inside the `main()` function, since that would kill the process when running our tests.
process.exit(1);
});

@@ -1,122 +0,487 @@

// we're going to manually write both the type definition AND
// the validator for the config, so that we can give better error messages
/**
* This is the static type definition for the configuration object.
* It reflects the configuration that you can write in wrangler.toml,
* and optionally augment with arguments passed directly to wrangler.
* The type definition doesn't fully reflect the constraints applied
* to the configuration, but it is a good starting point. Later, we
* also defined a validator function that will validate the configuration
* with the same rules as the type definition, as well as the extra
* constraints. The type definition is good for asserting correctness
* in the wrangler codebase, whereas the validator function is useful
* for signalling errors in the configuration to a user of wrangler.
*
* For more information about the configuration object, see the
* documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration
*
* Legend for the annotations:
*
* *:optional means providing a value isn't mandatory
* *:deprecated means the field itself isn't necessary anymore in wrangler.toml
* *:breaking means the deprecation/optionality is a breaking change from wrangler 1
* *:todo means there's more work to be done (with details attached)
* *:inherited means the field is copied to all environments
*/
export type Config = {
/**
* The name of your worker. Alphanumeric + dashes only.
*
* @optional
* @inherited
*/
name?: string;
type DOMigration = {
tag: string;
new_classes?: string[];
renamed_classes?: string[];
deleted_classes?: string[];
};
/**
* The entrypoint/path to the JavaScript file that will be executed.
*
* @optional
* @inherited
* @todo this needs to be implemented!
*/
entry?: string;
type Project = "webpack" | "javascript" | "rust";
/**
* This is the ID of the account associated with your zone.
* You might have more than one account, so make sure to use
* the ID of the account associated with the zone/route you
* provide, if you provide one. It can also be specified through
* the CF_ACCOUNT_ID environment variable.
*
* @optional
* @inherited
*/
account_id?: string;
type Site = {
// inherited
bucket: string;
"entry-point": string;
include?: string[];
exclude?: string[];
};
/**
* The project "type". A holdover from wrangler 1.x.
* Valid values were "webpack", "javascript", and "rust".
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @optional
* @inherited
* @breaking
*/
type?: "webpack" | "javascript" | "rust";
type Dev = {
ip?: string;
port?: number;
local_protocol?: string;
upstream_protocol?: string;
};
/**
* A date in the form yyyy-mm-dd, which will be used to determine
* which version of the Workers runtime is used. More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
* @optional true for `dev`, false for `publish`
* @inherited
*/
compatibility_date?: string;
type Vars = { [key: string]: string };
/**
* A list of flags that enable features from upcoming features of
* the Workers runtime, usually used together with compatibility_flags.
* More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
*
* @optional
* @inherited
* @todo This could be an enum!
*/
compatibility_flags?: string[];
type Cron = string; // TODO: we should be able to parse a cron pattern with ts
/**
* Whether we use <name>.<subdomain>.workers.dev to
* test and deploy your worker.
*
* @default `true` (This is a breaking change from wrangler 1)
* @optional
* @inherited
* @breaking
*/
workers_dev?: boolean;
type KVNamespace = {
binding?: string;
preview_id: string;
id: string;
};
/**
* The zone ID of the zone you want to deploy to. You can find this
* in your domain page on the dashboard.
*
* @deprecated This is unnecessary since we can deduce this from routes directly.
* @optional
* @inherited
*/
zone_id?: string;
type DurableObject = {
name: string;
class_name: string;
script_name?: string;
};
/**
* A list of routes that your worker should be deployed to.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
routes?: string[];
type Build = {
command?: string;
cwd?: string;
watch_dir?: string;
} & (
| {
upload?: {
format: "service-worker";
main: string;
};
}
| {
upload?: {
format: "modules";
dir?: string;
main?: string;
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[]; // can we use typescript for these patterns?
fallthrough?: boolean;
/**
* A route that your worker should be deployed to. Literally
* the same as routes, but only one.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
route?: string;
/**
* Path to the webpack config to use when building your worker.
* A holdover from wrangler 1.x, used with `type: "webpack"`.
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @inherited
* @breaking
*/
webpack_config?: string;
/**
* The function to use to replace jsx syntax.
*
* @default `"React.createElement"`
* @optional
* @inherited
*/
jsx_factory?: string;
/**
* The function to use to replace jsx fragment syntax.
*
* @default `"React.Fragment"`
* @optional
* @inherited
*/
jsx_fragment?: string;
/**
* A map of environment variables to set when deploying your worker.
* Of note, they can only be strings. Which is unfortunate, really.
* (TODO: verify that they can only be strings?)
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
vars?: { [key: string]: string };
/**
* A list of durable objects that your worker should be bound to.
* For more information about Durable Objects, see the documentation at
* https://developers.cloudflare.com/workers/learning/using-durable-objects
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{ bindings: [] }`
* @optional
* @inherited false
*/
durable_objects?: {
bindings: {
/** The name of the binding used to refer to the Durable Object */
name: string;
/** The exported class name of the Durable Object */
class_name: string;
/** The script where the Durable Object is defined (if it's external to this worker) */
script_name?: string;
}[];
};
/**
* These specify any Workers KV Namespaces you want to
* access from inside your Worker. To learn more about KV Namespaces,
* see the documentation at https://developers.cloudflare.com/workers/learning/how-kv-works
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
kv_namespaces?: {
/** The binding name used to refer to the KV Namespace */
binding: string;
/** The ID of the KV namespace */
id: string;
/** The ID of the KV namespace used during `wrangler dev` */
preview_id?: string;
}[];
/**
* A list of services that your worker should be bound to.
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
experimental_services?: {
/** The binding name used to refer to the Service */
name: string;
/** The name of the Service being bound */
service: string;
/** The Service's environment */
environment: string;
}[];
/**
* A list of migrations that should be uploaded with your Worker.
* These define changes in your Durable Object declarations.
* More details at https://developers.cloudflare.com/workers/learning/using-durable-objects#configuring-durable-object-classes-with-migrations
* NB: these ARE inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited true
*/
migrations?: {
/** A unique identifier for this migration. */
tag: string;
/** The new Durable Objects being defined. */
new_classes?: string[];
/** The Durable Objects being renamed. */
renamed_classes?: {
from: string;
to: string;
}[];
/** The Durable Objects being removed. */
deleted_classes?: string[];
}[];
/**
* The definition of a Worker Site, a feature that lets you upload
* static assets with your Worker.
* More details at https://developers.cloudflare.com/workers/platform/sites
* NB: This IS inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `undefined`
* @optional
* @inherited true
*/
site?: {
/**
* The directory containing your static assets. It must be
* a path relative to your wrangler.toml file.
* Example: bucket = "./public"
*
* optional false
*/
bucket: string;
/**
* The location of your Worker script.
*
* @deprecated DO NOT use this (it's a holdover from wrangler 1.x). Either use the top level `entry` field, or pass the path to your entry file as a command line argument.
* @todo we should use a top level "entry" property instead
* @breaking
*/
"entry-point": string;
/**
* An exclusive list of .gitignore-style patterns that match file
* or directory names from your bucket location. Only matched
* items will be uploaded. Example: include = ["upload_dir"]
*
* @optional
* @default `[]`
* @todo this needs to be implemented!
*/
include?: string[];
/**
* A list of .gitignore-style patterns that match files or
* directories in your bucket that should be excluded from
* uploads. Example: exclude = ["ignore_dir"]
*
* @optional
* @default `[]`
* @todo this needs to be implemented!
*/
exclude?: string[];
};
/**
* "Cron" definitions to trigger a worker's "scheduled" function.
* Lets you call workers periodically, much like a cron job.
* More details here https://developers.cloudflare.com/workers/platform/cron-triggers
*
* @inherited
* @default `{ crons: [] }`
* @optional
* @todo can we use typescript for cron patterns?
*/
triggers?: { crons: string[] };
/**
* Options to configure the development server that your worker will use.
* NB: This is NOT inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
dev?: {
/**
* IP address for the local dev server to listen on,
*
* @default `127.0.0.1`
* @todo this needs to be implemented
*/
ip?: string;
/**
* Port for the local dev server to listen on
*
* @default `8787`
*/
port?: number;
/**
* Protocol that local wrangler dev server listens to requests on.
*
* @default `http`
* @todo this needs to be implemented
*/
local_protocol?: string;
/**
* Protocol that wrangler dev forwards requests on
*
* @default `https`
* @todo this needs to be implemented
*/
upstream_protocol?: string;
};
/**
* Specifies the Usage Model for your Worker. There are two options -
* [bundled](https://developers.cloudflare.com/workers/platform/limits#bundled-usage-model) and
* [unbound](https://developers.cloudflare.com/workers/platform/limits#unbound-usage-model).
* For newly created Workers, if the Usage Model is omitted
* it will be set to the [default Usage Model set on the account](https://dash.cloudflare.com/?account=workers/default-usage-model).
* For existing Workers, if the Usage Model is omitted, it will be
* set to the Usage Model configured in the dashboard for that Worker.
*/
usage_model?: undefined | "bundled" | "unbound";
/**
* Configures a custom build step to be run by Wrangler when
* building your Worker. Refer to the [custom builds documentation](https://developers.cloudflare.com/workers/cli-wrangler/configuration#build)
* for more details.
*
* @default `undefined`
* @optional
* @inherited false
*/
build?: {
/** The command used to build your Worker. On Linux and macOS, the command is executed in the `sh` shell and the `cmd` shell for Windows. The `&&` and `||` shell operators may be used. */
command?: string;
/** The directory in which the command is executed. */
cwd?: string;
/** The directory to watch for changes while using wrangler dev, defaults to the current working directory */
watch_dir?: string;
} & /**
* Much of the rest of this configuration isn't necessary anymore
* in wrangler2. We infer the format automatically, and we can pass
* the path to the script either in the CLI (or, @todo, as the top level
* `entry` property).
*/ (
| {
upload?: {
/**
* The format of the Worker script, must be "service-worker".
*
* @deprecated We infer the format automatically now.
*/
format: "service-worker";
/**
* The path to the Worker script. This should be replaced
* by the top level `entry' property.
*
* @deprecated This will be replaced by the top level `entry' property.
*/
main: string;
};
};
}
);
}
| {
/**
* When we use the module format, we only really
* need to specify the entry point. The format is deduced
* automatically in wrangler2.
*/
upload?: {
/**
* The format of the Worker script, must be "modules".
*
* @deprecated We infer the format automatically now.
*/
format: "modules";
type UsageModel = "bundled" | "unbound";
/**
* The directory you wish to upload your modules from,
* defaults to the dist relative to the project root directory.
*
* @deprecated
* @breaking
*/
dir?: string;
type Env = {
name?: string; // inherited
account_id?: string; // inherited
workers_dev?: boolean; // inherited
compatibility_date?: string; // inherited
compatibility_flags?: string[]; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
webpack_config?: string; // inherited
site?: Site;
jsx_factory?: string; // inherited
jsx_fragment?: string; // inherited
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
vars?: Vars;
durable_objects?: { bindings: DurableObject[] };
kv_namespaces?: KVNamespace[];
usage_model?: UsageModel; // inherited
/**
* The path to the Worker script. This should be replaced
* by the top level `entry' property.
*
* @deprecated This will be replaced by the top level `entry' property.
*/
main?: string;
/**
* An ordered list of rules that define which modules to import,
* and what type to import them as. You will need to specify rules
* to use Text, Data, and CompiledWasm modules, or when you wish to
* have a .js file be treated as an ESModule instead of CommonJS.
*
* @deprecated These are now inferred automatically for major file types, but you can still specify them manually.
* @todo this needs to be implemented!
* @breaking
*/
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[];
fallthrough?: boolean;
};
};
}
);
/**
* The `env` section defines overrides for the configuration for
* different environments. Most fields can be overridden, while
* some have to be specifically duplicated in every environment.
* For more information, see the documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration#environments
*/
env?: {
[envName: string]:
| undefined
| Omit<Config, "env" | "migrations" | "site" | "dev">;
};
};
/**
 * Top-level Worker configuration, as read from `wrangler.toml`.
 *
 * Fields tagged "inherited" may also be overridden per-environment
 * through the `env` section; fields tagged "top level" may only
 * appear at the top level of the configuration.
 */
export type Config = {
  name?: string; // inherited
  account_id?: string; // inherited
  /** @deprecated Don't use this */
  type?: Project; // top level
  compatibility_date?: string; // inherited
  compatibility_flags?: string[]; // inherited
  // -- there's some mutually exclusive logic for this next block,
  // but I didn't bother for now
  workers_dev?: boolean; // inherited
  zone_id?: string; // inherited
  routes?: string[]; // inherited
  route?: string; // inherited
  // -- end mutually exclusive stuff
  /** @deprecated Don't use this */
  webpack_config?: string; // inherited
  jsx_factory?: string; // inherited
  jsx_fragment?: string; // inherited
  vars?: Vars;
  migrations?: DOMigration[];
  durable_objects?: { bindings: DurableObject[] };
  kv_namespaces?: KVNamespace[];
  site?: Site; // inherited
  // we should use typescript to parse cron patterns
  triggers?: { crons: Cron[] }; // inherited
  dev?: Dev;
  usage_model?: UsageModel; // inherited
  // top level
  build?: Build;
  // NOTE: `undefined` (not `void`) is the correct way to model "no
  // override for this environment" in a value position; `void` is for
  // return types whose value is ignored. This matches the
  // `void` -> `undefined` migration applied elsewhere in this file.
  env?: { [envName: string]: undefined | Env };
};
/**
 * Findings produced when validating a configuration. Each entry names
 * the offending configuration key and carries exactly one message:
 * informational, an error, or a warning.
 */
type ValidationResults = Array<
  | { key: string; info: string }
  | { key: string; error: string }
  | { key: string; warning: string }
>;
/**
 * Manually validates every field in the configuration as per the type
 * definitions, along with the extra constraints we apply to some fields
 * and to some combinations of fields. The collected findings are useful
 * for presenting errors and messages to the user. Eventually, we will
 * combine this with some automatic config rewriting tools.
 *
 * @param _config - the (possibly partial) configuration to validate.
 * @returns the list of validation findings (currently always empty —
 *   no checks are implemented yet).
 */
export async function validateConfig(
  _config: Partial<Config>
): Promise<ValidationResults> {
  // No checks are implemented yet, so there is nothing to report.
  const findings: ValidationResults = [];
  return findings;
}

@@ -0,14 +1,15 @@

import assert from "node:assert";
import path from "node:path";
import { readFile } from "node:fs/promises";
import esbuild from "esbuild";
import { execa } from "execa";
import tmp from "tmp-promise";
import type { CfWorkerInit } from "./api/worker";
import { toFormData } from "./api/form_data";
import esbuild from "esbuild";
import tmp from "tmp-promise";
import { fetchResult } from "./cfetch";
import type { Config } from "./config";
import path from "path";
import { readFile } from "fs/promises";
import cfetch from "./cfetch";
import assert from "node:assert";
import makeModuleCollector from "./module-collection";
import { syncAssets } from "./sites";
import makeModuleCollector from "./module-collection";
type CfScriptFormat = void | "modules" | "service-worker";
type CfScriptFormat = undefined | "modules" | "service-worker";

@@ -21,2 +22,4 @@ type Props = {

env?: string;
compatibilityDate?: string;
compatibilityFlags?: string[];
public?: string;

@@ -27,4 +30,4 @@ site?: string;

legacyEnv?: boolean;
jsxFactory: void | string;
jsxFragment: void | string;
jsxFactory: undefined | string;
jsxFragment: undefined | string;
};

@@ -52,2 +55,14 @@

const envRootObj =
props.env && config.env ? config.env[props.env] || {} : config;
assert(
envRootObj.compatibility_date || props.compatibilityDate,
"A compatibility_date is required when publishing. Add one to your wrangler.toml file, or pass it in your terminal as --compatibility_date. See https://developers.cloudflare.com/workers/platform/compatibility-dates for more information."
);
if (accountId === undefined) {
throw new Error("No account_id provided.");
}
const triggers = props.triggers || config.triggers?.crons;

@@ -82,2 +97,13 @@ const routes = props.routes || config.routes;

if (props.config.build?.command) {
// TODO: add a deprecation message here?
console.log("running:", props.config.build.command);
const buildCommandPieces = props.config.build.command.split(" ");
await execa(buildCommandPieces[0], buildCommandPieces.slice(1), {
stdout: "inherit",
stderr: "inherit",
...(props.config.build?.cwd && { cwd: props.config.build.cwd }),
});
}
const moduleCollector = makeModuleCollector();

@@ -115,14 +141,20 @@ const result = await esbuild.build({

const chunks = Object.entries(result.metafile.outputs).find(
([_path, { entryPoint }]) =>
entryPoint ===
(props.public
? path.join(path.dirname(file), "static-asset-facade.js")
: file)
// result.metafile is defined because of the `metafile: true` option above.
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const metafile = result.metafile!;
const expectedEntryPoint = props.public
? path.join(path.dirname(file), "static-asset-facade.js")
: file;
const outputEntry = Object.entries(metafile.outputs).find(
([, { entryPoint }]) => entryPoint === expectedEntryPoint
);
if (outputEntry === undefined) {
throw new Error(
`Cannot find entry-point "${expectedEntryPoint}" in generated bundle.`
);
}
const { format } = props;
const bundle = {
type: chunks[1].exports.length > 0 ? "esm" : "commonjs",
exports: chunks[1].exports,
type: outputEntry[1].exports.length > 0 ? "esm" : "commonjs",
exports: outputEntry[1].exports,
};

@@ -144,3 +176,3 @@

const content = await readFile(chunks[0], { encoding: "utf-8" });
const content = await readFile(outputEntry[0], { encoding: "utf-8" });
await destination.cleanup();

@@ -151,7 +183,7 @@

let migrations;
if ("migrations" in config) {
const scripts = await cfetch<{ id: string; migration_tag: string }[]>(
if (config.migrations !== undefined) {
const scripts = await fetchResult<{ id: string; migration_tag: string }[]>(
`/accounts/${accountId}/workers/scripts`
);
const script = scripts.find((script) => script.id === scriptName);
const script = scripts.find(({ id }) => id === scriptName);
if (script?.migration_tag) {

@@ -164,3 +196,3 @@ // was already published once

console.warn(
`The published script ${scriptName} has a migration tag "${script.migration_tag}, which was not found in wrangler.toml. You may have already delated it. Applying all available migrations to the script...`
`The published script ${scriptName} has a migration tag "${script.migration_tag}, which was not found in wrangler.toml. You may have already deleted it. Applying all available migrations to the script...`
);

@@ -189,13 +221,17 @@ migrations = {

const assets =
props.public || props.site || props.config.site?.bucket // TODO: allow both
? await syncAssets(
accountId,
scriptName,
props.public || props.site || props.config.site?.bucket,
false
)
: { manifest: undefined, namespace: undefined };
const assetPath = props.public || props.site || props.config.site?.bucket; // TODO: allow both
const assets = assetPath
? await syncAssets(accountId, scriptName, assetPath, false)
: { manifest: undefined, namespace: undefined };
const envRootObj = props.env ? config.env[props.env] || {} : config;
const bindings: CfWorkerInit["bindings"] = {
kv_namespaces: envRootObj.kv_namespaces?.concat(
assets.namespace
? { binding: "__STATIC_CONTENT", id: assets.namespace }
: []
),
vars: envRootObj.vars,
durable_objects: envRootObj.durable_objects,
services: envRootObj.experimental_services,
};

@@ -205,35 +241,17 @@ const worker: CfWorkerInit = {

main: {
name: path.basename(chunks[0]),
name: path.basename(outputEntry[0]),
content: content,
type: bundle.type === "esm" ? "esm" : "commonjs",
},
variables: {
...(envRootObj?.vars || {}),
...(envRootObj?.kv_namespaces || []).reduce(
(obj, { binding, preview_id: _preview_id, id }) => {
return { ...obj, [binding]: { namespaceId: id } };
},
{}
),
...(envRootObj?.durable_objects?.bindings || []).reduce(
(obj, { name, class_name, script_name }) => {
return {
...obj,
[name]: { class_name, ...(script_name && { script_name }) },
};
},
{}
),
...(assets.namespace
? { __STATIC_CONTENT: { namespaceId: assets.namespace } }
: {}),
},
bindings,
...(migrations && { migrations }),
modules: assets.manifest
? moduleCollector.modules.concat({
name: "__STATIC_CONTENT_MANIFEST",
content: JSON.stringify(assets.manifest),
type: "text",
})
: moduleCollector.modules,
modules: moduleCollector.modules.concat(
assets.manifest
? {
name: "__STATIC_CONTENT_MANIFEST",
content: JSON.stringify(assets.manifest),
type: "text",
}
: []
),
compatibility_date: config.compatibility_date,

@@ -255,4 +273,4 @@ compatibility_flags: config.compatibility_flags,

// Upload the script so it has time to propogate.
const { available_on_subdomain } = await cfetch(
// Upload the script so it has time to propagate.
const { available_on_subdomain } = await fetchResult(
`${workerUrl}?available_on_subdomain=true`,

@@ -271,3 +289,3 @@ {

const userSubdomain = (
await cfetch<{ subdomain: string }>(
await fetchResult<{ subdomain: string }>(
`/accounts/${accountId}/workers/subdomain`

@@ -286,3 +304,3 @@ )

deployments.push(
cfetch(`${workerUrl}/subdomain`, {
fetchResult(`${workerUrl}/subdomain`, {
method: "POST",

@@ -311,3 +329,3 @@ body: JSON.stringify({ enabled: true }),

deployments.push(
cfetch(`${workerUrl}/routes`, {
fetchResult(`${workerUrl}/routes`, {
// TODO: PATCH will not delete previous routes on this script,

@@ -337,3 +355,3 @@ // whereas PUT will. We need to decide on the default behaviour

deployments.push(
cfetch(`${workerUrl}/schedules`, {
fetchResult(`${workerUrl}/schedules`, {
// TODO: Unlike routes, this endpoint does not support PATCH.

@@ -340,0 +358,0 @@ // So technically, this will override any previous schedules.

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc