Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in | Demo | Install
Socket

wrangler

Package Overview
Dependencies
Maintainers
4
Versions
3668
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

wrangler - npm Package Compare versions

Comparing version 0.0.0-b84d907 to 0.0.0-b8e3b01

pages/functions/buildWorker.ts

66

package.json
{
"name": "wrangler",
"version": "0.0.0-b84d907",
"version": "0.0.0-b8e3b01",
"author": "wrangler@cloudflare.com",

@@ -39,7 +39,7 @@ "description": "Command-line interface for all things Cloudflare Workers",

"dependencies": {
"@cloudflare/pages-functions-compiler": "0.3.4",
"esbuild": "0.13.12",
"miniflare": "2.0.0-rc.2",
"esbuild": "0.14.14",
"miniflare": "2.2.0",
"path-to-regexp": "^6.2.0",
"semiver": "^1.1.0",
"serve": "^13.0.2"
"xxhash-addon": "^1.4.0"
},

@@ -50,31 +50,38 @@ "optionalDependencies": {

"devDependencies": {
"@babel/types": "^7.16.0",
"@iarna/toml": "^2.2.5",
"@types/cloudflare": "^2.7.6",
"@types/express": "^4.17.13",
"@types/react": "^17.0.34",
"@types/estree": "^0.0.50",
"@types/mime": "^2.0.3",
"@types/react": "^17.0.37",
"@types/serve-static": "^1.13.10",
"@types/signal-exit": "^3.0.1",
"@types/ws": "^8.2.0",
"@types/yargs": "^17.0.5",
"@types/ws": "^8.2.1",
"@types/yargs": "^17.0.7",
"acorn": "^8.6.0",
"acorn-walk": "^8.2.0",
"chokidar": "^3.5.2",
"clipboardy": "^3.0.0",
"command-exists": "^1.2.9",
"execa": "^5.1.1",
"express": "^4.17.1",
"devtools-protocol": "^0.0.955664",
"execa": "^6.0.0",
"faye-websocket": "^0.11.4",
"finalhandler": "^1.1.2",
"find-up": "^6.2.0",
"formdata-node": "^4.3.1",
"http-proxy": "^1.18.1",
"http-proxy-middleware": "^2.0.1",
"ignore": "^5.2.0",
"ink": "^3.2.0",
"ink-select-input": "^4.2.0",
"ink-select-input": "^4.2.1",
"ink-table": "^3.0.0",
"ink-text-input": "^4.0.1",
"node-fetch": "^3.1.0",
"ink-testing-library": "^2.1.0",
"ink-text-input": "^4.0.2",
"jest-fetch-mock": "^3.0.3",
"mime": "^3.0.0",
"open": "^8.4.0",
"path-to-regexp": "^6.2.0",
"react": "^17.0.2",
"react-error-boundary": "^3.1.4",
"serve-static": "^1.14.1",
"signal-exit": "^3.0.5",
"signal-exit": "^3.0.6",
"tmp-promise": "^3.0.3",
"ws": "^8.2.3",
"yargs": "^17.2.1"
"undici": "4.13.0",
"ws": "^8.3.0",
"yargs": "^17.3.0"
},

@@ -84,2 +91,3 @@ "files": [

"bin",
"pages",
"miniflare-config-stubs",

@@ -96,3 +104,4 @@ "wrangler-dist",

"start": "npm run bundle && NODE_OPTIONS=--enable-source-maps ./bin/wrangler.js",
"test": "CF_API_TOKEN=some-api-token CF_ACCOUNT_ID=some-account-id jest --silent=false --verbose=true"
"test": "CF_API_TOKEN=some-api-token CF_ACCOUNT_ID=some-account-id jest --silent=false --verbose=true",
"test-watch": "npm run test -- --runInBand --testTimeout=50000 --watch"
},

@@ -103,5 +112,7 @@ "engines": {

"jest": {
"restoreMocks": true,
"testTimeout": 30000,
"testRegex": ".*.(test|spec)\\.[jt]sx?$",
"transformIgnorePatterns": [
"node_modules/(?!node-fetch|fetch-blob|find-up|locate-path|p-locate|p-limit|yocto-queue|path-exists|data-uri-to-buffer|formdata-polyfill)"
"node_modules/(?!find-up|locate-path|p-locate|p-limit|yocto-queue|path-exists|execa|strip-final-newline|npm-run-path|path-key|onetime|mimic-fn|human-signals|is-stream)"
],

@@ -118,4 +129,7 @@ "moduleNameMapper": {

]
}
},
"setupFilesAfterEnv": [
"<rootDir>/src/__tests__/jest.setup.ts"
]
}
}
}

@@ -1,153 +0,424 @@

import * as fs from "node:fs";
import * as fsp from "node:fs/promises";
import * as path from "node:path";
import * as TOML from "@iarna/toml";
import { main } from "../index";
// @ts-expect-error we're mocking cfetch, so of course setMock isn't a thing
import { setMock, unsetAllMocks } from "../cfetch";
import { mockConfirm } from "./mock-dialogs";
import { runWrangler } from "./run-wrangler";
import { runInTempDir } from "./run-in-tmp";
import { mockConsoleMethods } from "./mock-console";
import * as fs from "node:fs";
jest.mock("../cfetch", () => {
return jest.requireActual("./mock-cfetch");
});
describe("wrangler", () => {
runInTempDir();
async function w(cmd: void | string, options?: { tap: boolean }) {
const tapped = options?.tap ? tap() : undefined;
await main([...(cmd ? cmd.split(" ") : [])]);
tapped?.off();
return { stdout: tapped?.out, stderr: tapped?.err };
}
const std = mockConsoleMethods();
function tap() {
const oldLog = console.log;
const oldError = console.error;
describe("no command", () => {
it("should display a list of available commands", async () => {
await runWrangler();
const toReturn = {
off: () => {
console.log = oldLog;
console.error = oldError;
},
out: "",
err: "",
};
expect(std.out).toMatchInlineSnapshot(`
"wrangler
console.log = (...args) => {
toReturn.out += args.join("");
oldLog.apply(console, args);
// console.trace(...args); // use this if you want to find the true source of your console.log
};
console.error = (...args) => {
toReturn.err += args.join("");
oldError.apply(console, args);
};
Commands:
wrangler init [name] 📥 Create a wrangler.toml configuration file
wrangler whoami 🕵️ Retrieve your user info and test your auth config
wrangler dev <filename> 👂 Start a local server for developing your worker
wrangler publish [script] 🆙 Publish your Worker to Cloudflare.
wrangler tail [name] 🦚 Starts a log tailing session for a deployed Worker.
wrangler secret 🤫 Generate a secret that can be referenced in the worker script
wrangler kv:namespace 🗂️ Interact with your Workers KV Namespaces
wrangler kv:key 🔑 Individually manage Workers KV key-value pairs
wrangler kv:bulk 💪 Interact with multiple Workers KV key-value pairs at once
wrangler pages ⚡️ Configure Cloudflare Pages
return toReturn;
}
Flags:
-c, --config Path to .toml configuration file [string]
-h, --help Show help [boolean]
-v, --version Show version number [boolean]
describe("wrangler", () => {
it("should run", async () => {
const { stdout } = await w(undefined, { tap: true });
Options:
-l, --local Run on my machine [boolean] [default: false]"
`);
expect(stdout).toMatchInlineSnapshot(`
"wrangler
expect(std.err).toMatchInlineSnapshot(`""`);
});
});
Commands:
wrangler init [name] 📥 Create a wrangler.toml configuration file
wrangler dev <filename> 👂 Start a local server for developing your worker
wrangler publish [script] 🆙 Publish your Worker to Cloudflare.
wrangler tail [name] 🦚 Starts a log tailing session for a deployed Worker.
wrangler secret 🤫 Generate a secret that can be referenced in the worker script
wrangler kv:namespace 🗂️ Interact with your Workers KV Namespaces
wrangler kv:key 🔑 Individually manage Workers KV key-value pairs
wrangler kv:bulk 💪 Interact with multiple Workers KV key-value pairs at once
wrangler pages ⚡️ Configure Cloudflare Pages
describe("invalid command", () => {
it("should display an error", async () => {
let err: Error | undefined;
try {
await runWrangler("invalid-command");
} catch (e) {
err = e;
} finally {
expect(err?.message).toBe(`Unknown command: invalid-command.`);
}
Flags:
--config Path to .toml configuration file [string]
--help Show help [boolean]
--version Show version number [boolean]
expect(std.out).toMatchInlineSnapshot(`""`);
expect(std.err).toMatchInlineSnapshot(`
"wrangler
Options:
--local Run on my machine [boolean] [default: false]"
`);
});
Commands:
wrangler init [name] 📥 Create a wrangler.toml configuration file
wrangler whoami 🕵️ Retrieve your user info and test your auth config
wrangler dev <filename> 👂 Start a local server for developing your worker
wrangler publish [script] 🆙 Publish your Worker to Cloudflare.
wrangler tail [name] 🦚 Starts a log tailing session for a deployed Worker.
wrangler secret 🤫 Generate a secret that can be referenced in the worker script
wrangler kv:namespace 🗂️ Interact with your Workers KV Namespaces
wrangler kv:key 🔑 Individually manage Workers KV key-value pairs
wrangler kv:bulk 💪 Interact with multiple Workers KV key-value pairs at once
wrangler pages ⚡️ Configure Cloudflare Pages
describe("init", () => {
const ogcwd = process.cwd();
Flags:
-c, --config Path to .toml configuration file [string]
-h, --help Show help [boolean]
-v, --version Show version number [boolean]
beforeEach(() => {
process.chdir(path.join(__dirname, "fixtures", "init"));
});
Options:
-l, --local Run on my machine [boolean] [default: false]
afterEach(async () => {
await fsp.rm("./wrangler.toml");
process.chdir(ogcwd);
Unknown command: invalid-command."
`);
});
});
describe("init", () => {
it("should create a wrangler.toml", async () => {
await w("init");
mockConfirm({
text: "No package.json found. Would you like to create one?",
result: false,
});
await runWrangler("init");
const parsed = TOML.parse(await fsp.readFile("./wrangler.toml", "utf-8"));
expect(typeof parsed.compatibility_date).toBe("string");
expect(fs.existsSync("./package.json")).toBe(false);
expect(fs.existsSync("./tsconfig.json")).toBe(false);
});
it("should error when wrangler.toml already exists", async () => {
fs.closeSync(fs.openSync("./wrangler.toml", "w"));
const { stderr } = await w("init", { tap: true });
expect(stderr.endsWith("wrangler.toml already exists.")).toBe(true);
it("should display warning when wrangler.toml already exists, and exit if user does not want to carry on", async () => {
fs.writeFileSync(
"./wrangler.toml",
'compatibility_date="something-else"', // use a fake value to make sure the file is not overwritten
"utf-8"
);
mockConfirm({
text: "Do you want to continue initializing this project?",
result: false,
});
await runWrangler("init");
expect(std.warn).toContain("wrangler.toml file already exists!");
const parsed = TOML.parse(await fsp.readFile("./wrangler.toml", "utf-8"));
expect(parsed.compatibility_date).toBe("something-else");
});
});
describe("kv:namespace", () => {
afterAll(() => {
unsetAllMocks();
it("should display warning when wrangler.toml already exists, but continue if user does want to carry on", async () => {
fs.writeFileSync(
"./wrangler.toml",
`compatibility_date="something-else"`,
"utf-8"
);
mockConfirm(
{
text: "Do you want to continue initializing this project?",
result: true,
},
{
text: "No package.json found. Would you like to create one?",
result: false,
}
);
await runWrangler("init");
expect(std.warn).toContain("wrangler.toml file already exists!");
const parsed = TOML.parse(await fsp.readFile("./wrangler.toml", "utf-8"));
expect(parsed.compatibility_date).toBe("something-else");
});
let KVNamespaces: { title: string; id: string }[] = [];
it("can create a namespace", async () => {
setMock("/accounts/:accountId/storage/kv/namespaces", (uri, init) => {
expect(init.method === "POST");
const body = JSON.parse(init.body);
expect(body.title).toBe("worker-UnitTestNamespace");
KVNamespaces.push({ title: body.title, id: "some-namespace-id" });
return { id: "some-namespace-id" };
it("should create a package.json if none is found and user confirms", async () => {
mockConfirm(
{
text: "No package.json found. Would you like to create one?",
result: true,
},
{
text: "Would you like to use typescript?",
result: false,
}
);
await runWrangler("init");
expect(fs.existsSync("./package.json")).toBe(true);
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.name).toEqual("worker"); // TODO: should we infer the name from the directory?
expect(packageJson.version).toEqual("0.0.1");
expect(packageJson.devDependencies).toEqual({
wrangler: expect.any(String),
});
expect(fs.existsSync("./tsconfig.json")).toBe(false);
});
await w('kv:namespace create "UnitTestNamespace"');
expect(
KVNamespaces.find((ns) => ns.title === `worker-UnitTestNamespace`)
).toBeTruthy();
it("should not touch an existing package.json in the same directory", async () => {
mockConfirm(
{
text: "Would you like to install wrangler into your package.json?",
result: false,
},
{
text: "Would you like to use typescript?",
result: false,
}
);
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
await runWrangler("init");
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.name).toEqual("test");
expect(packageJson.version).toEqual("1.0.0");
});
let createdNamespace: { id: string; title: string };
it("can list namespaces", async () => {
setMock(
"/accounts/:accountId/storage/kv/namespaces\\?:qs",
(uri, init) => {
expect(init).toBe(undefined);
return KVNamespaces;
it("should offer to install wrangler into an existing package.json", async () => {
mockConfirm(
{
text: "Would you like to install wrangler into your package.json?",
result: true,
},
{
text: "Would you like to use typescript?",
result: false,
}
);
const { stdout } = await w("kv:namespace list", { tap: true });
const namespaces = JSON.parse(stdout);
createdNamespace = namespaces.find(
(ns) => ns.title === "worker-UnitTestNamespace"
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
expect(createdNamespace.title).toBe("worker-UnitTestNamespace");
await runWrangler("init");
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.name).toEqual("test");
expect(packageJson.version).toEqual("1.0.0");
expect(packageJson.devDependencies).toEqual({
wrangler: expect.any(String),
});
});
it("can delete a namespace", async () => {
const namespaceIdToDelete = createdNamespace.id;
setMock(
"/accounts/:accountId/storage/kv/namespaces/:namespaceId",
(uri, init) => {
expect(init.method).toBe("DELETE");
KVNamespaces = KVNamespaces.filter(
(ns) => ns.id !== namespaceIdToDelete
);
it("should not touch an existing package.json in an ancestor directory", async () => {
mockConfirm(
{
text: "Would you like to install wrangler into your package.json?",
result: false,
},
{
text: "Would you like to use typescript?",
result: false,
}
);
await w(`kv:namespace delete --namespace-id ${namespaceIdToDelete}`);
expect(KVNamespaces.find((ns) => ns.id === namespaceIdToDelete)).toBe(
undefined
fs.writeFileSync(
"./package.json",
JSON.stringify({ name: "test", version: "1.0.0" }),
"utf-8"
);
fs.mkdirSync("./sub-1/sub-2", { recursive: true });
process.chdir("./sub-1/sub-2");
await runWrangler("init");
expect(fs.existsSync("./package.json")).toBe(false);
expect(fs.existsSync("../../package.json")).toBe(true);
const packageJson = JSON.parse(
fs.readFileSync("../../package.json", "utf-8")
);
expect(packageJson).toMatchInlineSnapshot(`
Object {
"name": "test",
"version": "1.0.0",
}
`);
});
it("should create a tsconfig.json and install `workers-types` if none is found and user confirms", async () => {
mockConfirm(
{
text: "No package.json found. Would you like to create one?",
result: true,
},
{
text: "Would you like to use typescript?",
result: true,
}
);
await runWrangler("init");
expect(fs.existsSync("./tsconfig.json")).toBe(true);
const tsconfigJson = JSON.parse(
fs.readFileSync("./tsconfig.json", "utf-8")
);
expect(tsconfigJson.compilerOptions.types).toEqual([
"@cloudflare/workers-types",
]);
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.devDependencies).toEqual({
"@cloudflare/workers-types": expect.any(String),
wrangler: expect.any(String),
});
});
it("should not touch an existing tsconfig.json in the same directory", async () => {
fs.writeFileSync(
"./package.json",
JSON.stringify({
name: "test",
version: "1.0.0",
devDependencies: {
wrangler: "0.0.0",
"@cloudflare/workers-types": "0.0.0",
},
}),
"utf-8"
);
fs.writeFileSync(
"./tsconfig.json",
JSON.stringify({ compilerOptions: {} }),
"utf-8"
);
await runWrangler("init");
const tsconfigJson = JSON.parse(
fs.readFileSync("./tsconfig.json", "utf-8")
);
expect(tsconfigJson.compilerOptions).toEqual({});
});
it("should offer to install type definitions in an existing typescript project", async () => {
mockConfirm(
{
text: "Would you like to install wrangler into your package.json?",
result: false,
},
{
text: "Would you like to install the type definitions for Workers into your package.json?",
result: true,
}
);
fs.writeFileSync(
"./package.json",
JSON.stringify({
name: "test",
version: "1.0.0",
}),
"utf-8"
);
fs.writeFileSync(
"./tsconfig.json",
JSON.stringify({ compilerOptions: {} }),
"utf-8"
);
await runWrangler("init");
const tsconfigJson = JSON.parse(
fs.readFileSync("./tsconfig.json", "utf-8")
);
// unchanged tsconfig
expect(tsconfigJson.compilerOptions).toEqual({});
const packageJson = JSON.parse(
fs.readFileSync("./package.json", "utf-8")
);
expect(packageJson.devDependencies).toEqual({
"@cloudflare/workers-types": expect.any(String),
});
});
it("should not touch an existing tsconfig.json in an ancestor directory", async () => {
fs.writeFileSync(
"./package.json",
JSON.stringify({
name: "test",
version: "1.0.0",
devDependencies: {
wrangler: "0.0.0",
"@cloudflare/workers-types": "0.0.0",
},
}),
"utf-8"
);
fs.writeFileSync(
"./tsconfig.json",
JSON.stringify({ compilerOptions: {} }),
"utf-8"
);
fs.mkdirSync("./sub-1/sub-2", { recursive: true });
process.chdir("./sub-1/sub-2");
await runWrangler("init");
expect(fs.existsSync("./tsconfig.json")).toBe(false);
expect(fs.existsSync("../../tsconfig.json")).toBe(true);
const tsconfigJson = JSON.parse(
fs.readFileSync("../../tsconfig.json", "utf-8")
);
expect(tsconfigJson.compilerOptions).toEqual({});
});
it("should error if `--type` is used", async () => {
let err: undefined | Error;
try {
await runWrangler("init --type");
} catch (e) {
err = e;
} finally {
expect(err?.message).toBe(`The --type option is no longer supported.`);
}
});
it("should error if `--type javascript` is used", async () => {
let err: undefined | Error;
try {
await runWrangler("init --type javascript");
} catch (e) {
err = e;
} finally {
expect(err?.message).toBe(`The --type option is no longer supported.`);
}
});
it("should error if `--type rust` is used", async () => {
let err: undefined | Error;
try {
await runWrangler("init --type rust");
} catch (e) {
err = e;
} finally {
expect(err?.message).toBe(`The --type option is no longer supported.`);
}
});
it("should error if `--type webpack` is used", async () => {
let err: undefined | Error;
try {
await runWrangler("init --type webpack");
} catch (e) {
err = e;
} finally {
expect(err?.message).toBe(
`The --type option is no longer supported.
If you wish to use webpack then you will need to create a custom build.`
);
}
});
});
});
import type {
CfWorkerInit,
CfModuleType,
CfVariable,
CfModule,
CfKvNamespace,
CfCryptoKey,
CfDurableObjectMigrations,
} from "./worker.js";
import { FormData, Blob } from "formdata-node";
import { FormData, File } from "undici";
// Credit: https://stackoverflow.com/a/9458996
/**
 * Encodes a `BufferSource` (an `ArrayBuffer` or any typed-array view) as a
 * base64 string.
 *
 * Fix: when `source` is a view (e.g. a `Uint8Array` with a non-zero
 * `byteOffset`), only the region covered by the view is encoded. The old
 * code used `source.buffer`, which silently encoded the *entire* underlying
 * buffer instead of just the view's window.
 */
function toBase64(source: BufferSource): string {
  const bytes =
    source instanceof ArrayBuffer
      ? new Uint8Array(source)
      : new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
  let result = "";
  for (let i = 0; i < bytes.byteLength; i++) {
    result += String.fromCharCode(bytes[i]);
  }
  return btoa(result);
}
/**
 * Converts a named variable into the binding object shape expected by the
 * Workers upload API, dispatching on the variable's runtime shape:
 * - a plain string → a `plain_text` binding;
 * - an object carrying `namespaceId` → a `kv_namespace` binding;
 * - an object carrying `format` → a `secret_key` binding, with the key
 *   material serialized as base64 for binary data, or as JWK otherwise.
 *
 * @throws TypeError if the variable matches none of the known shapes.
 */
function toBinding(
  name: string,
  variable: CfVariable
): Record<string, unknown> {
  if (typeof variable === "string") {
    return { name, type: "plain_text", text: variable };
  }

  const kv = variable as CfKvNamespace;
  if (kv.namespaceId) {
    return { name, type: "kv_namespace", namespace_id: kv.namespaceId };
  }

  const { format, algorithm, usages, data } = variable as CfCryptoKey;
  if (format) {
    const binding: Record<string, unknown> = {
      name,
      type: "secret_key",
      format,
      algorithm,
      usages,
      key_base64: undefined,
      key_jwk: undefined,
    };
    if (data instanceof ArrayBuffer || ArrayBuffer.isView(data)) {
      binding.key_base64 = toBase64(data);
    } else {
      binding.key_jwk = data;
    }
    return binding;
  }

  throw new TypeError("Unsupported variable: " + variable);
}
export function toMimeType(type: CfModuleType): string {

@@ -75,7 +26,33 @@ switch (type) {

function toModule(module: CfModule, entryType?: CfModuleType): Blob {
function toModule(module: CfModule, entryType: CfModuleType): File {
const { type: moduleType, content } = module;
const type = toMimeType(moduleType ?? entryType);
return new File([content], module.name, { type });
}
return new Blob([content], { type });
/**
 * The shape of the `metadata` JSON part sent alongside the script modules
 * in the multipart form-data upload of a Worker (see the
 * `formData.set("metadata", JSON.stringify(metadata))` call below).
 */
export interface WorkerMetadata {
  /** The name of the entry point module. Only exists when the worker is in the ES module format */
  main_module?: string;
  /** The name of the entry point module. Only exists when the worker is in the Service Worker format */
  body_part?: string;
  /** Compatibility date (yyyy-mm-dd) selecting the Workers runtime behavior; omitted when not configured. */
  compatibility_date?: string;
  /** Named runtime feature flags, usually used together with `compatibility_date`. */
  compatibility_flags?: string[];
  /** Usage/billing model for the worker; omitted when not configured. */
  usage_model?: "bundled" | "unbound";
  /** Durable Object class migrations to apply with this upload; omitted when none. */
  migrations?: CfDurableObjectMigrations;
  /** All bindings exposed to the worker, discriminated by `type`. */
  bindings: (
    | { type: "kv_namespace"; name: string; namespace_id: string }
    | { type: "plain_text"; name: string; text: string }
    | {
        type: "durable_object_namespace";
        name: string;
        class_name: string;
        script_name?: string;
      }
    | {
        type: "service";
        name: string;
        service: string;
        environment: string;
      }
  )[];
}

@@ -88,28 +65,60 @@

const formData = new FormData();
const { main, modules, variables, usage_model, compatibility_date } = worker;
const { name, type: mainType } = main;
const {
main,
modules,
bindings,
migrations,
usage_model,
compatibility_date,
compatibility_flags,
} = worker;
const bindings = [];
for (const [name, variable] of Object.entries(variables ?? {})) {
const binding = toBinding(name, variable);
bindings.push(binding);
}
const metadataBindings: WorkerMetadata["bindings"] = [];
const singleton = mainType === "commonjs";
const metadata = {
main_module: singleton ? undefined : name,
body_part: singleton ? name : undefined,
bindings,
bindings.kv_namespaces?.forEach(({ id, binding }) => {
metadataBindings.push({
name: binding,
type: "kv_namespace",
namespace_id: id,
});
});
bindings.durable_objects?.bindings.forEach(
({ name, class_name, script_name }) => {
metadataBindings.push({
name,
type: "durable_object_namespace",
class_name: class_name,
...(script_name && { script_name }),
});
}
);
Object.entries(bindings.vars || {})?.forEach(([key, value]) => {
metadataBindings.push({ name: key, type: "plain_text", text: value });
});
bindings.services?.forEach(({ name, service, environment }) => {
metadataBindings.push({
name,
type: "service",
service,
environment,
});
});
const metadata: WorkerMetadata = {
...(main.type !== "commonjs"
? { main_module: main.name }
: { body_part: main.name }),
bindings: metadataBindings,
...(compatibility_date && { compatibility_date }),
...(compatibility_flags && { compatibility_flags }),
...(usage_model && { usage_model }),
...(migrations && { migrations }),
};
if (compatibility_date) {
// @ts-expect-error - we should type metadata
metadata.compatibility_date = compatibility_date;
}
if (usage_model) {
// @ts-expect-error - we should type metadata
metadata.usage_model = usage_model;
}
formData.set("metadata", JSON.stringify(metadata));
if (singleton && modules && modules.length > 0) {
if (main.type === "commonjs" && modules && modules.length > 0) {
throw new TypeError(

@@ -122,4 +131,4 @@ "More than one module can only be specified when type = 'esm'"

const { name } = module;
const blob = toModule(module, mainType ?? "esm");
formData.set(name, blob, name);
const file = toModule(module, main.type ?? "esm");
formData.set(name, file);
}

@@ -126,0 +135,0 @@

@@ -1,3 +0,4 @@

import cfetch from "../cfetch";
import { fetchJson } from "../util/fetch";
import { URL } from "node:url";
import { fetch } from "undici";
import { fetchResult } from "../cfetch";
import { toFormData } from "./form_data";

@@ -12,3 +13,3 @@ import type { CfAccount, CfWorkerInit } from "./worker";

*/
export type CfPreviewMode = { workers_dev: boolean } | { routes: string[] };
type CfPreviewMode = { workers_dev: boolean } | { routes: string[] };

@@ -64,7 +65,7 @@ /**

const { exchange_url: tokenUrl } = await cfetch<{ exchange_url: string }>(
initUrl
);
const { inspector_websocket: url, token } = await fetchJson()(tokenUrl);
const { host } = new URL(url);
const { exchange_url } = await fetchResult<{ exchange_url: string }>(initUrl);
const { inspector_websocket, token } = (await (
await fetch(exchange_url)
).json()) as { inspector_websocket: string; token: string };
const { host } = new URL(inspector_websocket);
const query = `cf_workers_preview_token=${token}`;

@@ -75,3 +76,3 @@

host,
inspectorUrl: new URL(`${url}?${query}`),
inspectorUrl: new URL(`${inspector_websocket}?${query}`),
prewarmUrl: new URL(

@@ -102,23 +103,27 @@ `https://${host}/cdn-cgi/workers/preview/prewarm?${query}`

const { accountId, zoneId } = account;
const scriptId = zoneId ? randomId() : host.split(".")[0];
const scriptId = zoneId ? randomId() : worker.name || host.split(".")[0];
const url = `/accounts/${accountId}/workers/scripts/${scriptId}/edge-preview`;
const mode = zoneId ? { routes: ["*/*"] } : { workers_dev: true };
const mode: CfPreviewMode = zoneId
? { routes: ["*/*"] }
: { workers_dev: true };
const formData = toFormData(worker);
formData.set("wrangler-session-config", JSON.stringify(mode));
const { preview_token: token } = await cfetch<{ preview_token: string }>(
url,
{
method: "POST",
// @ts-expect-error TODO: fix this
body: formData,
headers: {
"cf-preview-upload-config-token": value,
},
}
);
const { preview_token } = await fetchResult<{ preview_token: string }>(url, {
method: "POST",
body: formData,
headers: {
"cf-preview-upload-config-token": value,
},
});
return {
value: token,
host,
value: preview_token,
// TODO: verify this works with zoned workers
host:
worker.name && !zoneId
? `${worker.name}.${host.split(".").slice(1).join(".")}`
: host,
inspectorUrl,

@@ -125,0 +130,0 @@ prewarmUrl,

import type { CfPreviewToken } from "./preview";
import { previewToken } from "./preview";
import fetch from "node-fetch";
import { fetch } from "undici";

@@ -56,3 +56,3 @@ /**

*/
content: string | BufferSource;
content: string | Buffer;
/**

@@ -67,40 +67,44 @@ * The module type.

/**
 * A map of variable names to values.
 * Each entry is uploaded as a `plain_text` binding (see the bindings
 * handling in `toFormData`).
 */
interface CfVars {
  [key: string]: string;
}
/**
* A KV namespace.
*/
export interface CfKvNamespace {
/**
* The namespace ID.
*/
namespaceId: string;
interface CfKvNamespace {
binding: string;
id: string;
}
/**
* A `WebCrypto` key.
*
* @link https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/importKey
* A Durable Object.
*/
export interface CfCryptoKey {
/**
* The format.
*/
format: string;
/**
* The algorithm.
*/
algorithm: string;
/**
* The usages.
*/
usages: string[];
/**
* The data.
*/
data: BufferSource | JsonWebKey;
interface CfDurableObject {
  /** Binding name exposed to the worker. */
  name: string;
  /** The Durable Object class backing this namespace. */
  class_name: string;
  /** NOTE(review): presumably the name of another script that defines the class — confirm against the upload API. */
  script_name?: string;
}
/**
* A variable (aka. environment variable).
* A Service.
*/
export type CfVariable = string | CfKvNamespace | CfCryptoKey;
/**
 * A Service binding; uploaded as a `type: "service"` binding.
 */
interface CfService {
  /** Binding name exposed to the worker. */
  name: string;
  /** Name of the target service. */
  service: string;
  /** Environment of the target service. */
  environment: string;
}
/**
 * Durable Object migrations, included in the upload metadata when present.
 * Describes how Durable Object classes changed between tagged versions.
 */
export interface CfDurableObjectMigrations {
  /** Tag of the previously applied migration, if any. */
  old_tag?: string;
  /** Tag this set of migration steps upgrades the script to. */
  new_tag: string;
  /** Ordered migration steps; each lists class names affected by that step. */
  steps: {
    new_classes?: string[];
    renamed_classes?: string[];
    deleted_classes?: string[];
  }[];
}
/**

@@ -111,2 +115,6 @@ * Options for creating a `CfWorker`.

/**
* The name of the worker.
*/
name: string | undefined;
/**
* The entrypoint module.

@@ -118,9 +126,16 @@ */

*/
modules: void | CfModule[];
modules: undefined | CfModule[];
/**
* The map of names to variables. (aka. environment variables)
* All the bindings
*/
variables?: { [name: string]: CfVariable };
compatibility_date?: string;
usage_model?: void | "bundled" | "unbound";
bindings: {
kv_namespaces?: CfKvNamespace[];
durable_objects?: { bindings: CfDurableObject[] };
vars?: CfVars;
services?: CfService[];
};
migrations: undefined | CfDurableObjectMigrations;
compatibility_date: string | undefined;
compatibility_flags: undefined | string[];
usage_model: undefined | "bundled" | "unbound";
}

@@ -127,0 +142,0 @@

@@ -0,10 +1,9 @@

import { hideBin } from "yargs/helpers";
import { main } from ".";
main(process.argv.slice(2)).catch((cause) => {
const { name, message } = cause;
if (name === "CloudflareError") {
console.error("\x1b[31m", message);
return;
}
throw cause;
main(hideBin(process.argv)).catch(() => {
// The logging of any error that was thrown from `main()` is handled in the `yargs.fail()` handler.
// Here we just want to ensure that the process exits with a non-zero code.
// We don't want to do this inside the `main()` function, since that would kill the process when running our tests.
process.exit(1);
});

@@ -1,96 +0,641 @@

// we're going to manually write both the type definition AND
// the validator for the config, so that we can give better error messages
import assert from "node:assert";
type Project = "webpack" | "javascript" | "rust";
/**
* This is the static type definition for the configuration object.
* It reflects the configuration that you can write in wrangler.toml,
* and optionally augment with arguments passed directly to wrangler.
* The type definition doesn't fully reflect the constraints applied
* to the configuration, but it is a good starting point. Later, we
* also defined a validator function that will validate the configuration
* with the same rules as the type definition, as well as the extra
* constraints. The type definition is good for asserting correctness
* in the wrangler codebase, whereas the validator function is useful
* for signalling errors in the configuration to a user of wrangler.
*
* For more information about the configuration object, see the
* documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration
*
* Legend for the annotations:
*
* *:optional means providing a value isn't mandatory
* *:deprecated means the field itself isn't necessary anymore in wrangler.toml
* *:breaking means the deprecation/optionality is a breaking change from wrangler 1
* *:todo means there's more work to be done (with details attached)
* *:inherited means the field is copied to all environments
*/
export type Config = {
/**
* The name of your worker. Alphanumeric + dashes only.
*
* @optional
* @inherited
*/
name?: string;
type Site = {
// inherited
bucket: string;
"entry-point": string;
include?: string[];
exclude?: string[];
};
/**
* The entrypoint/path to the JavaScript file that will be executed.
*
* @optional
* @inherited
* @todo this needs to be implemented!
*/
entry?: string;
type Dev = {
ip?: string;
port?: number;
local_protocol?: string;
upstream_protocol?: string;
};
/**
* This is the ID of the account associated with your zone.
* You might have more than one account, so make sure to use
* the ID of the account associated with the zone/route you
* provide, if you provide one. It can also be specified through
* the CF_ACCOUNT_ID environment variable.
*
* @optional
* @inherited
*/
account_id?: string;
type Vars = { [key: string]: string };
/**
* The project "type". A holdover from wrangler 1.x.
* Valid values were "webpack", "javascript", and "rust".
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @optional
* @inherited
* @breaking
*/
type?: "webpack" | "javascript" | "rust";
type Cron = string; // TODO: we should be able to parse a cron pattern with ts
/**
* A date in the form yyyy-mm-dd, which will be used to determine
* which version of the Workers runtime is used. More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
* @optional true for `dev`, false for `publish`
* @inherited
*/
compatibility_date?: string;
type KVNamespace = {
binding?: string;
preview_id: string;
id: string;
/**
* A list of flags that enable features from upcoming features of
* the Workers runtime, usually used together with compatibility_flags.
* More details at
* https://developers.cloudflare.com/workers/platform/compatibility-dates
*
* @optional
* @inherited
*/
compatibility_flags?: string[];
/**
* Whether we use <name>.<subdomain>.workers.dev to
* test and deploy your worker.
*
* @default `true` (This is a breaking change from wrangler 1)
* @optional
* @inherited
* @breaking
*/
workers_dev?: boolean;
/**
* The zone ID of the zone you want to deploy to. You can find this
* in your domain page on the dashboard.
*
* @deprecated This is unnecessary since we can deduce this from routes directly.
* @optional
* @inherited
*/
zone_id?: string;
/**
* A list of routes that your worker should be deployed to.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
routes?: string[];
/**
* A route that your worker should be deployed to. Literally
* the same as routes, but only one.
* Only one of `routes` or `route` is required.
*
* @optional false only when workers_dev is false, and there's no scheduled worker
* @inherited
*/
route?: string;
/**
* Path to the webpack config to use when building your worker.
* A holdover from wrangler 1.x, used with `type: "webpack"`.
*
* @deprecated DO NOT USE THIS. Most common features now work out of the box with wrangler, including modules, jsx, typescript, etc. If you need anything more, use a custom build.
* @inherited
* @breaking
*/
webpack_config?: string;
/**
* The function to use to replace jsx syntax.
*
* @default `"React.createElement"`
* @optional
* @inherited
*/
jsx_factory?: string;
/**
* The function to use to replace jsx fragment syntax.
*
* @default `"React.Fragment"`
* @optional
* @inherited
*/
jsx_fragment?: string;
/**
* A map of environment variables to set when deploying your worker.
* Of note, they can only be strings. Which is unfortunate, really.
* (TODO: verify that they can only be strings?)
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
vars?: { [key: string]: string };
/**
* A list of durable objects that your worker should be bound to.
* For more information about Durable Objects, see the documentation at
* https://developers.cloudflare.com/workers/learning/using-durable-objects
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `{ bindings: [] }`
* @optional
* @inherited false
*/
durable_objects?: {
bindings: {
/** The name of the binding used to refer to the Durable Object */
name: string;
/** The exported class name of the Durable Object */
class_name: string;
/** The script where the Durable Object is defined (if it's external to this worker) */
script_name?: string;
}[];
};
/**
* These specify any Workers KV Namespaces you want to
* access from inside your Worker. To learn more about KV Namespaces,
* see the documentation at https://developers.cloudflare.com/workers/learning/how-kv-works
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
kv_namespaces?: {
/** The binding name used to refer to the KV Namespace */
binding: string;
/** The ID of the KV namespace */
id: string;
/** The ID of the KV namespace used during `wrangler dev` */
preview_id?: string;
}[];
/**
* A list of services that your worker should be bound to.
* NB: these are not inherited, and HAVE to be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited false
*/
experimental_services?: {
/** The binding name used to refer to the Service */
name: string;
/** The name of the Service being bound */
service: string;
/** The Service's environment */
environment: string;
}[];
/**
* A list of migrations that should be uploaded with your Worker.
* These define changes in your Durable Object declarations.
* More details at https://developers.cloudflare.com/workers/learning/using-durable-objects#configuring-durable-object-classes-with-migrations
* NB: these ARE inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `[]`
* @optional
* @inherited true
*/
migrations?: {
/** A unique identifier for this migration. */
tag: string;
/** The new Durable Objects being defined. */
new_classes?: string[];
/** The Durable Objects being renamed. */
renamed_classes?: {
from: string;
to: string;
}[];
/** The Durable Objects being removed. */
deleted_classes?: string[];
}[];
/**
* The definition of a Worker Site, a feature that lets you upload
* static assets with your Worker.
* More details at https://developers.cloudflare.com/workers/platform/sites
* NB: This IS inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `undefined`
* @optional
* @inherited true
*/
site?: {
/**
* The directory containing your static assets. It must be
* a path relative to your wrangler.toml file.
* Example: bucket = "./public"
*
* optional false
*/
bucket: string;
/**
* The location of your Worker script.
*
* @deprecated DO NOT use this (it's a holdover from wrangler 1.x). Either use the top level `entry` field, or pass the path to your entry file as a command line argument.
* @breaking
*/
"entry-point"?: string;
/**
* An exclusive list of .gitignore-style patterns that match file
* or directory names from your bucket location. Only matched
* items will be uploaded. Example: include = ["upload_dir"]
*
* @optional
* @default `[]`
*/
include?: string[];
/**
* A list of .gitignore-style patterns that match files or
* directories in your bucket that should be excluded from
* uploads. Example: exclude = ["ignore_dir"]
*
* @optional
* @default `[]`
*/
exclude?: string[];
};
/**
* "Cron" definitions to trigger a worker's "scheduled" function.
* Lets you call workers periodically, much like a cron job.
* More details here https://developers.cloudflare.com/workers/platform/cron-triggers
*
* @inherited
* @default `{ crons: [] }`
* @optional
*/
triggers?: { crons: string[] };
/**
* Options to configure the development server that your worker will use.
* NB: This is NOT inherited, and SHOULD NOT be duplicated across all environments.
*
* @default `{}`
* @optional
* @inherited false
*/
dev?: {
/**
* IP address for the local dev server to listen on,
*
* @default `127.0.0.1`
* @todo this needs to be implemented
*/
ip?: string;
/**
* Port for the local dev server to listen on
*
* @default `8787`
*/
port?: number;
/**
* Protocol that local wrangler dev server listens to requests on.
*
* @default `http`
* @todo this needs to be implemented
*/
local_protocol?: string;
/**
* Protocol that wrangler dev forwards requests on
*
* @default `https`
* @todo this needs to be implemented
*/
upstream_protocol?: string;
};
/**
* Specifies the Usage Model for your Worker. There are two options -
* [bundled](https://developers.cloudflare.com/workers/platform/limits#bundled-usage-model) and
* [unbound](https://developers.cloudflare.com/workers/platform/limits#unbound-usage-model).
* For newly created Workers, if the Usage Model is omitted
* it will be set to the [default Usage Model set on the account](https://dash.cloudflare.com/?account=workers/default-usage-model).
* For existing Workers, if the Usage Model is omitted, it will be
* set to the Usage Model configured in the dashboard for that Worker.
*/
usage_model?: undefined | "bundled" | "unbound";
/**
* Configures a custom build step to be run by Wrangler when
* building your Worker. Refer to the [custom builds documentation](https://developers.cloudflare.com/workers/cli-wrangler/configuration#build)
* for more details.
*
* @default `undefined`
* @optional
* @inherited false
*/
build?: {
/** The command used to build your Worker. On Linux and macOS, the command is executed in the `sh` shell and the `cmd` shell for Windows. The `&&` and `||` shell operators may be used. */
command?: string;
/** The directory in which the command is executed. */
cwd?: string;
/** The directory to watch for changes while using wrangler dev, defaults to the current working directory */
watch_dir?: string;
} & /**
* Much of the rest of this configuration isn't necessary anymore
* in wrangler2. We infer the format automatically, and we can pass
* the path to the script either in the CLI (or, @todo, as the top level
* `entry` property).
*/ (
| {
upload?: {
/**
* The format of the Worker script, must be "service-worker".
*
* @deprecated We infer the format automatically now.
*/
format?: "service-worker";
/**
* The path to the Worker script. This should be replaced
* by the top level `entry' property.
*
* @deprecated This will be replaced by the top level `entry' property.
*/
main: string;
};
}
| {
/**
* When we use the module format, we only really
* need to specify the entry point. The format is deduced
* automatically in wrangler2.
*/
upload?: {
/**
* The format of the Worker script, must be "modules".
*
* @deprecated We infer the format automatically now.
*/
format?: "modules";
/**
* The directory you wish to upload your modules from,
* defaults to the dist relative to the project root directory.
*
* @deprecated
* @breaking
*/
dir?: string;
/**
* The path to the Worker script. This should be replaced
* by the top level `entry' property.
*
* @deprecated This will be replaced by the top level `entry' property.
*/
main?: string;
/**
* An ordered list of rules that define which modules to import,
* and what type to import them as. You will need to specify rules
* to use Text, Data, and CompiledWasm modules, or when you wish to
* have a .js file be treated as an ESModule instead of CommonJS.
*
* @deprecated These are now inferred automatically for major file types, but you can still specify them manually.
* @todo this needs to be implemented!
* @breaking
*/
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[];
fallthrough?: boolean;
};
};
}
);
/**
* The `env` section defines overrides for the configuration for
* different environments. Most fields can be overridden, while
* some have to be specifically duplicated in every environment.
* For more information, see the documentation at https://developers.cloudflare.com/workers/cli-wrangler/configuration#environments
*/
env?: {
[envName: string]:
| undefined
| Omit<Config, "env" | "migrations" | "site" | "dev">;
};
};
type Build = {
command?: string;
cwd?: string;
watch_dir?: string;
} & (
| {
upload?: {
format: "service-worker";
main: string;
};
type ValidationResults = Array<
  | { key: string; info: string }
  | { key: string; error: string }
  | { key: string; warning: string }
>;

/**
 * Manually validate every field in the configuration against the type
 * definitions, plus the extra constraints we apply to some fields and
 * to combinations of fields. The results are used for presenting errors
 * and messages to the user. Eventually, we will combine this with some
 * automatic config rewriting tools.
 */
export async function validateConfig(
  _config: Partial<Config>
): Promise<ValidationResults> {
  // No validation rules are implemented yet, so there is nothing to report.
  return [];
}
/**
* Process the environments (`env`) specified in the `config`.
*
* The environments configuration is complicated since each environment is a customized version of the main config.
* Some of the configuration can be inherited from the main config, while other configuration must replace what is in the main config.
*
* This function ensures that each environment is set up correctly with inherited configuration, as necessary.
* It will log a warning if an environment is missing required configuration.
*/
export function normaliseAndValidateEnvironmentsConfig(config: Config) {
if (config.env == undefined) {
// There are no environments specified so there is nothing to do here.
return;
}
const environments = config.env;
for (const envKey of Object.keys(environments)) {
const environment = environments[envKey];
// Given how TOML works, there should never be an environment containing nothing.
// I.e. if there is a section in a TOML file, then the parser will create an object for it.
// But it may be possible in the future if we change how the configuration is stored.
assert(
environment,
`Environment ${envKey} is specified in the config but not defined.`
);
// Fall back on "inherited fields" from the config, if not specified in the environment.
const inheritedFields = [
"name",
"account_id",
"workers_dev",
"compatibility_date",
"compatibility_flags",
"zone_id",
"routes",
"route",
"jsx_factory",
"jsx_fragment",
"site",
"triggers",
"usage_model",
];
for (const inheritedField of inheritedFields) {
if (config[inheritedField] !== undefined) {
if (environment[inheritedField] === undefined) {
environment[inheritedField] = config[inheritedField]; // TODO: - shallow or deep copy?
}
}
}
| {
upload?: {
format: "modules";
dir?: string;
main?: string;
rules?: {
type: "ESModule" | "CommonJS" | "Text" | "Data" | "CompiledWasm";
globs: string[]; // can we use typescript for these patterns?
fallthrough?: boolean;
};
};
// Warn if there is a "required" field in the top level config that has not been specified specified in the environment.
// These required fields are `vars`, `durable_objects`, `kv_namespaces` and `experimental_services`.
// Each of them has different characteristics that need to be checked.
// `vars` is just an object
if (config.vars !== undefined) {
if (environment.vars === undefined) {
console.warn(
`In your configuration, "vars" exists at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "vars" is not inherited by environments.\n` +
`Please add "vars" to "env.${envKey}".`
);
} else {
for (const varField of Object.keys(config.vars)) {
if (!(varField in environment.vars)) {
console.warn(
`In your configuration, "vars.${varField}" exists at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "vars" is not inherited by environments.\n` +
`Please add "vars.${varField}" to "env.${envKey}".`
);
}
}
}
}
);
type UsageModel = "bundled" | "unbound";
// `durable_objects` is an object containing a `bindings` array
if (config.durable_objects !== undefined) {
if (environment.durable_objects === undefined) {
console.warn(
`In your configuration, "durable_objects.bindings" exists at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "durable_objects" is not inherited by environments.\n` +
`Please add "durable_objects.bindings" to "env.${envKey}".`
);
} else {
const envBindingNames = new Set(
environment.durable_objects.bindings.map((b) => b.name)
);
for (const bindingName of config.durable_objects.bindings.map(
(b) => b.name
)) {
if (!envBindingNames.has(bindingName)) {
console.warn(
`In your configuration, there is a durable_objects binding with name "${bindingName}" at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "durable_objects" is not inherited by environments.\n` +
`Please add a binding for "${bindingName}" to "env.${envKey}.durable_objects.bindings".`
);
}
}
}
}
type Env = {
name?: string; // inherited
account_id?: string; // inherited
workers_dev?: boolean; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
webpack_config?: string; // inherited
site?: Site;
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
kv_namespaces?: KVNamespace[];
};
// `kv_namespaces` contains an array of namespace bindings
if (config.kv_namespaces !== undefined) {
if (environment.kv_namespaces === undefined) {
console.warn(
`In your configuration, "kv_namespaces" exists at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "kv_namespaces" is not inherited by environments.\n` +
`Please add "kv_namespaces" to "env.${envKey}".`
);
} else {
const envBindings = new Set(
environment.kv_namespaces.map((kvNamespace) => kvNamespace.binding)
);
for (const bindingName of config.kv_namespaces.map(
(kvNamespace) => kvNamespace.binding
)) {
if (!envBindings.has(bindingName)) {
console.warn(
`In your configuration, there is a kv_namespaces with binding "${bindingName}" at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "kv_namespaces" is not inherited by environments.\n` +
`Please add a binding for "${bindingName}" to "env.${envKey}.kv_namespaces".`
);
}
}
}
}
export type Config = {
name?: string; // inherited
account_id?: string; // inherited
// @deprecated Don't use this
type?: Project; // top level
// -- there's some mutually exclusive logic for this next block,
// but I didn't bother for now
workers_dev?: boolean; // inherited
zone_id?: string; // inherited
routes?: string[]; // inherited
route?: string; // inherited
// -- end mutually exclusive stuff
// @deprecated Don't use this
webpack_config?: string; // inherited
vars?: Vars;
kv_namespaces?: KVNamespace[];
site?: Site; // inherited
// we should use typescript to parse cron patterns
triggers?: { crons: Cron[] }; // inherited
dev?: Dev;
usage_model?: UsageModel; // inherited
// top level
build?: Build;
env?: { [envName: string]: Env };
};
// `experimental_services` contains an array of namespace bindings
if (config.experimental_services !== undefined) {
if (environment.experimental_services === undefined) {
console.warn(
`In your configuration, "experimental_services" exists at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "experimental_services" is not inherited by environments.\n` +
`Please add "experimental_services" to "env.${envKey}".`
);
} else {
const envBindingNames = new Set(
environment.experimental_services.map((service) => service.name)
);
for (const bindingName of config.experimental_services.map(
(service) => service.name
)) {
if (!envBindingNames.has(bindingName)) {
console.warn(
`In your configuration, there is a experimental_services with binding name "${bindingName}" at the top level, but not on "env.${envKey}".\n` +
`This is not what you probably want, since "experimental_services" is not inherited by environments.\n` +
`Please add a service for "${bindingName}" to "env.${envKey}.experimental_services".`
);
}
}
}
}
}
}

@@ -0,25 +1,34 @@

import assert from "node:assert";
import path from "node:path";
import { readFile } from "node:fs/promises";
import * as esbuild from "esbuild";
import type { Metafile } from "esbuild";
import { execaCommand } from "execa";
import tmp from "tmp-promise";
import type { CfWorkerInit } from "./api/worker";
import { toFormData } from "./api/form_data";
import esbuild from "esbuild";
import tmp from "tmp-promise";
import { fetchResult } from "./cfetch";
import type { Config } from "./config";
import path from "path";
import { readFile } from "fs/promises";
import cfetch from "./cfetch";
import assert from "node:assert";
import makeModuleCollector from "./module-collection";
import type { AssetPaths } from "./sites";
import { syncAssets } from "./sites";
import { URLSearchParams } from "node:url";
type CfScriptFormat = void | "modules" | "service-worker";
type CfScriptFormat = undefined | "modules" | "service-worker";
type Props = {
config: Config;
format?: CfScriptFormat;
script?: string;
name?: string;
env?: string;
public?: string;
site?: string;
triggers?: (string | number)[];
routes?: (string | number)[];
legacyEnv?: boolean;
format: CfScriptFormat | undefined;
script: string | undefined;
name: string | undefined;
env: string | undefined;
compatibilityDate: string | undefined;
compatibilityFlags: string[] | undefined;
assetPaths: AssetPaths | undefined;
triggers: (string | number)[] | undefined;
routes: (string | number)[] | undefined;
legacyEnv: boolean | undefined;
jsxFactory: undefined | string;
jsxFragment: undefined | string;
experimentalPublic: boolean;
};

@@ -32,6 +41,6 @@

export default async function publish(props: Props): Promise<void> {
if (props.public && props.format === "service-worker") {
if (props.experimentalPublic && props.format === "service-worker") {
// TODO: check config too
throw new Error(
"You cannot use the service worker format with a public directory."
"You cannot publish in the service worker format with a public directory."
);

@@ -48,18 +57,55 @@ }

const envRootObj =
props.env && config.env ? config.env[props.env] || {} : config;
assert(
envRootObj.compatibility_date || props.compatibilityDate,
"A compatibility_date is required when publishing. Add one to your wrangler.toml file, or pass it in your terminal as --compatibility_date. See https://developers.cloudflare.com/workers/platform/compatibility-dates for more information."
);
if (accountId === undefined) {
throw new Error("No account_id provided.");
}
const triggers = props.triggers || config.triggers?.crons;
const routes = props.routes || config.routes;
const jsxFactory = props.jsxFactory || config.jsx_factory;
const jsxFragment = props.jsxFragment || config.jsx_fragment;
assert(config.account_id, "missing account id");
let scriptName = props.name || config.name;
assert(
scriptName,
'You need to provide a name when publishing a worker. Either pass it as a cli arg with `--name <name>` or in your config file as `name = "<name>"`'
);
if (config.site?.["entry-point"]) {
console.warn(
"Deprecation notice: The `site.entry-point` config field is no longer used.\n" +
"The entry-point should be specified via the command line (e.g. `wrangler publish path/to/script`) or the `build.upload.main` config field.\n" +
"Please remove the `site.entry-point` field from the `wrangler.toml` file."
);
}
assert(
!config.site || config.site.bucket,
"A [site] definition requires a `bucket` field with a path to the site's public directory."
);
let file: string;
if (props.script) {
file = props.script;
assert(props.name, "name is required when using script");
// If the script name comes from the command line it is relative to the current working directory.
file = path.resolve(props.script);
} else {
assert(build?.upload?.main, "missing main file");
assert(config.name, "missing name");
file = path.join(path.dirname(__path__), build.upload.main);
// If the script name comes from the config, then it is relative to the wrangler.toml file.
if (build?.upload?.main === undefined) {
throw new Error(
"Missing entry-point: The entry-point should be specified via the command line (e.g. `wrangler publish path/to/script`) or the `build.upload.main` config field."
);
}
file = path.resolve(path.dirname(__path__), build.upload.main);
}
let scriptName = props.script ? props.name : config.name;
if (props.legacyEnv) {

@@ -71,5 +117,16 @@ scriptName += props.env ? `-${props.env}` : "";

const destination = await tmp.dir({ unsafeCleanup: true });
if (props.config.build?.command) {
// TODO: add a deprecation message here?
console.log("running:", props.config.build.command);
await execaCommand(props.config.build.command, {
shell: true,
stdout: "inherit",
stderr: "inherit",
...(props.config.build?.cwd && { cwd: props.config.build.cwd }),
});
}
const moduleCollector = makeModuleCollector();
const result = await esbuild.build({
...(props.public
...(props.experimentalPublic
? {

@@ -82,10 +139,10 @@ stdin: {

)
).replace("__ENTRY_POINT__", path.join(process.cwd(), file)),
).replace("__ENTRY_POINT__", file),
sourcefile: "static-asset-facade.js",
resolveDir: path.dirname(file),
},
nodePaths: [path.join(__dirname, "../vendor")],
}
: { entryPoints: [file] }),
bundle: true,
nodePaths: props.public ? [path.join(__dirname, "../vendor")] : undefined,
outdir: destination.path,

@@ -96,16 +153,36 @@ external: ["__STATIC_CONTENT_MANIFEST"],

metafile: true,
conditions: ["worker", "browser"],
loader: {
".js": "jsx",
".html": "text",
".pem": "text",
".txt": "text",
},
plugins: [moduleCollector.plugin],
...(jsxFactory && { jsxFactory }),
...(jsxFragment && { jsxFragment }),
});
const chunks = Object.entries(result.metafile.outputs).find(
([_path, { entryPoint }]) =>
entryPoint ===
(props.public
? path.join(path.dirname(file), "static-asset-facade.js")
: file)
// result.metafile is defined because of the `metafile: true` option above.
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const metafile = result.metafile!;
const entryPoints = Object.entries(metafile.outputs).filter(
([_path, output]) => output.entryPoint !== undefined
);
assert(
entryPoints.length > 0,
`Cannot find entry-point "${file}" in generated bundle.` +
listEntryPoints(entryPoints)
);
assert(
entryPoints.length < 2,
"More than one entry-point found for generated bundle." +
listEntryPoints(entryPoints)
);
const entryPointExports = entryPoints[0][1].exports;
const resolvedEntryPointPath = entryPoints[0][0];
const { format } = props;
const bundle = {
type: chunks[1].exports.length > 0 ? "esm" : "commonjs",
exports: chunks[1].exports,
type: entryPointExports.length > 0 ? "esm" : "commonjs",
exports: entryPointExports,
};

@@ -127,50 +204,97 @@

const content = await readFile(chunks[0], { encoding: "utf-8" });
destination.cleanup();
const assets =
props.public || props.site || props.config.site?.bucket // TODO: allow both
? await syncAssets(
accountId,
scriptName,
props.public || props.site || props.config.site?.bucket,
false
)
: { manifest: undefined, namespace: undefined };
let content = await readFile(resolvedEntryPointPath, { encoding: "utf-8" });
await destination.cleanup();
const envRootObj = props.env ? config[`env.${props.env}`] : config;
// if config.migrations
// get current migration tag
let migrations;
if (config.migrations !== undefined) {
const scripts = await fetchResult<{ id: string; migration_tag: string }[]>(
`/accounts/${accountId}/workers/scripts`
);
const script = scripts.find(({ id }) => id === scriptName);
if (script?.migration_tag) {
// was already published once
const foundIndex = config.migrations.findIndex(
(migration) => migration.tag === script.migration_tag
);
if (foundIndex === -1) {
console.warn(
`The published script ${scriptName} has a migration tag "${script.migration_tag}, which was not found in wrangler.toml. You may have already deleted it. Applying all available migrations to the script...`
);
migrations = {
old_tag: script.migration_tag,
new_tag: config.migrations[config.migrations.length - 1].tag,
steps: config.migrations.map(({ tag: _tag, ...rest }) => rest),
};
} else {
migrations = {
old_tag: script.migration_tag,
new_tag: config.migrations[config.migrations.length - 1].tag,
steps: config.migrations
.slice(foundIndex + 1)
.map(({ tag: _tag, ...rest }) => rest),
};
}
} else {
migrations = {
new_tag: config.migrations[config.migrations.length - 1].tag,
steps: config.migrations.map(({ tag: _tag, ...rest }) => rest),
};
}
}
const assets = await syncAssets(
accountId,
scriptName,
props.assetPaths,
false,
props.env
);
const bindings: CfWorkerInit["bindings"] = {
kv_namespaces: (envRootObj.kv_namespaces || []).concat(
assets.namespace
? { binding: "__STATIC_CONTENT", id: assets.namespace }
: []
),
vars: envRootObj.vars,
durable_objects: envRootObj.durable_objects,
services: envRootObj.experimental_services,
};
const workerType = bundle.type === "esm" ? "esm" : "commonjs";
if (workerType !== "esm" && assets.manifest) {
content = `const __STATIC_CONTENT_MANIFEST = ${JSON.stringify(
assets.manifest
)};\n${content}`;
}
const worker: CfWorkerInit = {
name: scriptName,
main: {
name: scriptName,
name: path.basename(resolvedEntryPointPath),
content: content,
type:
(bundle.type === "esm" ? "modules" : "service-worker") === "modules"
? "esm"
: "commonjs",
type: workerType,
},
variables: {
...(envRootObj?.vars || {}),
...(envRootObj?.kv_namespaces || []).reduce(
(obj, { binding, preview_id, id }) => {
return { ...obj, [binding]: { namespaceId: id } };
},
{}
),
...(assets.namespace
? { __STATIC_CONTENT: { namespaceId: assets.namespace } }
: {}),
},
modules: assets.manifest
? [].concat({
name: "__STATIC_CONTENT_MANIFEST",
content: JSON.stringify(assets.manifest),
type: "text",
})
: [],
bindings,
...(migrations && { migrations }),
modules: moduleCollector.modules.concat(
assets.manifest && workerType === "esm"
? {
name: "__STATIC_CONTENT_MANIFEST",
content: JSON.stringify(assets.manifest),
type: "text",
}
: []
),
compatibility_date: config.compatibility_date,
compatibility_flags: config.compatibility_flags,
usage_model: config.usage_model,
};
const start = Date.now();
/** Render a duration in milliseconds as a human-readable string, e.g. "(1.23 sec)". */
function formatTime(duration: number) {
  return `(${(duration / 1000).toFixed(2)} sec)`;
}

@@ -183,10 +307,10 @@ const notProd = !props.legacyEnv && props.env;

// Upload the script so it has time to propogate.
const { available_on_subdomain } = await cfetch(
`${workerUrl}?available_on_subdomain=true`,
// Upload the script so it has time to propagate.
const { available_on_subdomain } = await fetchResult(
workerUrl,
{
method: "PUT",
// @ts-expect-error: TODO: fix this type error!
body: toFormData(worker),
}
},
new URLSearchParams({ available_on_subdomains: "true" })
);

@@ -199,3 +323,3 @@

const userSubdomain = (
await cfetch<{ subdomain: string }>(
await fetchResult<{ subdomain: string }>(
`/accounts/${accountId}/workers/subdomain`

@@ -214,3 +338,3 @@ )

deployments.push(
cfetch(`${workerUrl}/subdomain`, {
fetchResult(`${workerUrl}/subdomain`, {
method: "POST",

@@ -227,4 +351,4 @@ body: JSON.stringify({ enabled: true }),

// This is a temporary measure until we fix this on the edge.
.then((url) => {
sleep(3000);
.then(async (url) => {
await sleep(3000);
return url;

@@ -240,3 +364,3 @@ })

deployments.push(
cfetch(`${workerUrl}/routes`, {
fetchResult(`${workerUrl}/routes`, {
// TODO: PATCH will not delete previous routes on this script,

@@ -266,3 +390,3 @@ // whereas PUT will. We need to decide on the default behaviour

deployments.push(
cfetch(`${workerUrl}/schedules`, {
fetchResult(`${workerUrl}/schedules`, {
// TODO: Unlike routes, this endpoint does not support PATCH.

@@ -291,1 +415,9 @@ // So technically, this will override any previous schedules.

}
/**
 * Render the entry points of a set of esbuild outputs as a
 * newline-separated list, for use in assertion/error messages.
 */
function listEntryPoints(
  outputs: [string, ValueOf<Metafile["outputs"]>][]
): string {
  const entryPointNames: (string | undefined)[] = [];
  for (const [, output] of outputs) {
    entryPointNames.push(output.entryPoint);
  }
  return entryPointNames.join("\n");
}

/** The union of all value types of an object type. */
type ValueOf<T> = T[keyof T];

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc