You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

@vercel/backends

Package Overview
Dependencies
Maintainers
2
Versions
40
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@vercel/backends - npm Package Compare versions

Comparing version
0.0.27
to
0.0.28
+208
dist/rolldown/cjs-hooks.cjs
//#region rolldown:runtime
// Bundler (rolldown) CommonJS-interop runtime helpers.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies all own properties of `from` onto `to` as live getters (so later
// mutations of the source module remain visible), skipping `except` and any
// key already present on `to`. `desc` is a scratch slot reused inside the
// loop, not a real argument.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
key = keys[i];
if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
get: ((k) => from[k]).bind(null, key),
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
});
}
return to;
};
// Wraps a CommonJS export object as an ESM-style namespace: a fresh object
// carrying the source's prototype, a `default` property pointing at the
// module itself (unless it already is an ES module in node-compat mode),
// plus getter copies of every named export.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
value: mod,
enumerable: true
}) : target, mod));
//#endregion
let node_module = require("node:module");
node_module = __toESM(node_module);
let node_path = require("node:path");
node_path = __toESM(node_path);
let node_fs = require("node:fs");
let path_to_regexp = require("path-to-regexp");
let __vercel_build_utils = require("@vercel/build-utils");
//#region src/rolldown/util.ts
// Markers used by the parent build process to find the introspection JSON in
// the child's stdout.
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
/**
 * Registers `cb` to run when the process is about to terminate (SIGINT,
 * SIGTERM, or normal exit). If `cb` returns a truthy result it is printed
 * to stdout wrapped in the introspection markers.
 *
 * Fix: the callback is now invoked at most once. Previously a SIGTERM (or
 * SIGINT) followed by the `exit` event invoked `cb` twice and printed two
 * marker-delimited payloads to stdout.
 */
const setupCloseHandlers = (cb) => {
  let invoked = false;
  const callCallback = () => {
    if (invoked) return;
    invoked = true;
    const result = cb();
    if (result) console.log(`${BEGIN_INTROSPECTION_RESULT}${JSON.stringify(result)}${END_INTROSPECTION_RESULT}`);
  };
  process.on("SIGINT", callCallback);
  process.on("SIGTERM", callCallback);
  process.on("exit", callCallback);
};
//#endregion
//#region src/introspection/hono.ts
// Every Hono application constructed through the wrapped module is recorded
// here so route extraction can later pick the most route-rich instance.
const apps = [];
/**
 * Returns a subclass of the module's `Hono` class that registers each
 * constructed application instance in `apps`.
 */
const handle = (honoModule) => {
  return class TrackedHono extends honoModule.Hono {
    constructor(...ctorArgs) {
      super(...ctorArgs);
      apps.push(this);
    }
  };
};
// On shutdown, emit the introspected Hono routes; returning undefined when
// no routes were found means nothing gets printed.
setupCloseHandlers(() => {
const routes = extractRoutes$1();
if (routes.length > 0) return { routes };
});
/**
 * Extracts Vercel route entries from the tracked Hono app with the most
 * routes.
 *
 * Fixes over the previous version:
 * - the "/" root route is skipped *before* calling pathToRegexp, instead of
 *   computing a regexp and discarding it;
 * - `apps` is copied before sorting, so the shared module-level array is no
 *   longer reordered as a side effect.
 *
 * @returns {Array<{src: string, dest: string, methods: string[]}>}
 */
function extractRoutes$1() {
  const app$1 = [...apps].sort((a, b) => b.routes.length - a.routes.length)[0];
  if (!app$1 || !app$1.routes) return [];
  const routes = [];
  for (const route of app$1.routes) {
    const routePath = route.path;
    // Root is already covered by the catch-all route; no rewrite needed.
    if (routePath === "/") continue;
    const method = route.method.toUpperCase();
    try {
      const { regexp } = (0, path_to_regexp.pathToRegexp)(routePath);
      routes.push({
        src: regexp.source,
        dest: routePath,
        methods: [method]
      });
    } catch (e) {
      (0, __vercel_build_utils.debug)(`Error extracting routes for ${routePath}: ${e instanceof Error ? e.message : "Unknown error"}`);
    }
  }
  return routes;
}
//#endregion
//#region src/introspection/express.ts
// Most recently created Express application, captured by the wrapper below.
let app = null;
/**
 * Wraps the `express` factory so the app it creates is captured in `app` for
 * later route extraction. The original factory's prototype and enumerable
 * static properties are carried over so the wrapper is a drop-in
 * replacement; non-function exports pass through untouched.
 */
const handle$1 = (expressModule) => {
  if (typeof expressModule !== "function") return expressModule;
  const factory = expressModule;
  const trackingFactory = (...factoryArgs) => {
    app = factory(...factoryArgs);
    return app;
  };
  Object.setPrototypeOf(trackingFactory, factory);
  Object.assign(trackingFactory, factory);
  return trackingFactory;
};
// On shutdown, report routes plus any view folders / view-engine deps found
// on the captured Express app; undefined (no routes) prints nothing.
setupCloseHandlers(() => {
const { routes, additionalFolders, additionalDeps } = extractRoutes();
if (routes.length > 0) return {
routes,
additionalFolders,
additionalDeps
};
});
/**
 * Extracts route metadata from the captured Express app: Vercel route
 * entries (src regexp / dest path / HTTP methods), plus any view folder
 * ("views" setting) and template-engine dependency ("view engine" setting).
 *
 * Fixes over the previous version:
 * - guards against an app whose `_router`/`router` is absent (no routes or
 *   middleware ever registered), which previously threw on `router.stack`;
 * - skips the "/" root route *before* calling pathToRegexp instead of
 *   computing a regexp and discarding it.
 */
const extractRoutes = () => {
  if (!app) return {
    routes: [],
    additionalFolders: [],
    additionalDeps: []
  };
  const additionalFolders = [];
  const additionalDeps = [];
  const routes = [];
  const methods = [
    "all",
    "get",
    "post",
    "put",
    "delete",
    "patch",
    "options",
    "head"
  ];
  // Express 4 exposes `_router`; Express 5 exposes `router`.
  const router = app._router || app.router;
  if ("settings" in app) {
    if ("views" in app.settings && typeof app.settings.views === "string") additionalFolders.push(app.settings.views);
    if ("view engine" in app.settings && typeof app.settings["view engine"] === "string") additionalDeps.push(app.settings["view engine"]);
  }
  if (!router || !Array.isArray(router.stack)) return {
    routes,
    additionalFolders,
    additionalDeps
  };
  for (const route of router.stack) if (route.route) {
    if (route.route.path === "/") continue;
    const m = [];
    for (const method of methods) if (route.route.methods[method]) m.push(method.toUpperCase());
    try {
      const { regexp } = (0, path_to_regexp.pathToRegexp)(route.route.path);
      routes.push({
        src: regexp.source,
        dest: route.route.path,
        methods: m
      });
    } catch (e) {
      const message = e instanceof Error ? e.message : "Unknown error";
      (0, __vercel_build_utils.debug)(`Error extracting routes for ${route.route.path}: ${message}`);
    }
  }
  return {
    routes,
    additionalFolders,
    additionalDeps
  };
};
//#endregion
//#region src/rolldown/cjs-hooks.ts
/**
* CJS preload script that intercepts require() calls.
* - Check tmpDir first for files
* - Map bare specifiers from tmpDir to repoRootPath
* - Wrap hono/express modules with instrumentation
*/
const repoRootPath = process.env.VERCEL_INTROSPECTION_REPO_ROOT_PATH;
const tmpDirEnv = process.env.VERCEL_INTROSPECTION_TMP_DIR;
if (!repoRootPath || !tmpDirEnv) throw new Error("VERCEL_INTROSPECTION_REPO_ROOT_PATH and VERCEL_INTROSPECTION_TMP_DIR must be set");
// Resolve symlinks (e.g. /tmp -> /private/tmp on macOS) so prefix checks
// against parent module paths behave consistently.
const tmpDir = (0, node_fs.realpathSync)(tmpDirEnv);
// Cache of instrumented module exports, keyed by specifier ("hono"/"express").
const wrappedModules = /* @__PURE__ */ new Map();
// Patch Module._resolveFilename so requires issued from the built output in
// tmpDir resolve as if they came from the original repo location.
const originalResolveFilename = node_module.default._resolveFilename;
node_module.default._resolveFilename = function(request, parent, isMain, options) {
// Relative request naming an existing file next to the parent: return it
// directly (extension-less specifiers fall through to Node's resolver).
if (request.startsWith(".") && parent?.filename) {
const parentDir = node_path.default.dirname(parent.filename);
const resolvedPath = node_path.default.resolve(parentDir, request);
if ((0, node_fs.existsSync)(resolvedPath) && (0, node_fs.statSync)(resolvedPath).isFile()) return resolvedPath;
}
// Bare specifier required from within tmpDir: pretend the parent lives at
// the mapped repo path so node_modules lookup walks the real project tree.
// NOTE(review): `startsWith(tmpDir)` is a plain string prefix check — a
// sibling path like "<tmpDir>-extra" would also match; presumably tmpDir is
// unique enough in practice — confirm before tightening.
if (!request.startsWith(".") && !request.startsWith("/") && parent?.filename?.startsWith(tmpDir)) {
const relativeToTmp = node_path.default.relative(tmpDir, parent.filename);
const mappedParentPath = node_path.default.join(repoRootPath, relativeToTmp);
const fakeParent = {
...parent,
filename: mappedParentPath,
paths: node_module.default._nodeModulePaths(node_path.default.dirname(mappedParentPath))
};
return originalResolveFilename.call(this, request, fakeParent, isMain, options);
}
return originalResolveFilename.call(this, request, parent, isMain, options);
};
// Patch Module._load to hand back instrumented versions of the frameworks we
// introspect; every other request passes through untouched.
const originalLoad = node_module.default._load;
node_module.default._load = function(request, parent, isMain) {
const result = originalLoad.call(this, request, parent, isMain);
if (request === "hono") {
if (wrappedModules.has("hono")) return wrappedModules.get("hono");
// Replace the Hono export with a subclass that records constructed apps.
const TrackedHono = handle(result);
const wrapped = {
...result,
Hono: TrackedHono
};
wrappedModules.set("hono", wrapped);
return wrapped;
}
if (request === "express") {
if (wrappedModules.has("express")) return wrappedModules.get("express");
// express exports a factory function; wrap it so created apps are captured.
const wrapped = handle$1(result);
wrappedModules.set("express", wrapped);
return wrapped;
}
return result;
};
//#endregion
import { register } from "node:module";
//#region src/rolldown/esm.ts
// Install the ESM customization hooks (resolve/load) from hooks.mjs for this
// process; import.meta.url anchors the relative path to this bundle.
register(new URL("./hooks.mjs", import.meta.url), import.meta.url);
//#endregion
export { };
/// <reference types="node" resolution-mode="require"/>
//#region src/rolldown/hooks.d.ts
/** Context object passed to an ESM `resolve` hook invocation. */
interface ResolveContext {
parentURL?: string;
conditions?: string[];
importAttributes?: Record<string, string>;
}
/** Result of an ESM `resolve` hook: the resolved URL plus hook controls. */
interface ResolveResult {
url: string;
shortCircuit?: boolean;
format?: string;
}
/** Context object passed to an ESM `load` hook invocation. */
interface LoadContext {
format?: string;
importAttributes?: Record<string, string>;
}
/** Result of an ESM `load` hook: module format and optional source text. */
interface LoadResult {
format: string;
source?: string | Buffer;
shortCircuit?: boolean;
}
/** Next resolver in the module-customization chain. */
type NextResolve = (specifier: string, context: ResolveContext) => Promise<ResolveResult>;
/** Next loader in the module-customization chain. */
type NextLoad = (url: string, context: LoadContext) => Promise<LoadResult>;
/** Custom `resolve` hook (Node.js module customization hooks API). */
declare function resolve(specifier: string, context: ResolveContext, nextResolve: NextResolve): Promise<ResolveResult>;
/** Custom `load` hook (Node.js module customization hooks API). */
declare function load(url: string, context: LoadContext, nextLoad: NextLoad): Promise<LoadResult>;
//#endregion
export { load, resolve };
import { fileURLToPath, pathToFileURL } from "node:url";
import { dirname, join, relative } from "node:path";
import { existsSync, realpathSync, statSync } from "node:fs";
//#region src/rolldown/hooks.ts
/**
 * Reads an environment variable and throws when it is missing or empty
 * (an empty string counts as "not set", matching the truthiness check).
 */
const getRequiredEnv = (key) => {
  const value = process.env[key];
  if (value) return value;
  throw new Error(`${key} is not set`);
};
// Environment contract for the introspection hooks, set by the parent build
// process; throws early when anything is missing.
const repoRootPath = getRequiredEnv("VERCEL_INTROSPECTION_REPO_ROOT_PATH");
const handlerPath = getRequiredEnv("VERCEL_INTROSPECTION_HANDLER");
const handlerBuilt = getRequiredEnv("VERCEL_INTROSPECTION_HANDLER_BUILT");
// realpathSync so prefix comparisons against resolved parent paths hold.
const tmpDir = realpathSync(getRequiredEnv("VERCEL_INTROSPECTION_TMP_DIR"));
// Resolved module URLs for the frameworks we instrument; filled in by
// `resolve`, consumed by `load`.
let honoUrl = null;
let expressUrl = null;
/**
 * ESM `resolve` hook.
 * - Requests for the original handler path are redirected to the built
 *   handler inside tmpDir.
 * - Relative specifiers naming an existing file next to their parent are
 *   short-circuited to that file.
 * - Bare specifiers imported from within tmpDir are resolved as if the
 *   importing file still lived at its original repo location (so
 *   node_modules lookup uses the real project tree); the resolved URLs for
 *   "hono"/"express" are remembered for the `load` hook.
 */
async function resolve(specifier, context, nextResolve) {
let specifierAsPath = null;
try {
specifierAsPath = fileURLToPath(specifier);
} catch {}
if (specifierAsPath === handlerPath) return {
url: pathToFileURL(join(tmpDir, handlerBuilt)).href,
shortCircuit: true
};
if (specifier.startsWith(".") && context.parentURL) {
const resolvedPath = join(dirname(fileURLToPath(context.parentURL)), specifier);
if (existsSync(resolvedPath) && statSync(resolvedPath).isFile()) return {
url: pathToFileURL(resolvedPath).href,
shortCircuit: true
};
}
if (!specifier.startsWith(".") && !specifier.startsWith("/") && !specifier.startsWith("file:") && context.parentURL) {
const parentPath = fileURLToPath(context.parentURL);
// NOTE(review): plain string prefix check — a sibling path such as
// "<tmpDir>-extra" would also match; presumably tmpDir is unique enough in
// practice — confirm before tightening.
if (parentPath.startsWith(tmpDir)) {
const mappedParent = join(repoRootPath, relative(tmpDir, parentPath));
const result = await nextResolve(specifier, {
...context,
parentURL: pathToFileURL(mappedParent).href
});
if (specifier === "hono") honoUrl = result.url;
else if (specifier === "express") expressUrl = result.url;
return result;
}
}
return nextResolve(specifier, context);
}
/**
 * ESM `load` hook. When the URL matches the previously resolved hono or
 * express entry, substitutes a shim module that imports the original with a
 * `?original` query (so this hook does not intercept it again) and wraps it
 * with the introspection `handle` instrumentation. Everything else —
 * including the `?original` variants — is delegated to `nextLoad`.
 *
 * Fix: removed a dead branch — the previous version had
 * `if (url.endsWith("?original")) return nextLoad(url, context);` directly
 * before an identical unconditional `return nextLoad(url, context);`.
 */
async function load(url, context, nextLoad) {
  if (honoUrl === url) {
    const pathToHonoExtract = new URL("../introspection/hono.mjs", import.meta.url);
    return {
      format: "module",
      source: `
import { handle } from ${JSON.stringify(pathToHonoExtract.toString())};
import * as originalHono from ${JSON.stringify(url + "?original")};
export * from ${JSON.stringify(url + "?original")};
export const Hono = handle(originalHono);
`,
      shortCircuit: true
    };
  }
  if (expressUrl === url) {
    const pathToExpressExtract = new URL("../introspection/express.mjs", import.meta.url);
    return {
      format: "module",
      source: `
import { handle } from ${JSON.stringify(pathToExpressExtract.toString())};
import originalExpress from ${JSON.stringify(url + "?original")};
const extendedExpress = handle(originalExpress);
export * from ${JSON.stringify(url + "?original")};
export default extendedExpress;
`,
      shortCircuit: true
    };
  }
  return nextLoad(url, context);
}
//#endregion
export { load, resolve };
import { BuildOptions, Files, Span } from "@vercel/build-utils";
//#region src/rolldown/index.d.ts
/**
 * Bundles a backend entrypoint with Rolldown and returns the emitted files,
 * the lambda handler filename, the detected framework (slug + version), and
 * the set of local files that went into the build (used for file tracing).
 */
declare const rolldown: (args: Pick<BuildOptions, 'entrypoint' | 'workPath' | 'repoRootPath'> & {
span?: Span;
}) => Promise<{
files: Files;
handler: string;
framework: {
slug: string;
version: string;
};
localBuildFiles: Set<string>;
}>;
//#endregion
export { rolldown };
import { builtinModules } from "node:module";
import { FileBlob, FileFsRef, Span, isBackendFramework } from "@vercel/build-utils";
import { dirname, extname, join, relative } from "node:path";
import { existsSync } from "node:fs";
import { lstat, readFile } from "node:fs/promises";
import { build } from "rolldown";
import { nodeFileTrace } from "@vercel/nft";
import { isNativeError } from "node:util/types";
import { transform } from "oxc-transform";
import { exports } from "resolve.exports";
//#region src/rolldown/resolve-format.ts
/**
 * Determines the module format ("esm" | "cjs") and the output file extension
 * ("mjs" | "cjs") for the given entrypoint.
 *
 * Unambiguous extensions (.mts/.mjs → esm, .cts/.cjs → cjs) decide directly;
 * ambiguous ones (.js/.ts, or anything unknown) fall back to the nearest
 * package.json "type" field, matching Node.js semantics: "module" → esm,
 * anything else → cjs.
 *
 * Fixes over the previous version:
 * - a missing (or unreadable) package.json no longer makes ambiguous
 *   extensions throw "Unable to resolve format"; Node's documented default
 *   (CommonJS) is used instead;
 * - dropped a redundant `.toString()` on the already-string readFile result,
 *   and the package.json read is skipped entirely for unambiguous formats.
 *
 * @param {{entrypoint: string, workPath: string}} args
 * @returns {Promise<{format: "esm"|"cjs", extension: "mjs"|"cjs"}>}
 */
const resolveEntrypointAndFormat = async (args) => {
  const extension = extname(args.entrypoint);
  const extensionMap = {
    ".ts": { format: "auto", extension: "js" },
    ".mts": { format: "esm", extension: "mjs" },
    ".cts": { format: "cjs", extension: "cjs" },
    ".cjs": { format: "cjs", extension: "cjs" },
    ".js": { format: "auto", extension: "js" },
    ".mjs": { format: "esm", extension: "mjs" }
  };
  const extensionInfo = extensionMap[extension] || extensionMap[".js"];
  let resolvedFormat = extensionInfo.format === "auto" ? void 0 : extensionInfo.format;
  if (!resolvedFormat) {
    // Ambiguous extension: consult package.json "type", defaulting to CJS
    // (Node's behavior when no package.json is present or it is invalid).
    let pkg = {};
    const packageJsonPath = join(args.workPath, "package.json");
    if (existsSync(packageJsonPath)) {
      try {
        pkg = JSON.parse(await readFile(packageJsonPath, "utf8"));
      } catch (_e) {
        pkg = {};
      }
    }
    resolvedFormat = pkg.type === "module" ? "esm" : "cjs";
  }
  return {
    format: resolvedFormat,
    extension: resolvedFormat === "esm" ? "mjs" : "cjs"
  };
};
//#endregion
//#region src/rolldown/nft.ts
/**
 * Runs @vercel/nft file tracing over the locally built files and registers
 * every traced dependency in `args.files` (unless it is already one of the
 * build inputs). TypeScript sources are transpiled with oxc before tracing
 * so their import graph can be followed.
 */
const nft = async (args) => {
const nftSpan = args.span.child("vc.builder.backends.nft");
const runNft = async () => {
const nftResult = await nodeFileTrace(Array.from(args.localBuildFiles), {
base: args.repoRootPath,
processCwd: args.workPath,
ts: true,
mixedModules: true,
ignore: args.ignoreNodeModules ? (path) => path.includes("node_modules") : void 0,
async readFile(fsPath) {
try {
let source = await readFile(fsPath);
// Transpile TS so nft can parse its imports.
if (isTypeScriptFile(fsPath)) source = (await transform(fsPath, source.toString())).code;
return source;
} catch (error) {
// Missing-file / directory reads are expected during tracing; report
// them as "not found" instead of failing the whole trace.
if (isNativeError(error) && "code" in error && (error.code === "ENOENT" || error.code === "EISDIR")) return null;
throw error;
}
}
});
for (const file of nftResult.fileList) {
const absolutePath = join(args.repoRootPath, file);
const stats = await lstat(absolutePath);
const outputPath = file;
// Build inputs are emitted elsewhere; don't duplicate them.
if (args.localBuildFiles.has(join(args.repoRootPath, outputPath))) continue;
if (stats.isSymbolicLink() || stats.isFile()) if (args.ignoreNodeModules) {
const content = await readFile(absolutePath, "utf-8");
args.files[outputPath] = new FileBlob({
data: content,
mode: stats.mode
});
} else args.files[outputPath] = new FileFsRef({
fsPath: absolutePath,
mode: stats.mode
});
}
};
await nftSpan.trace(runNft);
};
/** True when the path has a TypeScript source extension (.ts/.tsx/.mts/.cts). */
const isTypeScriptFile = (fsPath) => {
  return [".ts", ".tsx", ".mts", ".cts"].some((ext) => fsPath.endsWith(ext));
};
//#endregion
//#region src/rolldown/index.ts
// Plugin name and the null-byte-prefixed virtual-module id used for CommonJS
// interop shims ("\0" marks rolldown virtual modules).
const PLUGIN_NAME = "vercel:backends";
const CJS_SHIM_PREFIX = "\0cjs-shim:";
/**
 * Bundles `args.entrypoint` with Rolldown (preserving module structure),
 * detects the backend framework in use, replaces CommonJS dependencies with
 * runtime require() shims, and runs nft tracing over the local inputs.
 * Returns the emitted files, the entry handler filename, the detected
 * framework, and the set of local build files.
 */
const rolldown = async (args) => {
const files = {};
const { format, extension } = await resolveEntrypointAndFormat(args);
// First-party files that fed the bundle; used as nft trace roots.
const localBuildFiles = /* @__PURE__ */ new Set();
let handler = null;
const packageJsonCache = /* @__PURE__ */ new Map();
// shim id -> { pkgDir, pkgName }, consumed by the load hook below.
const shimMeta = /* @__PURE__ */ new Map();
const framework = {
slug: "",
version: ""
};
// Reads and caches a package.json; caches null for unreadable/invalid files.
const getPackageJson = async (pkgPath) => {
if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);
try {
const contents = await readFile(pkgPath, "utf-8");
const parsed = JSON.parse(contents);
packageJsonCache.set(pkgPath, parsed);
return parsed;
} catch {
packageJsonCache.set(pkgPath, null);
return null;
}
};
// Heuristic: is the resolved dependency file CommonJS? Extension wins, then
// the package "exports" map (import vs require conditions), then the
// "module"/"type" fields. Defaults to CJS when nothing is conclusive.
const isCommonJS = async (bareImport, resolvedPath, resolvedInfo) => {
const ext = extname(resolvedPath);
if (ext === ".cjs") return true;
if (ext === ".mjs") return false;
if (ext === ".js" || ext === ".ts") {
const pkgJsonPath = resolvedInfo.packageJsonPath;
if (!pkgJsonPath) return true;
const pkgJson = await getPackageJson(pkgJsonPath);
if (!pkgJson) return true;
const pkgDir = dirname(pkgJsonPath);
const relativePath = resolvedPath.slice(pkgDir.length + 1).replace(/\\/g, "/");
const pkgName = pkgJson.name || "";
// Subpath within the package ("." for the bare package root).
const subpath = bareImport.startsWith(pkgName) ? `.${bareImport.slice(pkgName.length)}` || "." : ".";
try {
if (exports(pkgJson, subpath, {
require: false,
conditions: ["node", "import"]
})?.some((p) => p === relativePath || p === `./${relativePath}`)) return false;
if (exports(pkgJson, subpath, {
require: true,
conditions: ["node", "require"]
})?.some((p) => p === relativePath || p === `./${relativePath}`)) return true;
} catch {}
if (pkgJson.module) return false;
return pkgJson.type !== "module";
}
return true;
};
// Bare import: not relative, not absolute, and not a URL-like scheme.
const isBareImport = (id) => {
return !id.startsWith(".") && !id.startsWith("/") && !/^[a-z][a-z0-9+.-]*:/i.test(id);
};
const isNodeModule = (resolved) => {
return resolved?.id?.includes("node_modules") ?? false;
};
// Matches both "fs" and "node:fs" builtin specifier forms.
const isNodeBuiltin = (id) => {
const normalizedId = id.includes(":") ? id.split(":")[1] : id;
return builtinModules.includes(normalizedId);
};
const isLocalImport = (id) => {
return !id.startsWith("node:") && !id.includes("node_modules");
};
const plugin = () => {
return {
name: PLUGIN_NAME,
resolveId: {
order: "pre",
async handler(id, importer, rOpts) {
// Our own virtual shim ids resolve to themselves.
if (id.startsWith(CJS_SHIM_PREFIX)) return {
id,
external: false
};
const resolved = await this.resolve(id, importer, rOpts);
// Normalize builtins to the "node:" form and keep them external.
if (isNodeBuiltin(id)) return {
id: id.startsWith("node:") ? id : `node:${id}`,
external: true
};
// Record first-party files as nft trace roots.
if (resolved?.id && isLocalImport(resolved.id)) localBuildFiles.add(resolved.id);
else if (!resolved) localBuildFiles.add(join(args.workPath, id));
// Imports issued from inside a shim stay external (handled at runtime).
if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
id,
external: true
};
if (importer && isBareImport(id) && isNodeModule(resolved)) {
// Capture the backend framework's name/version from its package.json.
if (isBackendFramework(id) && resolved?.packageJsonPath) try {
const pkg = await readFile(resolved.packageJsonPath, "utf8");
const pkgJson = JSON.parse(pkg);
framework.slug = pkgJson.name;
framework.version = pkgJson.version;
} catch {}
// CJS dependencies are replaced by a shim that require()s them at
// runtime from the importing package's directory context.
if (resolved ? await isCommonJS(id, resolved.id, resolved) : false) {
const importerPkgJsonPath = (await this.resolve(importer))?.packageJsonPath;
if (importerPkgJsonPath) {
const importerPkgDir = relative(args.repoRootPath, dirname(importerPkgJsonPath));
const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
shimMeta.set(shimId$1, {
pkgDir: importerPkgDir,
pkgName: id
});
return {
id: shimId$1,
external: false
};
}
const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
shimMeta.set(shimId, {
pkgDir: "",
pkgName: id
});
return {
id: shimId,
external: false
};
}
// ESM node_modules dependencies stay external.
return {
id,
external: true
};
}
if (importer && isBareImport(id)) return resolved;
if (resolved && !isNodeModule(resolved)) return resolved;
return resolved;
}
},
load: { async handler(id) {
if (id.startsWith(CJS_SHIM_PREFIX)) {
const meta = shimMeta.get(id);
// 10 === CJS_SHIM_PREFIX.length ("\0cjs-shim:").
if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };
const { pkgDir, pkgName } = meta;
// Shim body: require() the package at runtime from the directory of the
// importer's package.json so resolution matches the original location.
return { code: `
import { createRequire } from 'node:module';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
const requireFromContext = createRequire(join(dirname(fileURLToPath(import.meta.url)), '${pkgDir ? join("..", pkgDir, "package.json") : "../package.json"}'));
module.exports = requireFromContext('${pkgName}');
`.trim() };
}
return null;
} }
};
};
const runRolldown = () => build({
input: args.entrypoint,
write: false,
cwd: args.workPath,
platform: "node",
// ESM output has no __dirname/__filename; map them to import.meta fields.
transform: { define: format === "esm" ? {
__dirname: "import.meta.dirname",
__filename: "import.meta.filename"
} : void 0 },
tsconfig: true,
plugins: [plugin()],
output: {
cleanDir: true,
format,
entryFileNames: `[name].${extension}`,
preserveModules: true,
preserveModulesRoot: args.repoRootPath,
sourcemap: false
}
});
const rolldownSpan = args.span?.child("vc.builder.backends.rolldown");
const out = await rolldownSpan?.trace(runRolldown) || await runRolldown();
for (const file of out.output) if (file.type === "chunk") {
if (file.isEntry) handler = file.fileName;
// 420 === 0o644 (rw-r--r--).
files[file.fileName] = new FileBlob({
data: file.code,
mode: 420
});
}
// Trace runtime deps of the local inputs; node_modules content is inlined
// as FileBlobs because ignoreNodeModules is set.
await nft({
...args,
localBuildFiles,
files,
span: rolldownSpan ?? new Span({ name: "vc.builder.backends.nft" }),
ignoreNodeModules: true
});
if (!handler) throw new Error(`Unable to resolve build handler for entrypoint: ${args.entrypoint}`);
return {
files,
handler,
framework,
localBuildFiles
};
};
//#endregion
export { rolldown };
+886
-409
import { builtinModules, createRequire } from "node:module";
import { delimiter, dirname, extname, join } from "path";
import { FileFsRef, NodejsLambda, Span, debug, defaultCachePathGlob, download, execCommand, getEnvForPackageManager, getNodeBinPaths, getNodeVersion, getPackageJson, getScriptName, glob, isExperimentalBackendsWithoutIntrospectionEnabled, runNpmInstall, runPackageJsonScript, scanParentDirs } from "@vercel/build-utils";
import { FileBlob, FileFsRef, NodejsLambda, Span, debug, defaultCachePathGlob, download, execCommand, getEnvForPackageManager, getNodeBinPaths, getNodeVersion, glob, isBackendFramework, isExperimentalBackendsWithoutIntrospectionEnabled, runNpmInstall, runPackageJsonScript, scanParentDirs } from "@vercel/build-utils";
import { createWriteStream, existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, unlinkSync, writeFileSync } from "node:fs";
import { lstat, readFile, rm } from "node:fs/promises";
import { dirname as dirname$1, extname as extname$1, isAbsolute, join as join$1, relative } from "node:path";
import { createWriteStream, existsSync, mkdtempSync, readFileSync, rmSync, unlinkSync } from "node:fs";
import { spawn } from "node:child_process";
import { tmpdir } from "node:os";
import { z } from "zod";
import { lstat, readFile, rm, writeFile } from "node:fs/promises";
import { build as build$2 } from "rolldown";

@@ -16,6 +13,9 @@ import { exports } from "resolve.exports";

import { createRequire as createRequire$1 } from "module";
import { spawn as spawn$1 } from "child_process";
import { spawn } from "child_process";
import { existsSync as existsSync$1 } from "fs";
import execa from "execa";
import { readFile as readFile$1, writeFile as writeFile$1 } from "fs/promises";
import { readFile as readFile$1, writeFile } from "fs/promises";
import { spawn as spawn$1 } from "node:child_process";
import { tmpdir } from "node:os";
import { z } from "zod";

@@ -69,195 +69,4 @@ //#region src/utils.ts

//#endregion
//#region src/introspection/util.ts
// Markers the introspection loaders print around their JSON payload.
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
//#endregion
//#region src/introspection/index.ts
const require$1 = createRequire(import.meta.url);
/**
 * Runs the built handler in a child Node.js process with the CJS and ESM
 * introspection loaders preloaded, captures the marker-delimited JSON the
 * loaders print on shutdown, validates it, and converts it into Vercel
 * routes. Falls back to `defaultResult` when introspection is disabled,
 * times out, errors, or produces no parsable data.
 */
const introspectApp = async (args) => {
const { span } = args;
const introspectionSpan = span.child("vc.builder.backends.introspection");
if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult(args);
const cjsLoaderPath = require$1.resolve("@vercel/backends/introspection/loaders/cjs");
const rolldownEsmLoaderPath = `file://${require$1.resolve("@vercel/backends/introspection/loaders/rolldown-esm")}`;
const handlerPath = join$1(args.dir, args.handler);
// Shape of the JSON payload emitted between the introspection markers.
const introspectionSchema = z.object({
frameworkSlug: z.string().optional(),
routes: z.array(z.object({
src: z.string(),
dest: z.string(),
methods: z.array(z.string())
})),
// Normalize absolute folders to be relative to the app directory.
additionalFolders: z.array(z.string()).optional().transform((values) => {
return values?.map((val) => {
if (isAbsolute(val)) return relative(args.dir, val);
return val;
});
}),
additionalDeps: z.array(z.string()).optional()
});
let introspectionData;
// This promise always resolves (never rejects): introspection is best-effort.
await new Promise((resolvePromise) => {
try {
debug("Spawning introspection process");
const child = spawn("node", [
"-r",
cjsLoaderPath,
"--import",
rolldownEsmLoaderPath,
handlerPath
], {
stdio: [
"pipe",
"pipe",
"pipe"
],
cwd: args.dir,
env: {
...process.env,
...args.env
}
});
// Stream stdout to a temp file so large output is not buffered in memory.
const tempDir = mkdtempSync(join$1(tmpdir(), "introspection-"));
const tempFilePath = join$1(tempDir, "output.txt");
const writeStream = createWriteStream(tempFilePath);
let streamClosed = false;
child.stdout?.pipe(writeStream);
let stderrBuffer = "";
child.stderr?.on("data", (data) => {
stderrBuffer += data.toString();
});
writeStream.on("error", (err) => {
debug(`Write stream error: ${err.message}`);
});
// Graceful kill at 8s; hard kill at 9s if the child ignored SIGTERM.
const timeout = setTimeout(() => {
debug("Introspection timeout, killing process with SIGTERM");
child.kill("SIGTERM");
}, 8e3);
const timeout2 = setTimeout(() => {
debug("Introspection timeout, killing process with SIGKILL");
child.kill("SIGKILL");
}, 9e3);
child.on("error", (err) => {
clearTimeout(timeout);
clearTimeout(timeout2);
debug(`Loader error: ${err.message}`);
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
try {
unlinkSync(tempFilePath);
} catch (cleanupErr) {
debug(`Error deleting temp file on error: ${cleanupErr}`);
}
resolvePromise(void 0);
});
else resolvePromise(void 0);
});
child.on("close", () => {
clearTimeout(timeout);
clearTimeout(timeout2);
debug("Introspection process closed");
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
let stdoutBuffer;
try {
// Extract and validate the JSON between the first marker pair.
stdoutBuffer = readFileSync(tempFilePath, "utf8");
const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
if (beginIndex !== -1 && endIndex !== -1) {
const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
if (introspectionString) {
introspectionData = introspectionSchema.parse(JSON.parse(introspectionString));
debug("Introspection data parsed successfully");
}
} else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} catch (error) {
debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} finally {
try {
rmSync(tempDir, {
recursive: true,
force: true
});
} catch (err) {
debug(`Error deleting temp directory: ${err}`);
}
resolvePromise(void 0);
}
});
else resolvePromise(void 0);
});
} catch (error) {
debug("Introspection error", error);
resolvePromise(void 0);
}
});
const framework = getFramework(args);
if (!introspectionData) {
introspectionSpan.setAttributes({
"introspection.success": "false",
"introspection.routes": "0"
});
return defaultResult(args);
}
// Filesystem first, then the discovered routes, then a catch-all to "/".
const routes = [
{ handle: "filesystem" },
...introspectionData.routes,
{
src: "/(.*)",
dest: "/"
}
];
introspectionSpan.setAttributes({
"introspection.success": "true",
"introspection.routes": String(introspectionData.routes.length),
"introspection.framework": introspectionData.frameworkSlug ?? ""
});
return {
routes,
framework,
additionalFolders: introspectionData.additionalFolders ?? [],
additionalDeps: introspectionData.additionalDeps ?? []
};
};
/**
 * Fallback introspection result: filesystem routing plus a catch-all
 * rewrite to "/", with whatever framework info can be derived from `args`.
 */
const defaultResult = (args) => ({
  routes: [
    { handle: "filesystem" },
    { src: "/(.*)", dest: "/" }
  ],
  framework: getFramework(args)
});
/**
 * Best-effort framework detection: resolves the framework package from
 * `args.dir`, walks up from the resolved file to the nearest package.json,
 * and reads its version. Any failure falls back to an empty version string.
 */
const getFramework = (args) => {
try {
let version$1;
if (args.framework) {
const frameworkLibPath = require$1.resolve(`${args.framework}`, { paths: [args.dir] });
// Walk parent directories until a package.json is found or the FS root
// is reached (parentDir === dir).
const findNearestPackageJson = (dir) => {
const packageJsonPath = join$1(dir, "package.json");
if (existsSync(packageJsonPath)) return packageJsonPath;
const parentDir = dirname$1(dir);
if (parentDir === dir) return;
return findNearestPackageJson(parentDir);
};
const nearestPackageJsonPath = findNearestPackageJson(frameworkLibPath);
if (nearestPackageJsonPath) version$1 = require$1(nearestPackageJsonPath).version;
}
return {
slug: args.framework ?? "",
version: version$1 ?? ""
};
} catch (error) {
debug(`Error getting framework for ${args.framework}. Setting framework version to empty string.`, error);
return {
slug: args.framework ?? "",
version: ""
};
}
};
//#endregion
//#region src/cervel/plugin.ts
const CJS_SHIM_PREFIX = "\0cjs-shim:";
const CJS_SHIM_PREFIX$1 = "\0cjs-shim:";
const plugin = (args) => {

@@ -273,3 +82,3 @@ const packageJsonCache = /* @__PURE__ */ new Map();

*/
const getPackageJson$1 = async (pkgPath) => {
const getPackageJson = async (pkgPath) => {
if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);

@@ -296,3 +105,3 @@ try {

if (!pkgJsonPath) return true;
const pkgJson = await getPackageJson$1(pkgJsonPath);
const pkgJson = await getPackageJson(pkgJsonPath);
if (!pkgJson) return true;

@@ -331,3 +140,3 @@ const pkgDir = dirname$1(pkgJsonPath);

async handler(id, importer, rOpts) {
if (id.startsWith(CJS_SHIM_PREFIX)) return {
if (id.startsWith(CJS_SHIM_PREFIX$1)) return {
id,

@@ -342,3 +151,3 @@ external: false

if (resolved?.id && isLocalImport(resolved.id)) tracedPaths.add(resolved.id);
if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
if (importer?.startsWith(CJS_SHIM_PREFIX$1) && isBareImport(id)) return {
id,

@@ -353,3 +162,3 @@ external: true

const importerPkgDir = relative(args.repoRootPath, dirname$1(importerPkgJsonPath));
const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
const shimId$1 = `${CJS_SHIM_PREFIX$1}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
shimMeta.set(shimId$1, {

@@ -364,3 +173,3 @@ pkgDir: importerPkgDir,

}
const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
const shimId = `${CJS_SHIM_PREFIX$1}${id.replace(/\//g, "_")}`;
shimMeta.set(shimId, {

@@ -390,3 +199,3 @@ pkgDir: "",

load: { async handler(id) {
if (id.startsWith(CJS_SHIM_PREFIX)) {
if (id.startsWith(CJS_SHIM_PREFIX$1)) {
const meta = shimMeta.get(id);

@@ -476,3 +285,3 @@ if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };

`.trim();
const rolldown = async (args) => {
const rolldown$1 = async (args) => {
const entrypointPath = join$1(args.workPath, args.entrypoint);

@@ -612,4 +421,4 @@ const outputDir = join$1(args.workPath, args.out);

//#region src/cervel/typescript.ts
const require_ = createRequire$1(import.meta.url);
const typescript = (args) => {
const require_$1 = createRequire$1(import.meta.url);
const typescript$1 = (args) => {
const { span } = args;

@@ -623,3 +432,3 @@ return span.child("vc.builder.backends.tsCompile").trace(async () => {

].includes(extension)) return;
const tscPath = resolveTscPath(args);
const tscPath = resolveTscPath$1(args);
if (!tscPath) {

@@ -629,6 +438,6 @@ console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck skipped ${Colors.gray("(TypeScript not found)")}`));

}
return doTypeCheck(args, tscPath);
return doTypeCheck$1(args, tscPath);
});
};
async function doTypeCheck(args, tscPath) {
async function doTypeCheck$1(args, tscPath) {
let stdout = "";

@@ -650,6 +459,6 @@ let stderr = "";

];
const tsconfig = await findNearestTsconfig(args.workPath);
const tsconfig = await findNearestTsconfig$1(args.workPath);
if (tsconfig) tscArgs.push("--project", tsconfig);
else tscArgs.push(args.entrypoint);
const child = spawn$1(process.execPath, tscArgs, {
const child = spawn(process.execPath, tscArgs, {
cwd: args.workPath,

@@ -687,5 +496,5 @@ stdio: [

}
const resolveTscPath = (args) => {
const resolveTscPath$1 = (args) => {
try {
return require_.resolve("typescript/bin/tsc", { paths: [args.workPath] });
return require_$1.resolve("typescript/bin/tsc", { paths: [args.workPath] });
} catch (e) {

@@ -695,11 +504,11 @@ return null;

};
const findNearestTsconfig = async (workPath) => {
const findNearestTsconfig$1 = async (workPath) => {
const tsconfigPath = join(workPath, "tsconfig.json");
if (existsSync$1(tsconfigPath)) return tsconfigPath;
if (workPath === "/") return;
return findNearestTsconfig(join(workPath, ".."));
return findNearestTsconfig$1(join(workPath, ".."));
};
//#endregion
//#region src/cervel/find-entrypoint.ts
//#region src/find-entrypoint.ts
const frameworks = [

@@ -717,3 +526,7 @@ "express",

"server",
"main"
"main",
"src/app",
"src/index",
"src/server",
"src/main"
];

@@ -730,48 +543,36 @@ const entrypointExtensions = [

const createFrameworkRegex = (framework) => new RegExp(`(?:from|require|import)\\s*(?:\\(\\s*)?["']${framework}["']\\s*(?:\\))?`, "g");
const findEntrypoint = async (cwd, options) => {
if (options?.ignoreRegex ?? false) {
for (const entrypoint of entrypoints) if (existsSync(join$1(cwd, entrypoint))) return entrypoint;
for (const entrypoint of entrypoints) if (existsSync(join$1(cwd, "src", entrypoint))) return join$1("src", entrypoint);
throw new Error("No entrypoint file found");
}
const packageJson = await readFile(join$1(cwd, "package.json"), "utf-8");
const packageJsonObject = JSON.parse(packageJson);
const framework = frameworks.find((framework$1) => packageJsonObject.dependencies?.[framework$1]);
if (!framework) {
for (const entrypoint of entrypoints) {
const entrypointPath = join$1(cwd, entrypoint);
try {
await readFile(entrypointPath, "utf-8");
return entrypoint;
} catch (e) {
continue;
}
}
throw new Error("No entrypoint or framework found");
}
const regex = createFrameworkRegex(framework);
for (const entrypoint of entrypoints) {
const findEntrypoint = async (cwd) => {
let framework;
try {
const packageJson = await readFile(join$1(cwd, "package.json"), "utf-8");
const packageJsonObject = JSON.parse(packageJson);
framework = frameworks.find((framework$1) => packageJsonObject.dependencies?.[framework$1]);
} catch (_) {}
if (!framework) for (const entrypoint of entrypoints) {
const entrypointPath = join$1(cwd, entrypoint);
try {
const content = await readFile(entrypointPath, "utf-8");
if (regex.test(content)) return entrypoint;
} catch (e) {
continue;
}
await readFile(entrypointPath, "utf-8");
return entrypoint;
} catch (_) {}
}
const regex = framework ? createFrameworkRegex(framework) : void 0;
for (const entrypoint of entrypoints) {
const entrypointPath = join$1(cwd, "src", entrypoint);
const entrypointPath = join$1(cwd, entrypoint);
try {
const content = await readFile(entrypointPath, "utf-8");
if (regex.test(content)) return join$1("src", entrypoint);
} catch (e) {
continue;
}
if (regex) {
if (regex.test(content)) return entrypoint;
}
} catch (_) {}
}
throw new Error("No entrypoint found");
};
/**
 * Locates the app entrypoint under `cwd`, failing loudly when none exists.
 *
 * @param {string} cwd - directory to search
 * @returns {Promise<string>} relative path of the detected entrypoint
 * @throws {Error} when no candidate entrypoint file could be found
 */
const findEntrypointOrThrow = async (cwd) => {
  const found = await findEntrypoint(cwd);
  if (found) return found;
  throw new Error(`No entrypoint found in "${cwd}". Expected one of: ${entrypoints.join(", ")}`);
};
//#endregion
//#region src/cervel/index.ts
const require = createRequire(import.meta.url);
const require$2 = createRequire(import.meta.url);
const getBuildSummary = async (outputDir) => {

@@ -782,9 +583,9 @@ const buildSummary = await readFile$1(join$1(outputDir, ".cervel.json"), "utf-8");

const build$1 = async (args) => {
const entrypoint = args.entrypoint || await findEntrypoint(args.workPath);
const entrypoint = args.entrypoint || await findEntrypointOrThrow(args.workPath);
const span = args.span ?? new Span({ name: "cervel-build" });
const [, rolldownResult] = await Promise.all([typescript({
const [, rolldownResult] = await Promise.all([typescript$1({
entrypoint,
workPath: args.workPath,
span
}), rolldown({
}), rolldown$1({
entrypoint,

@@ -796,3 +597,3 @@ workPath: args.workPath,

})]);
await writeFile$1(join$1(args.workPath, args.out, ".cervel.json"), JSON.stringify({ handler: rolldownResult.result.handler }, null, 2));
await writeFile(join$1(args.workPath, args.out, ".cervel.json"), JSON.stringify({ handler: rolldownResult.result.handler }, null, 2));
console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Build complete`));

@@ -802,5 +603,5 @@ return { rolldownResult: rolldownResult.result };

const serve = async (args) => {
const entrypoint = await findEntrypoint(args.workPath);
const srvxBin = join$1(require.resolve("srvx"), "..", "..", "..", "bin", "srvx.mjs");
const tsxBin = require.resolve("tsx");
const entrypoint = await findEntrypointOrThrow(args.workPath);
const srvxBin = join$1(require$2.resolve("srvx"), "..", "..", "..", "bin", "srvx.mjs");
const tsxBin = require$2.resolve("tsx");
const restArgs = Object.entries(args.rest).filter(([, value]) => value !== void 0 && value !== false).map(([key, value]) => typeof value === "boolean" ? `--${key}` : `--${key}=${value}`);

@@ -846,126 +647,781 @@ if (!args.rest.import) restArgs.push("--import", tsxBin);

//#endregion
//#region src/build.ts
const defaultOutputDirectory = join$1(".vercel", "node");
const doBuild = async (args, downloadResult, span) => {
const buildCommandResult = await maybeExecBuildCommand(args, downloadResult);
const outputSetting = args.config.outputDirectory;
const buildCommand = args.config.projectSettings?.buildCommand;
const isCervelCommand = buildCommand?.trim().startsWith("cervel");
if (!outputSetting) {
debug("No output directory configured, using default output directory");
if (isCervelCommand) {
debug("Cervel command ran, using its default output location");
const cervelOutputDir = join$1(args.workPath, "dist");
if (existsSync(join$1(cervelOutputDir, ".cervel.json"))) {
debug("Cervel JSON file found, using its handler");
const { handler: handler$2 } = await getBuildSummary(cervelOutputDir);
return {
dir: cervelOutputDir,
handler: handler$2
};
}
throw new Error(`Build command "${buildCommand}" completed, but no output was found at ${cervelOutputDir}. Make sure your cervel command is configured correctly.`);
//#region src/rolldown/resolve-format.ts
const resolveEntrypointAndFormat = async (args) => {
const extension = extname$1(args.entrypoint);
const extensionMap = {
".ts": {
format: "auto",
extension: "js"
},
".mts": {
format: "esm",
extension: "mjs"
},
".cts": {
format: "cjs",
extension: "cjs"
},
".cjs": {
format: "cjs",
extension: "cjs"
},
".js": {
format: "auto",
extension: "js"
},
".mjs": {
format: "esm",
extension: "mjs"
}
const distDir = join$1(args.workPath, "dist");
if (existsSync(distDir)) {
debug("Dist directory found, checking for .cervel.json");
const cervelJsonPath$1 = join$1(distDir, ".cervel.json");
if (existsSync(cervelJsonPath$1)) {
const { handler: handler$3 } = await getBuildSummary(distDir);
return {
dir: distDir,
handler: handler$3
};
}
let handler$2;
try {
debug("Finding entrypoint in dist directory");
handler$2 = await findEntrypoint(distDir);
} catch (error) {
};
const extensionInfo = extensionMap[extension] || extensionMap[".js"];
let resolvedFormat = extensionInfo.format === "auto" ? void 0 : extensionInfo.format;
const packageJsonPath = join$1(args.workPath, "package.json");
let pkg = {};
if (existsSync(packageJsonPath)) {
const source = await readFile(packageJsonPath, "utf8");
try {
pkg = JSON.parse(source.toString());
} catch (_e) {
pkg = {};
}
if (extensionInfo.format === "auto") if (pkg.type === "module") resolvedFormat = "esm";
else resolvedFormat = "cjs";
}
if (!resolvedFormat) throw new Error(`Unable to resolve format for ${args.entrypoint}`);
return {
format: resolvedFormat,
extension: resolvedFormat === "esm" ? "mjs" : "cjs"
};
};
//#endregion
//#region src/rolldown/nft.ts
const nft = async (args) => {
const nftSpan = args.span.child("vc.builder.backends.nft");
const runNft = async () => {
const nftResult = await nodeFileTrace$1(Array.from(args.localBuildFiles), {
base: args.repoRootPath,
processCwd: args.workPath,
ts: true,
mixedModules: true,
ignore: args.ignoreNodeModules ? (path) => path.includes("node_modules") : void 0,
async readFile(fsPath) {
try {
debug("Finding entrypoint in dist directory with ignoreRegex");
handler$2 = await findEntrypoint(distDir, { ignoreRegex: true });
debug("Found entrypoint in dist directory with ignoreRegex", handler$2);
} catch (error$1) {
debug("Unable to detect entrypoint, building ourselves");
const buildResult$1 = await build$1({
workPath: args.workPath,
repoRootPath: args.repoRootPath,
out: defaultOutputDirectory,
span
});
const { handler: handler$3 } = await getBuildSummary(buildResult$1.rolldownResult.outputDir);
return {
dir: buildResult$1.rolldownResult.outputDir,
handler: handler$3,
files: buildResult$1.rolldownResult.outputFiles
};
let source = await readFile(fsPath);
if (isTypeScriptFile(fsPath)) source = (await transform(fsPath, source.toString())).code;
return source;
} catch (error) {
if (isNativeError(error) && "code" in error && (error.code === "ENOENT" || error.code === "EISDIR")) return null;
throw error;
}
}
await writeFile(cervelJsonPath$1, JSON.stringify({ handler: handler$2 }, null, 2));
const files = await nodeFileTrace({
keepTracedPaths: true,
tracedPaths: [join$1(distDir, handler$2)],
repoRootPath: args.repoRootPath,
workPath: args.workPath,
outDir: distDir,
span
});
for (const file of nftResult.fileList) {
const absolutePath = join$1(args.repoRootPath, file);
const stats = await lstat(absolutePath);
const outputPath = file;
if (args.localBuildFiles.has(join$1(args.repoRootPath, outputPath))) continue;
if (stats.isSymbolicLink() || stats.isFile()) if (args.ignoreNodeModules) {
const content = await readFile(absolutePath, "utf-8");
args.files[outputPath] = new FileBlob({
data: content,
mode: stats.mode
});
} else args.files[outputPath] = new FileFsRef({
fsPath: absolutePath,
mode: stats.mode
});
return {
dir: distDir,
handler: handler$2,
files
};
}
debug("No dist directory found, or unable to detect entrypoint, building ourselves");
const buildResult = await build$1({
workPath: args.workPath,
repoRootPath: args.repoRootPath,
out: defaultOutputDirectory,
span
});
const { handler: handler$1 } = await getBuildSummary(buildResult.rolldownResult.outputDir);
};
await nftSpan.trace(runNft);
};
/** Whether `fsPath` ends in a TypeScript source extension (.ts/.tsx/.mts/.cts). */
const isTypeScriptFile = (fsPath) => {
  const tsExtensions = [".ts", ".tsx", ".mts", ".cts"];
  return tsExtensions.some((ext) => fsPath.endsWith(ext));
};
//#endregion
//#region src/rolldown/index.ts
const PLUGIN_NAME = "vercel:backends";
const CJS_SHIM_PREFIX = "\0cjs-shim:";
const rolldown = async (args) => {
const files = {};
const { format, extension } = await resolveEntrypointAndFormat(args);
const localBuildFiles = /* @__PURE__ */ new Set();
let handler = null;
const packageJsonCache = /* @__PURE__ */ new Map();
const shimMeta = /* @__PURE__ */ new Map();
const framework = {
slug: "",
version: ""
};
const getPackageJson = async (pkgPath) => {
if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);
try {
const contents = await readFile(pkgPath, "utf-8");
const parsed = JSON.parse(contents);
packageJsonCache.set(pkgPath, parsed);
return parsed;
} catch {
packageJsonCache.set(pkgPath, null);
return null;
}
};
const isCommonJS = async (bareImport, resolvedPath, resolvedInfo) => {
const ext = extname$1(resolvedPath);
if (ext === ".cjs") return true;
if (ext === ".mjs") return false;
if (ext === ".js" || ext === ".ts") {
const pkgJsonPath = resolvedInfo.packageJsonPath;
if (!pkgJsonPath) return true;
const pkgJson = await getPackageJson(pkgJsonPath);
if (!pkgJson) return true;
const pkgDir = dirname$1(pkgJsonPath);
const relativePath = resolvedPath.slice(pkgDir.length + 1).replace(/\\/g, "/");
const pkgName = pkgJson.name || "";
const subpath = bareImport.startsWith(pkgName) ? `.${bareImport.slice(pkgName.length)}` || "." : ".";
try {
if (exports(pkgJson, subpath, {
require: false,
conditions: ["node", "import"]
})?.some((p) => p === relativePath || p === `./${relativePath}`)) return false;
if (exports(pkgJson, subpath, {
require: true,
conditions: ["node", "require"]
})?.some((p) => p === relativePath || p === `./${relativePath}`)) return true;
} catch {}
if (pkgJson.module) return false;
return pkgJson.type !== "module";
}
return true;
};
/** True when `id` is a bare module specifier: not relative, not absolute, not a URL scheme. */
const isBareImport = (id) => {
  if (id.startsWith(".") || id.startsWith("/")) return false;
  const hasUrlScheme = /^[a-z][a-z0-9+.-]*:/i.test(id);
  return !hasUrlScheme;
};
/** True when a resolution result points inside a node_modules tree. */
const isNodeModule = (resolved) => {
  const resolvedId = resolved?.id;
  if (resolvedId == null) return false;
  return resolvedId.includes("node_modules");
};
/** True when `id` names a Node.js builtin module, in either "fs" or "node:fs" form. */
const isNodeBuiltin = (id) => {
  let bareName = id;
  if (id.includes(":")) {
    // Strip the scheme prefix (e.g. "node:fs" -> "fs") before the lookup.
    bareName = id.split(":")[1];
  }
  return builtinModules.includes(bareName);
};
/** True for project-local module ids: not a node: builtin and not vendored under node_modules. */
const isLocalImport = (id) => {
  const isBuiltinSpecifier = id.startsWith("node:");
  const isVendored = id.includes("node_modules");
  return !(isBuiltinSpecifier || isVendored);
};
const plugin$1 = () => {
return {
dir: buildResult.rolldownResult.outputDir,
handler: handler$1,
files: buildResult.rolldownResult.outputFiles
name: PLUGIN_NAME,
resolveId: {
order: "pre",
async handler(id, importer, rOpts) {
if (id.startsWith(CJS_SHIM_PREFIX)) return {
id,
external: false
};
const resolved = await this.resolve(id, importer, rOpts);
if (isNodeBuiltin(id)) return {
id: id.startsWith("node:") ? id : `node:${id}`,
external: true
};
if (resolved?.id && isLocalImport(resolved.id)) localBuildFiles.add(resolved.id);
else if (!resolved) localBuildFiles.add(join$1(args.workPath, id));
if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
id,
external: true
};
if (importer && isBareImport(id) && isNodeModule(resolved)) {
if (isBackendFramework(id) && resolved?.packageJsonPath) try {
const pkg = await readFile(resolved.packageJsonPath, "utf8");
const pkgJson = JSON.parse(pkg);
framework.slug = pkgJson.name;
framework.version = pkgJson.version;
} catch {}
if (resolved ? await isCommonJS(id, resolved.id, resolved) : false) {
const importerPkgJsonPath = (await this.resolve(importer))?.packageJsonPath;
if (importerPkgJsonPath) {
const importerPkgDir = relative(args.repoRootPath, dirname$1(importerPkgJsonPath));
const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
shimMeta.set(shimId$1, {
pkgDir: importerPkgDir,
pkgName: id
});
return {
id: shimId$1,
external: false
};
}
const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
shimMeta.set(shimId, {
pkgDir: "",
pkgName: id
});
return {
id: shimId,
external: false
};
}
return {
id,
external: true
};
}
if (importer && isBareImport(id)) return resolved;
if (resolved && !isNodeModule(resolved)) return resolved;
return resolved;
}
},
load: { async handler(id) {
if (id.startsWith(CJS_SHIM_PREFIX)) {
const meta = shimMeta.get(id);
if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };
const { pkgDir, pkgName } = meta;
return { code: `
import { createRequire } from 'node:module';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
const requireFromContext = createRequire(join(dirname(fileURLToPath(import.meta.url)), '${pkgDir ? join$1("..", pkgDir, "package.json") : "../package.json"}'));
module.exports = requireFromContext('${pkgName}');
`.trim() };
}
return null;
} }
};
};
const runRolldown = () => build$2({
input: args.entrypoint,
write: false,
cwd: args.workPath,
platform: "node",
transform: { define: format === "esm" ? {
__dirname: "import.meta.dirname",
__filename: "import.meta.filename"
} : void 0 },
tsconfig: true,
plugins: [plugin$1()],
output: {
cleanDir: true,
format,
entryFileNames: `[name].${extension}`,
preserveModules: true,
preserveModulesRoot: args.repoRootPath,
sourcemap: false
}
});
const rolldownSpan = args.span?.child("vc.builder.backends.rolldown");
const out = await rolldownSpan?.trace(runRolldown) || await runRolldown();
for (const file of out.output) if (file.type === "chunk") {
if (file.isEntry) handler = file.fileName;
files[file.fileName] = new FileBlob({
data: file.code,
mode: 420
});
}
const outputDir = join$1(args.workPath, outputSetting);
const packageJson = await getPackageJson(args.workPath);
const monorepoWithoutBuildScript = args.config.projectSettings?.monorepoManager && !getScriptName(packageJson, ["build"]);
if (!buildCommandResult || monorepoWithoutBuildScript) {
const buildResult = await build$1({
workPath: args.workPath,
repoRootPath: args.repoRootPath,
out: outputDir,
span
await nft({
...args,
localBuildFiles,
files,
span: rolldownSpan ?? new Span({ name: "vc.builder.backends.nft" }),
ignoreNodeModules: true
});
if (!handler) throw new Error(`Unable to resolve build handler for entrypoint: ${args.entrypoint}`);
return {
files,
handler,
framework,
localBuildFiles
};
};
//#endregion
//#region src/rolldown/util.ts
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
//#endregion
//#region src/rolldown/introspection.ts
const require$1 = createRequire(import.meta.url);
const introspectionSchema = z.object({
routes: z.array(z.object({
src: z.string(),
dest: z.string(),
methods: z.array(z.string())
})),
additionalFolders: z.array(z.string()).optional(),
additionalDeps: z.array(z.string()).optional()
});
const introspection = async (args) => {
const defaultResult$1 = {
routes: [],
additionalFolders: [],
additionalDeps: []
};
if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult$1;
const introspectionSpan = args.span.child("vc.builder.backends.introspection");
const runIntrospection = async () => {
const rolldownEsmLoaderPath = `file://${require$1.resolve("@vercel/backends/rolldown/esm")}`;
const rolldownCjsLoaderPath = require$1.resolve("@vercel/backends/rolldown/cjs-hooks");
const handlerPath = join$1(args.workPath, args.entrypoint);
const files = args.files;
const tmpDir = mkdtempSync(join$1(tmpdir(), "vercel-introspection-"));
for (const [key, value] of Object.entries(files)) {
if (!(value instanceof FileBlob) || typeof value.data !== "string") continue;
const filePath = join$1(tmpDir, key);
mkdirSync(dirname$1(filePath), { recursive: true });
writeFileSync(filePath, value.data);
}
let introspectionData;
await new Promise((resolvePromise) => {
try {
debug("Spawning introspection process");
const outputTempDir = mkdtempSync(join$1(tmpdir(), "introspection-output-"));
const tempFilePath = join$1(outputTempDir, "output.txt");
const writeStream = createWriteStream(tempFilePath);
let streamClosed = false;
const child = spawn$1("node", [
"-r",
rolldownCjsLoaderPath,
"--import",
rolldownEsmLoaderPath,
handlerPath
], {
stdio: [
"pipe",
"pipe",
"pipe"
],
cwd: args.workPath,
env: {
...process.env,
...args.meta?.buildEnv,
...args.meta?.env,
VERCEL_INTROSPECTION_HANDLER: handlerPath,
VERCEL_INTROSPECTION_HANDLER_BUILT: args.handler,
VERCEL_INTROSPECTION_WORK_PATH: args.workPath,
VERCEL_INTROSPECTION_REPO_ROOT_PATH: args.repoRootPath,
VERCEL_INTROSPECTION_TMP_DIR: tmpDir
}
});
child.stdout?.pipe(writeStream);
let stderrBuffer = "";
child.stderr?.on("data", (data) => {
stderrBuffer += data.toString();
});
writeStream.on("error", (err) => {
debug(`Write stream error: ${err.message}`);
});
const timeout = setTimeout(() => {
debug("Introspection timeout, killing process with SIGTERM");
child.kill("SIGTERM");
}, 8e3);
const timeout2 = setTimeout(() => {
debug("Introspection timeout, killing process with SIGKILL");
child.kill("SIGKILL");
}, 9e3);
const cleanup = () => {
clearTimeout(timeout);
clearTimeout(timeout2);
try {
rmSync(tmpDir, {
recursive: true,
force: true
});
} catch (err) {
debug(`Error deleting tmpDir: ${err}`);
}
};
child.on("error", (err) => {
cleanup();
debug(`Loader error: ${err.message}`);
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
try {
rmSync(outputTempDir, {
recursive: true,
force: true
});
} catch (cleanupErr) {
debug(`Error deleting output temp dir: ${cleanupErr}`);
}
resolvePromise();
});
else resolvePromise();
});
child.on("close", () => {
cleanup();
debug("Introspection process closed");
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
let stdoutBuffer;
try {
stdoutBuffer = readFileSync(tempFilePath, "utf8");
const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
if (beginIndex !== -1 && endIndex !== -1) {
const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
if (introspectionString) {
introspectionData = introspectionSchema.parse(JSON.parse(introspectionString));
debug("Introspection data parsed successfully");
}
} else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} catch (error) {
debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} finally {
try {
rmSync(outputTempDir, {
recursive: true,
force: true
});
} catch (err) {
debug(`Error deleting output temp directory: ${err}`);
}
resolvePromise();
}
});
else resolvePromise();
});
} catch (error) {
debug("Introspection error", error);
resolvePromise();
}
});
const { handler: handler$1 } = await getBuildSummary(buildResult.rolldownResult.outputDir);
if (!introspectionData) {
introspectionSpan.setAttributes({
"introspection.success": "false",
"introspection.routes": "0"
});
return defaultResult$1;
}
const additionalFolders = (introspectionData.additionalFolders ?? []).map((val) => {
if (isAbsolute(val)) return relative(args.workPath, val);
return val;
});
introspectionSpan.setAttributes({
"introspection.success": "true",
"introspection.routes": String(introspectionData.routes.length)
});
return {
dir: buildResult.rolldownResult.outputDir,
handler: handler$1,
files: buildResult.rolldownResult.outputFiles
routes: introspectionData.routes,
additionalFolders,
additionalDeps: introspectionData.additionalDeps ?? []
};
};
return introspectionSpan.trace(runIntrospection);
};
//#endregion
//#region src/build.ts
const maybeDoBuildCommand = async (args, downloadResult) => {
const buildCommandResult = await maybeExecBuildCommand(args, downloadResult);
const outputSetting = args.config.outputDirectory;
let outputDir;
let entrypoint;
if (buildCommandResult && outputSetting) if (outputSetting) {
const _outputDir = join$1(args.workPath, outputSetting);
const _entrypoint = await findEntrypoint(_outputDir);
if (_entrypoint) {
outputDir = _outputDir;
entrypoint = _entrypoint;
}
} else for (const outputDirectory of [
"dist",
"build",
"output"
]) {
const _outputDir = join$1(args.workPath, outputDirectory);
if (existsSync(_outputDir)) {
const _entrypoint = await findEntrypoint(_outputDir);
if (_entrypoint) {
outputDir = _outputDir;
entrypoint = _entrypoint;
break;
}
}
}
const cervelJsonPath = join$1(outputDir, ".cervel.json");
if (existsSync(cervelJsonPath)) {
const { handler: handler$1 } = await getBuildSummary(outputDir);
return {
dir: outputDir,
handler: handler$1
};
const localBuildFiles = /* @__PURE__ */ new Set();
let files;
if (outputDir && entrypoint) {
files = await glob("**", outputDir);
for (const file of Object.keys(files)) localBuildFiles.add(join$1(outputDir, file));
}
let handler;
return {
localBuildFiles,
files,
handler: entrypoint,
outputDir
};
};
//#endregion
//#region src/typescript.ts
const require_ = createRequire(import.meta.url);
/**
 * Runs a TypeScript typecheck for the entrypoint inside a tracing span.
 * Non-TS entrypoints are skipped silently; when no `tsc` binary can be
 * resolved from the project, a skip notice is logged and null is returned.
 *
 * @param {{ entrypoint: string, workPath: string, span: Span }} args
 * @returns {Promise<void|null>} resolves when the typecheck completes or is skipped
 */
const typescript = (args) => {
  const { span } = args;
  const tsCompileSpan = span.child("vc.builder.backends.tsCompile");
  return tsCompileSpan.trace(async () => {
    const tsExtensions = [".ts", ".mts", ".cts"];
    if (!tsExtensions.includes(extname$1(args.entrypoint))) return;
    const tscPath = resolveTscPath(args);
    if (tscPath) return doTypeCheck(args, tscPath);
    console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck skipped ${Colors.gray("(TypeScript not found)")}`));
    return null;
  });
};
/**
 * Type-checks the project by spawning `tsc --noEmit` as a child process and
 * replaying its output on failure.
 *
 * @param args - build args; reads `workPath` (spawn cwd / tsconfig search root) and `entrypoint`
 * @param tscPath - resolved path to typescript/bin/tsc (caller guarantees it is non-null)
 * @returns {Promise<void>} resolves on exit code 0
 * @throws {Error} "TypeScript type check failed" when tsc exits non-zero, or the spawn error itself
 */
async function doTypeCheck(args, tscPath) {
// Accumulators for tsc output so it can be echoed back if the check fails.
let stdout = "";
let stderr = "";
/**
* This might be subject to change.
* - if no tscPath, skip typecheck
* - if tsconfig, provide the tsconfig path
* - else provide the entrypoint path
*/
const tscArgs = [
tscPath,
"--noEmit",
"--pretty",
"--allowJs",
"--esModuleInterop",
"--skipLibCheck"
];
const tsconfig = await findNearestTsconfig(args.workPath);
// Prefer the project's tsconfig; fall back to checking just the entrypoint file.
if (tsconfig) tscArgs.push("--project", tsconfig);
else tscArgs.push(args.entrypoint);
// Run tsc via the current Node binary; stdin is ignored, stdout/stderr captured.
const child = spawn$1(process.execPath, tscArgs, {
cwd: args.workPath,
stdio: [
"ignore",
"pipe",
"pipe"
]
});
child.stdout?.on("data", (data) => {
stdout += data.toString();
});
child.stderr?.on("data", (data) => {
stderr += data.toString();
});
// Wrap the child's lifecycle in a promise: resolve on clean exit, reject otherwise.
await new Promise((resolve$1, reject) => {
child.on("close", (code) => {
if (code === 0) {
console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck complete`));
resolve$1();
} else {
// tsc with --pretty writes diagnostics to stdout; stderr is the fallback.
const output = stdout || stderr;
if (output) {
console.error("\nTypeScript type check failed:\n");
console.error(output);
}
reject(/* @__PURE__ */ new Error("TypeScript type check failed"));
}
});
// Spawn-level failures (e.g. ENOENT) surface directly to the caller.
child.on("error", (err) => {
reject(err);
});
});
}
const resolveTscPath = (args) => {
try {
handler = await findEntrypoint(outputDir);
} catch (error) {
handler = await findEntrypoint(outputDir, { ignoreRegex: true });
return require_.resolve("typescript/bin/tsc", { paths: [args.workPath] });
} catch (e) {
return null;
}
await writeFile(cervelJsonPath, JSON.stringify({ handler }, null, 2));
};
/**
 * Walks upward from `workPath` to the filesystem root looking for the
 * nearest tsconfig.json.
 *
 * @param {string} workPath - directory to start the search from
 * @returns {Promise<string|undefined>} absolute tsconfig.json path, or undefined if none found
 */
const findNearestTsconfig = async (workPath) => {
  const tsconfigPath = join$1(workPath, "tsconfig.json");
  if (existsSync(tsconfigPath)) return tsconfigPath;
  const parentPath = join$1(workPath, "..");
  // Stop at the filesystem root: joining ".." no longer changes the path.
  // (The previous `workPath === "/"` check never matched Windows roots like
  // "C:\\", so the recursion there never terminated.)
  if (parentPath === workPath) return;
  return findNearestTsconfig(parentPath);
};
//#endregion
//#region src/introspection/index.ts
const require = createRequire(import.meta.url);
const introspectApp = async (args) => {
const { span } = args;
const introspectionSpan = span.child("vc.builder.backends.introspection");
if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult(args);
const cjsLoaderPath = require.resolve("@vercel/backends/introspection/loaders/cjs");
const rolldownEsmLoaderPath = `file://${require.resolve("@vercel/backends/introspection/loaders/rolldown-esm")}`;
const handlerPath = join$1(args.dir, args.handler);
const introspectionSchema$1 = z.object({
frameworkSlug: z.string().optional(),
routes: z.array(z.object({
src: z.string(),
dest: z.string(),
methods: z.array(z.string())
})),
additionalFolders: z.array(z.string()).optional().transform((values) => {
return values?.map((val) => {
if (isAbsolute(val)) return relative(args.dir, val);
return val;
});
}),
additionalDeps: z.array(z.string()).optional()
});
let introspectionData;
await new Promise((resolvePromise) => {
try {
debug("Spawning introspection process");
const child = spawn$1("node", [
"-r",
cjsLoaderPath,
"--import",
rolldownEsmLoaderPath,
handlerPath
], {
stdio: [
"pipe",
"pipe",
"pipe"
],
cwd: args.dir,
env: {
...process.env,
...args.env
}
});
const tempDir = mkdtempSync(join$1(tmpdir(), "introspection-"));
const tempFilePath = join$1(tempDir, "output.txt");
const writeStream = createWriteStream(tempFilePath);
let streamClosed = false;
child.stdout?.pipe(writeStream);
let stderrBuffer = "";
child.stderr?.on("data", (data) => {
stderrBuffer += data.toString();
});
writeStream.on("error", (err) => {
debug(`Write stream error: ${err.message}`);
});
const timeout = setTimeout(() => {
debug("Introspection timeout, killing process with SIGTERM");
child.kill("SIGTERM");
}, 8e3);
const timeout2 = setTimeout(() => {
debug("Introspection timeout, killing process with SIGKILL");
child.kill("SIGKILL");
}, 9e3);
child.on("error", (err) => {
clearTimeout(timeout);
clearTimeout(timeout2);
debug(`Loader error: ${err.message}`);
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
try {
unlinkSync(tempFilePath);
} catch (cleanupErr) {
debug(`Error deleting temp file on error: ${cleanupErr}`);
}
resolvePromise(void 0);
});
else resolvePromise(void 0);
});
child.on("close", () => {
clearTimeout(timeout);
clearTimeout(timeout2);
debug("Introspection process closed");
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
let stdoutBuffer;
try {
stdoutBuffer = readFileSync(tempFilePath, "utf8");
const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
if (beginIndex !== -1 && endIndex !== -1) {
const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
if (introspectionString) {
introspectionData = introspectionSchema$1.parse(JSON.parse(introspectionString));
debug("Introspection data parsed successfully");
}
} else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} catch (error) {
debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} finally {
try {
rmSync(tempDir, {
recursive: true,
force: true
});
} catch (err) {
debug(`Error deleting temp directory: ${err}`);
}
resolvePromise(void 0);
}
});
else resolvePromise(void 0);
});
} catch (error) {
debug("Introspection error", error);
resolvePromise(void 0);
}
});
const framework = getFramework(args);
if (!introspectionData) {
introspectionSpan.setAttributes({
"introspection.success": "false",
"introspection.routes": "0"
});
return defaultResult(args);
}
const routes = [
{ handle: "filesystem" },
...introspectionData.routes,
{
src: "/(.*)",
dest: "/"
}
];
introspectionSpan.setAttributes({
"introspection.success": "true",
"introspection.routes": String(introspectionData.routes.length),
"introspection.framework": introspectionData.frameworkSlug ?? ""
});
return {
dir: outputDir,
handler
routes,
framework,
additionalFolders: introspectionData.additionalFolders ?? [],
additionalDeps: introspectionData.additionalDeps ?? []
};
};
/**
 * Fallback introspection result: filesystem-first routing with a catch-all
 * rewrite to "/", plus whatever framework metadata can be derived from args.
 */
const defaultResult = (args) => {
  const catchAllRoute = { src: "/(.*)", dest: "/" };
  const routes = [{ handle: "filesystem" }, catchAllRoute];
  return { routes, framework: getFramework(args) };
};
/**
 * Resolves framework metadata ({ slug, version }) for the configured
 * framework by locating its installed package.json relative to `args.dir`.
 * On any failure the version falls back to "" and the error is logged via debug.
 *
 * @param {{ framework?: string, dir: string }} args
 * @returns {{ slug: string, version: string }}
 */
const getFramework = (args) => {
  const slug = args.framework ?? "";
  try {
    let resolvedVersion;
    if (args.framework) {
      const libPath = require.resolve(`${args.framework}`, { paths: [args.dir] });
      // Walk upward from the resolved entry file to the owning package.json.
      let candidateDir = libPath;
      let packageJsonPath;
      for (;;) {
        const probe = join$1(candidateDir, "package.json");
        if (existsSync(probe)) {
          packageJsonPath = probe;
          break;
        }
        const parent = dirname$1(candidateDir);
        if (parent === candidateDir) break;
        candidateDir = parent;
      }
      if (packageJsonPath) resolvedVersion = require(packageJsonPath).version;
    }
    return { slug, version: resolvedVersion ?? "" };
  } catch (error) {
    debug(`Error getting framework for ${args.framework}. Setting framework version to empty string.`, error);
    return { slug, version: "" };
  }
};

@@ -981,44 +1437,65 @@ //#endregion

span.setAttributes({ "builder.name": builderName });
const entrypoint = await findEntrypoint(args.workPath);
debug("Entrypoint", entrypoint);
const buildSpan = span.child("vc.builder.backends.build");
const introspectionSpan = span.child("vc.builder.backends.introspectApp");
const [buildResult, introspectionResult] = await Promise.all([buildSpan.trace(() => doBuild(args, downloadResult, buildSpan)), introspectionSpan.trace(() => introspectApp({
handler: entrypoint,
dir: args.workPath,
framework: args.config.framework,
env: {
...args.meta?.env ?? {},
...args.meta?.buildEnv ?? {}
},
span: introspectionSpan
}))]);
const files = buildResult.files;
const { routes, framework } = introspectionResult;
if (routes.length > 2) debug(`Introspection completed successfully with ${routes.length} routes`);
else debug(`Introspection failed to detect routes`);
const handler = buildResult.handler;
if (!files) throw new Error("Unable to trace files for build");
const lambda = new NodejsLambda({
runtime: nodeVersion.runtime,
handler,
files,
shouldAddHelpers: false,
shouldAddSourcemapSupport: true,
framework: {
slug: framework?.slug ?? "",
version: framework?.version ?? ""
},
awsLambdaHandler: "",
shouldDisableAutomaticFetchInstrumentation: process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION === "1"
return buildSpan.trace(async () => {
const entrypoint = await findEntrypointOrThrow(args.workPath);
debug("Entrypoint", entrypoint);
args.entrypoint = entrypoint;
const userBuildResult = await maybeDoBuildCommand(args, downloadResult);
const rolldownResult = await rolldown({
...args,
span: buildSpan
});
const introspectionPromise = introspection({
...args,
span: buildSpan,
files: rolldownResult.files,
handler: rolldownResult.handler
});
const typescriptPromise = typescript({
entrypoint,
workPath: args.workPath,
span: buildSpan
});
const localBuildFiles = userBuildResult?.localBuildFiles.size > 0 ? userBuildResult?.localBuildFiles : rolldownResult.localBuildFiles;
const files = userBuildResult?.files || rolldownResult.files;
const handler = userBuildResult?.handler || rolldownResult.handler;
const nftWorkPath = userBuildResult?.outputDir || args.workPath;
await nft({
...args,
workPath: nftWorkPath,
localBuildFiles,
files,
ignoreNodeModules: false,
span: buildSpan
});
const introspectionResult = await introspectionPromise;
await typescriptPromise;
const lambda = new NodejsLambda({
runtime: nodeVersion.runtime,
handler,
files,
framework: rolldownResult.framework,
shouldAddHelpers: false,
shouldAddSourcemapSupport: true,
awsLambdaHandler: "",
shouldDisableAutomaticFetchInstrumentation: process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION === "1"
});
const routes = [
{ handle: "filesystem" },
...introspectionResult.routes,
{
src: "/(.*)",
dest: "/"
}
];
const output = { index: lambda };
for (const route of routes) if (route.dest) {
if (route.dest === "/") continue;
output[route.dest] = lambda;
}
return {
routes,
output
};
});
const output = { index: lambda };
for (const route of routes) if (route.dest) {
if (route.dest === "/") continue;
output[route.dest] = lambda;
}
return {
routes,
output
};
};

@@ -1030,2 +1507,2 @@ const prepareCache = ({ repoRootPath, workPath }) => {

//#endregion
export { build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };
export { build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, findEntrypointOrThrow, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };

@@ -6,6 +6,5 @@ /// <reference types="node" resolution-mode="require"/>

//#region src/cervel/find-entrypoint.d.ts
declare const findEntrypoint: (cwd: string, options?: {
ignoreRegex?: boolean;
}) => Promise<string>;
//#region src/find-entrypoint.d.ts
declare const findEntrypoint: (cwd: string) => Promise<string | undefined>;
declare const findEntrypointOrThrow: (cwd: string) => Promise<string>;
//#endregion

@@ -107,2 +106,2 @@ //#region src/cervel/types.d.ts

//#endregion
export { type CervelBuildOptions, type CervelServeOptions, type PathOptions, build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };
export { type CervelBuildOptions, type CervelServeOptions, type PathOptions, build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, findEntrypointOrThrow, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };
import { builtinModules, createRequire } from "node:module";
import { delimiter, dirname, extname, join } from "path";
import { FileFsRef, NodejsLambda, Span, debug, defaultCachePathGlob, download, execCommand, getEnvForPackageManager, getNodeBinPaths, getNodeVersion, getPackageJson, getScriptName, glob, isExperimentalBackendsWithoutIntrospectionEnabled, runNpmInstall, runPackageJsonScript, scanParentDirs } from "@vercel/build-utils";
import { FileBlob, FileFsRef, NodejsLambda, Span, debug, defaultCachePathGlob, download, execCommand, getEnvForPackageManager, getNodeBinPaths, getNodeVersion, glob, isBackendFramework, isExperimentalBackendsWithoutIntrospectionEnabled, runNpmInstall, runPackageJsonScript, scanParentDirs } from "@vercel/build-utils";
import { createWriteStream, existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, unlinkSync, writeFileSync } from "node:fs";
import { lstat, readFile, rm } from "node:fs/promises";
import { dirname as dirname$1, extname as extname$1, isAbsolute, join as join$1, relative } from "node:path";
import { createWriteStream, existsSync, mkdtempSync, readFileSync, rmSync, unlinkSync } from "node:fs";
import { spawn } from "node:child_process";
import { tmpdir } from "node:os";
import { z } from "zod";
import { lstat, readFile, rm, writeFile } from "node:fs/promises";
import { build as build$2 } from "rolldown";

@@ -16,6 +13,9 @@ import { exports } from "resolve.exports";

import { createRequire as createRequire$1 } from "module";
import { spawn as spawn$1 } from "child_process";
import { spawn } from "child_process";
import { existsSync as existsSync$1 } from "fs";
import execa from "execa";
import { readFile as readFile$1, writeFile as writeFile$1 } from "fs/promises";
import { readFile as readFile$1, writeFile } from "fs/promises";
import { spawn as spawn$1 } from "node:child_process";
import { tmpdir } from "node:os";
import { z } from "zod";

@@ -69,195 +69,4 @@ //#region src/utils.ts

//#endregion
//#region src/introspection/util.ts
// Sentinel markers framing the JSON introspection payload on the spawned
// process's stdout; readers slice the text between them before JSON.parse.
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
//#endregion
//#region src/introspection/index.ts
const require$1 = createRequire(import.meta.url);
// Runs the built handler in a child Node process with introspection loaders
// attached, captures its stdout to a temp file, and parses the route
// metadata printed between BEGIN/END_INTROSPECTION_RESULT markers.
// Returns { routes, framework, additionalFolders?, additionalDeps? };
// falls back to defaultResult(args) on any failure or timeout.
const introspectApp = async (args) => {
const { span } = args;
const introspectionSpan = span.child("vc.builder.backends.introspection");
// Feature flag: skip the child-process introspection entirely.
if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult(args);
const cjsLoaderPath = require$1.resolve("@vercel/backends/introspection/loaders/cjs");
const rolldownEsmLoaderPath = `file://${require$1.resolve("@vercel/backends/introspection/loaders/rolldown-esm")}`;
const handlerPath = join$1(args.dir, args.handler);
// Shape of the JSON blob the loaders emit between the markers.
const introspectionSchema = z.object({
frameworkSlug: z.string().optional(),
routes: z.array(z.object({
src: z.string(),
dest: z.string(),
methods: z.array(z.string())
})),
// Absolute folder paths are normalized to be relative to args.dir.
additionalFolders: z.array(z.string()).optional().transform((values) => {
return values?.map((val) => {
if (isAbsolute(val)) return relative(args.dir, val);
return val;
});
}),
additionalDeps: z.array(z.string()).optional()
});
let introspectionData;
// This promise always resolves, never rejects: every failure path just
// leaves introspectionData undefined so we fall back to defaultResult.
await new Promise((resolvePromise) => {
try {
debug("Spawning introspection process");
const child = spawn("node", [
"-r",
cjsLoaderPath,
"--import",
rolldownEsmLoaderPath,
handlerPath
], {
stdio: [
"pipe",
"pipe",
"pipe"
],
cwd: args.dir,
env: {
...process.env,
...args.env
}
});
// Stream stdout into a temp file rather than buffering it in memory.
const tempDir = mkdtempSync(join$1(tmpdir(), "introspection-"));
const tempFilePath = join$1(tempDir, "output.txt");
const writeStream = createWriteStream(tempFilePath);
let streamClosed = false;
child.stdout?.pipe(writeStream);
let stderrBuffer = "";
child.stderr?.on("data", (data) => {
stderrBuffer += data.toString();
});
writeStream.on("error", (err) => {
debug(`Write stream error: ${err.message}`);
});
// Two-stage kill: SIGTERM at 8s, escalate to SIGKILL at 9s.
const timeout = setTimeout(() => {
debug("Introspection timeout, killing process with SIGTERM");
child.kill("SIGTERM");
}, 8e3);
const timeout2 = setTimeout(() => {
debug("Introspection timeout, killing process with SIGKILL");
child.kill("SIGKILL");
}, 9e3);
child.on("error", (err) => {
clearTimeout(timeout);
clearTimeout(timeout2);
debug(`Loader error: ${err.message}`);
// Flush the write stream before deleting the temp file, but only once
// (streamClosed guards against double-end across error/close events).
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
try {
unlinkSync(tempFilePath);
} catch (cleanupErr) {
debug(`Error deleting temp file on error: ${cleanupErr}`);
}
resolvePromise(void 0);
});
else resolvePromise(void 0);
});
child.on("close", () => {
clearTimeout(timeout);
clearTimeout(timeout2);
debug("Introspection process closed");
if (!streamClosed) writeStream.end(() => {
streamClosed = true;
let stdoutBuffer;
try {
stdoutBuffer = readFileSync(tempFilePath, "utf8");
// Extract the JSON payload between the sentinel markers.
const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
if (beginIndex !== -1 && endIndex !== -1) {
const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
if (introspectionString) {
introspectionData = introspectionSchema.parse(JSON.parse(introspectionString));
debug("Introspection data parsed successfully");
}
} else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} catch (error) {
debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
} finally {
// Always remove the temp dir before resolving, even on parse failure.
try {
rmSync(tempDir, {
recursive: true,
force: true
});
} catch (err) {
debug(`Error deleting temp directory: ${err}`);
}
resolvePromise(void 0);
}
});
else resolvePromise(void 0);
});
} catch (error) {
debug("Introspection error", error);
resolvePromise(void 0);
}
});
const framework = getFramework(args);
if (!introspectionData) {
introspectionSpan.setAttributes({
"introspection.success": "false",
"introspection.routes": "0"
});
return defaultResult(args);
}
// Detected routes go between the filesystem handler and a catch-all
// rewrite of everything else to "/".
const routes = [
{ handle: "filesystem" },
...introspectionData.routes,
{
src: "/(.*)",
dest: "/"
}
];
introspectionSpan.setAttributes({
"introspection.success": "true",
"introspection.routes": String(introspectionData.routes.length),
"introspection.framework": introspectionData.frameworkSlug ?? ""
});
return {
routes,
framework,
additionalFolders: introspectionData.additionalFolders ?? [],
additionalDeps: introspectionData.additionalDeps ?? []
};
};
/**
 * Minimal introspection result used when introspection is disabled or
 * produced no data: a filesystem handler followed by a catch-all rewrite
 * to "/", plus whatever framework metadata getFramework can detect.
 */
const defaultResult = (args) => ({
  routes: [
    { handle: "filesystem" },
    { src: "/(.*)", dest: "/" },
  ],
  framework: getFramework(args),
});
/**
 * Detects the framework slug/version for the app in args.dir.
 * Resolves args.framework from args.dir, walks up from the resolved
 * module path to the nearest package.json, and reads its version.
 * Any failure is logged via debug and yields an empty version string.
 */
const getFramework = (args) => {
  const slug = args.framework ?? "";
  try {
    let detectedVersion;
    if (args.framework) {
      const libPath = require$1.resolve(`${args.framework}`, { paths: [args.dir] });
      // Walk up from the resolved entry path until a package.json appears
      // or the filesystem root is reached.
      let dir = libPath;
      let pkgJsonPath;
      for (;;) {
        const candidate = join$1(dir, "package.json");
        if (existsSync(candidate)) {
          pkgJsonPath = candidate;
          break;
        }
        const parent = dirname$1(dir);
        if (parent === dir) break;
        dir = parent;
      }
      if (pkgJsonPath) detectedVersion = require$1(pkgJsonPath).version;
    }
    return {
      slug,
      version: detectedVersion ?? ""
    };
  } catch (error) {
    debug(`Error getting framework for ${args.framework}. Setting framework version to empty string.`, error);
    return {
      slug,
      version: ""
    };
  }
};
//#endregion
//#region src/cervel/plugin.ts
const CJS_SHIM_PREFIX = "\0cjs-shim:";
const CJS_SHIM_PREFIX$1 = "\0cjs-shim:";
const plugin = (args) => {

@@ -273,3 +82,3 @@ const packageJsonCache = /* @__PURE__ */ new Map();

*/
const getPackageJson$1 = async (pkgPath) => {
const getPackageJson = async (pkgPath) => {
if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);

@@ -296,3 +105,3 @@ try {

if (!pkgJsonPath) return true;
const pkgJson = await getPackageJson$1(pkgJsonPath);
const pkgJson = await getPackageJson(pkgJsonPath);
if (!pkgJson) return true;

@@ -331,3 +140,3 @@ const pkgDir = dirname$1(pkgJsonPath);

async handler(id, importer, rOpts) {
if (id.startsWith(CJS_SHIM_PREFIX)) return {
if (id.startsWith(CJS_SHIM_PREFIX$1)) return {
id,

@@ -342,3 +151,3 @@ external: false

if (resolved?.id && isLocalImport(resolved.id)) tracedPaths.add(resolved.id);
if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
if (importer?.startsWith(CJS_SHIM_PREFIX$1) && isBareImport(id)) return {
id,

@@ -353,3 +162,3 @@ external: true

const importerPkgDir = relative(args.repoRootPath, dirname$1(importerPkgJsonPath));
const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
const shimId$1 = `${CJS_SHIM_PREFIX$1}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
shimMeta.set(shimId$1, {

@@ -364,3 +173,3 @@ pkgDir: importerPkgDir,

}
const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
const shimId = `${CJS_SHIM_PREFIX$1}${id.replace(/\//g, "_")}`;
shimMeta.set(shimId, {

@@ -390,3 +199,3 @@ pkgDir: "",

load: { async handler(id) {
if (id.startsWith(CJS_SHIM_PREFIX)) {
if (id.startsWith(CJS_SHIM_PREFIX$1)) {
const meta = shimMeta.get(id);

@@ -476,3 +285,3 @@ if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };

`.trim();
const rolldown = async (args) => {
const rolldown$1 = async (args) => {
const entrypointPath = join$1(args.workPath, args.entrypoint);

@@ -612,4 +421,4 @@ const outputDir = join$1(args.workPath, args.out);

//#region src/cervel/typescript.ts
const require_ = createRequire$1(import.meta.url);
const typescript = (args) => {
const require_$1 = createRequire$1(import.meta.url);
const typescript$1 = (args) => {
const { span } = args;

@@ -623,3 +432,3 @@ return span.child("vc.builder.backends.tsCompile").trace(async () => {

].includes(extension)) return;
const tscPath = resolveTscPath(args);
const tscPath = resolveTscPath$1(args);
if (!tscPath) {

@@ -629,6 +438,6 @@ console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck skipped ${Colors.gray("(TypeScript not found)")}`));

}
return doTypeCheck(args, tscPath);
return doTypeCheck$1(args, tscPath);
});
};
async function doTypeCheck(args, tscPath) {
async function doTypeCheck$1(args, tscPath) {
let stdout = "";

@@ -650,6 +459,6 @@ let stderr = "";

];
const tsconfig = await findNearestTsconfig(args.workPath);
const tsconfig = await findNearestTsconfig$1(args.workPath);
if (tsconfig) tscArgs.push("--project", tsconfig);
else tscArgs.push(args.entrypoint);
const child = spawn$1(process.execPath, tscArgs, {
const child = spawn(process.execPath, tscArgs, {
cwd: args.workPath,

@@ -687,5 +496,5 @@ stdio: [

}
const resolveTscPath = (args) => {
const resolveTscPath$1 = (args) => {
try {
return require_.resolve("typescript/bin/tsc", { paths: [args.workPath] });
return require_$1.resolve("typescript/bin/tsc", { paths: [args.workPath] });
} catch (e) {

@@ -695,11 +504,11 @@ return null;

};
const findNearestTsconfig = async (workPath) => {
const findNearestTsconfig$1 = async (workPath) => {
const tsconfigPath = join(workPath, "tsconfig.json");
if (existsSync$1(tsconfigPath)) return tsconfigPath;
if (workPath === "/") return;
return findNearestTsconfig(join(workPath, ".."));
return findNearestTsconfig$1(join(workPath, ".."));
};
//#endregion
//#region src/cervel/find-entrypoint.ts
//#region src/find-entrypoint.ts
const frameworks = [

@@ -717,3 +526,7 @@ "express",

"server",
"main"
"main",
"src/app",
"src/index",
"src/server",
"src/main"
];

@@ -730,48 +543,36 @@ const entrypointExtensions = [

const createFrameworkRegex = (framework) => new RegExp(`(?:from|require|import)\\s*(?:\\(\\s*)?["']${framework}["']\\s*(?:\\))?`, "g");
const findEntrypoint = async (cwd, options) => {
if (options?.ignoreRegex ?? false) {
for (const entrypoint of entrypoints) if (existsSync(join$1(cwd, entrypoint))) return entrypoint;
for (const entrypoint of entrypoints) if (existsSync(join$1(cwd, "src", entrypoint))) return join$1("src", entrypoint);
throw new Error("No entrypoint file found");
}
const packageJson = await readFile(join$1(cwd, "package.json"), "utf-8");
const packageJsonObject = JSON.parse(packageJson);
const framework = frameworks.find((framework$1) => packageJsonObject.dependencies?.[framework$1]);
if (!framework) {
for (const entrypoint of entrypoints) {
const entrypointPath = join$1(cwd, entrypoint);
try {
await readFile(entrypointPath, "utf-8");
return entrypoint;
} catch (e) {
continue;
}
}
throw new Error("No entrypoint or framework found");
}
const regex = createFrameworkRegex(framework);
for (const entrypoint of entrypoints) {
const findEntrypoint = async (cwd) => {
let framework;
try {
const packageJson = await readFile(join$1(cwd, "package.json"), "utf-8");
const packageJsonObject = JSON.parse(packageJson);
framework = frameworks.find((framework$1) => packageJsonObject.dependencies?.[framework$1]);
} catch (_) {}
if (!framework) for (const entrypoint of entrypoints) {
const entrypointPath = join$1(cwd, entrypoint);
try {
const content = await readFile(entrypointPath, "utf-8");
if (regex.test(content)) return entrypoint;
} catch (e) {
continue;
}
await readFile(entrypointPath, "utf-8");
return entrypoint;
} catch (_) {}
}
const regex = framework ? createFrameworkRegex(framework) : void 0;
for (const entrypoint of entrypoints) {
const entrypointPath = join$1(cwd, "src", entrypoint);
const entrypointPath = join$1(cwd, entrypoint);
try {
const content = await readFile(entrypointPath, "utf-8");
if (regex.test(content)) return join$1("src", entrypoint);
} catch (e) {
continue;
}
if (regex) {
if (regex.test(content)) return entrypoint;
}
} catch (_) {}
}
throw new Error("No entrypoint found");
};
const findEntrypointOrThrow = async (cwd) => {
const entrypoint = await findEntrypoint(cwd);
if (!entrypoint) throw new Error(`No entrypoint found in "${cwd}". Expected one of: ${entrypoints.join(", ")}`);
return entrypoint;
};
//#endregion
//#region src/cervel/index.ts
const require = createRequire(import.meta.url);
const require$2 = createRequire(import.meta.url);
const getBuildSummary = async (outputDir) => {

@@ -782,9 +583,9 @@ const buildSummary = await readFile$1(join$1(outputDir, ".cervel.json"), "utf-8");

const build$1 = async (args) => {
const entrypoint = args.entrypoint || await findEntrypoint(args.workPath);
const entrypoint = args.entrypoint || await findEntrypointOrThrow(args.workPath);
const span = args.span ?? new Span({ name: "cervel-build" });
const [, rolldownResult] = await Promise.all([typescript({
const [, rolldownResult] = await Promise.all([typescript$1({
entrypoint,
workPath: args.workPath,
span
}), rolldown({
}), rolldown$1({
entrypoint,

@@ -796,3 +597,3 @@ workPath: args.workPath,

})]);
await writeFile$1(join$1(args.workPath, args.out, ".cervel.json"), JSON.stringify({ handler: rolldownResult.result.handler }, null, 2));
await writeFile(join$1(args.workPath, args.out, ".cervel.json"), JSON.stringify({ handler: rolldownResult.result.handler }, null, 2));
console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Build complete`));

@@ -802,5 +603,5 @@ return { rolldownResult: rolldownResult.result };

const serve = async (args) => {
const entrypoint = await findEntrypoint(args.workPath);
const srvxBin = join$1(require.resolve("srvx"), "..", "..", "..", "bin", "srvx.mjs");
const tsxBin = require.resolve("tsx");
const entrypoint = await findEntrypointOrThrow(args.workPath);
const srvxBin = join$1(require$2.resolve("srvx"), "..", "..", "..", "bin", "srvx.mjs");
const tsxBin = require$2.resolve("tsx");
const restArgs = Object.entries(args.rest).filter(([, value]) => value !== void 0 && value !== false).map(([key, value]) => typeof value === "boolean" ? `--${key}` : `--${key}=${value}`);

@@ -846,126 +647,781 @@ if (!args.rest.import) restArgs.push("--import", tsxBin);

//#endregion
//#region src/build.ts
const defaultOutputDirectory = join$1(".vercel", "node");
const doBuild = async (args, downloadResult, span) => {
const buildCommandResult = await maybeExecBuildCommand(args, downloadResult);
const outputSetting = args.config.outputDirectory;
const buildCommand = args.config.projectSettings?.buildCommand;
const isCervelCommand = buildCommand?.trim().startsWith("cervel");
if (!outputSetting) {
debug("No output directory configured, using default output directory");
if (isCervelCommand) {
debug("Cervel command ran, using its default output location");
const cervelOutputDir = join$1(args.workPath, "dist");
if (existsSync(join$1(cervelOutputDir, ".cervel.json"))) {
debug("Cervel JSON file found, using its handler");
const { handler: handler$2 } = await getBuildSummary(cervelOutputDir);
return {
dir: cervelOutputDir,
handler: handler$2
};
}
throw new Error(`Build command "${buildCommand}" completed, but no output was found at ${cervelOutputDir}. Make sure your cervel command is configured correctly.`);
//#region src/rolldown/resolve-format.ts
const resolveEntrypointAndFormat = async (args) => {
const extension = extname$1(args.entrypoint);
const extensionMap = {
".ts": {
format: "auto",
extension: "js"
},
".mts": {
format: "esm",
extension: "mjs"
},
".cts": {
format: "cjs",
extension: "cjs"
},
".cjs": {
format: "cjs",
extension: "cjs"
},
".js": {
format: "auto",
extension: "js"
},
".mjs": {
format: "esm",
extension: "mjs"
}
const distDir = join$1(args.workPath, "dist");
if (existsSync(distDir)) {
debug("Dist directory found, checking for .cervel.json");
const cervelJsonPath$1 = join$1(distDir, ".cervel.json");
if (existsSync(cervelJsonPath$1)) {
const { handler: handler$3 } = await getBuildSummary(distDir);
return {
dir: distDir,
handler: handler$3
};
}
let handler$2;
try {
debug("Finding entrypoint in dist directory");
handler$2 = await findEntrypoint(distDir);
} catch (error) {
};
const extensionInfo = extensionMap[extension] || extensionMap[".js"];
let resolvedFormat = extensionInfo.format === "auto" ? void 0 : extensionInfo.format;
const packageJsonPath = join$1(args.workPath, "package.json");
let pkg = {};
if (existsSync(packageJsonPath)) {
const source = await readFile(packageJsonPath, "utf8");
try {
pkg = JSON.parse(source.toString());
} catch (_e) {
pkg = {};
}
if (extensionInfo.format === "auto") if (pkg.type === "module") resolvedFormat = "esm";
else resolvedFormat = "cjs";
}
if (!resolvedFormat) throw new Error(`Unable to resolve format for ${args.entrypoint}`);
return {
format: resolvedFormat,
extension: resolvedFormat === "esm" ? "mjs" : "cjs"
};
};
//#endregion
//#region src/rolldown/nft.ts
const nft = async (args) => {
const nftSpan = args.span.child("vc.builder.backends.nft");
const runNft = async () => {
const nftResult = await nodeFileTrace$1(Array.from(args.localBuildFiles), {
base: args.repoRootPath,
processCwd: args.workPath,
ts: true,
mixedModules: true,
ignore: args.ignoreNodeModules ? (path) => path.includes("node_modules") : void 0,
async readFile(fsPath) {
try {
debug("Finding entrypoint in dist directory with ignoreRegex");
handler$2 = await findEntrypoint(distDir, { ignoreRegex: true });
debug("Found entrypoint in dist directory with ignoreRegex", handler$2);
} catch (error$1) {
debug("Unable to detect entrypoint, building ourselves");
const buildResult$1 = await build$1({
workPath: args.workPath,
repoRootPath: args.repoRootPath,
out: defaultOutputDirectory,
span
});
const { handler: handler$3 } = await getBuildSummary(buildResult$1.rolldownResult.outputDir);
return {
dir: buildResult$1.rolldownResult.outputDir,
handler: handler$3,
files: buildResult$1.rolldownResult.outputFiles
};
let source = await readFile(fsPath);
if (isTypeScriptFile(fsPath)) source = (await transform(fsPath, source.toString())).code;
return source;
} catch (error) {
if (isNativeError(error) && "code" in error && (error.code === "ENOENT" || error.code === "EISDIR")) return null;
throw error;
}
}
await writeFile(cervelJsonPath$1, JSON.stringify({ handler: handler$2 }, null, 2));
const files = await nodeFileTrace({
keepTracedPaths: true,
tracedPaths: [join$1(distDir, handler$2)],
repoRootPath: args.repoRootPath,
workPath: args.workPath,
outDir: distDir,
span
});
for (const file of nftResult.fileList) {
const absolutePath = join$1(args.repoRootPath, file);
const stats = await lstat(absolutePath);
const outputPath = file;
if (args.localBuildFiles.has(join$1(args.repoRootPath, outputPath))) continue;
if (stats.isSymbolicLink() || stats.isFile()) if (args.ignoreNodeModules) {
const content = await readFile(absolutePath, "utf-8");
args.files[outputPath] = new FileBlob({
data: content,
mode: stats.mode
});
} else args.files[outputPath] = new FileFsRef({
fsPath: absolutePath,
mode: stats.mode
});
return {
dir: distDir,
handler: handler$2,
files
};
}
debug("No dist directory found, or unable to detect entrypoint, building ourselves");
const buildResult = await build$1({
workPath: args.workPath,
repoRootPath: args.repoRootPath,
out: defaultOutputDirectory,
span
});
const { handler: handler$1 } = await getBuildSummary(buildResult.rolldownResult.outputDir);
};
await nftSpan.trace(runNft);
};
const isTypeScriptFile = (fsPath) => {
return fsPath.endsWith(".ts") || fsPath.endsWith(".tsx") || fsPath.endsWith(".mts") || fsPath.endsWith(".cts");
};
//#endregion
//#region src/rolldown/index.ts
const PLUGIN_NAME = "vercel:backends";
const CJS_SHIM_PREFIX = "\0cjs-shim:";
const rolldown = async (args) => {
const files = {};
const { format, extension } = await resolveEntrypointAndFormat(args);
const localBuildFiles = /* @__PURE__ */ new Set();
let handler = null;
const packageJsonCache = /* @__PURE__ */ new Map();
const shimMeta = /* @__PURE__ */ new Map();
const framework = {
slug: "",
version: ""
};
const getPackageJson = async (pkgPath) => {
if (packageJsonCache.has(pkgPath)) return packageJsonCache.get(pkgPath);
try {
const contents = await readFile(pkgPath, "utf-8");
const parsed = JSON.parse(contents);
packageJsonCache.set(pkgPath, parsed);
return parsed;
} catch {
packageJsonCache.set(pkgPath, null);
return null;
}
};
const isCommonJS = async (bareImport, resolvedPath, resolvedInfo) => {
const ext = extname$1(resolvedPath);
if (ext === ".cjs") return true;
if (ext === ".mjs") return false;
if (ext === ".js" || ext === ".ts") {
const pkgJsonPath = resolvedInfo.packageJsonPath;
if (!pkgJsonPath) return true;
const pkgJson = await getPackageJson(pkgJsonPath);
if (!pkgJson) return true;
const pkgDir = dirname$1(pkgJsonPath);
const relativePath = resolvedPath.slice(pkgDir.length + 1).replace(/\\/g, "/");
const pkgName = pkgJson.name || "";
const subpath = bareImport.startsWith(pkgName) ? `.${bareImport.slice(pkgName.length)}` || "." : ".";
try {
if (exports(pkgJson, subpath, {
require: false,
conditions: ["node", "import"]
})?.some((p) => p === relativePath || p === `./${relativePath}`)) return false;
if (exports(pkgJson, subpath, {
require: true,
conditions: ["node", "require"]
})?.some((p) => p === relativePath || p === `./${relativePath}`)) return true;
} catch {}
if (pkgJson.module) return false;
return pkgJson.type !== "module";
}
return true;
};
const isBareImport = (id) => {
return !id.startsWith(".") && !id.startsWith("/") && !/^[a-z][a-z0-9+.-]*:/i.test(id);
};
const isNodeModule = (resolved) => {
return resolved?.id?.includes("node_modules") ?? false;
};
const isNodeBuiltin = (id) => {
const normalizedId = id.includes(":") ? id.split(":")[1] : id;
return builtinModules.includes(normalizedId);
};
const isLocalImport = (id) => {
return !id.startsWith("node:") && !id.includes("node_modules");
};
const plugin$1 = () => {
return {
dir: buildResult.rolldownResult.outputDir,
handler: handler$1,
files: buildResult.rolldownResult.outputFiles
name: PLUGIN_NAME,
resolveId: {
order: "pre",
async handler(id, importer, rOpts) {
if (id.startsWith(CJS_SHIM_PREFIX)) return {
id,
external: false
};
const resolved = await this.resolve(id, importer, rOpts);
if (isNodeBuiltin(id)) return {
id: id.startsWith("node:") ? id : `node:${id}`,
external: true
};
if (resolved?.id && isLocalImport(resolved.id)) localBuildFiles.add(resolved.id);
else if (!resolved) localBuildFiles.add(join$1(args.workPath, id));
if (importer?.startsWith(CJS_SHIM_PREFIX) && isBareImport(id)) return {
id,
external: true
};
if (importer && isBareImport(id) && isNodeModule(resolved)) {
if (isBackendFramework(id) && resolved?.packageJsonPath) try {
const pkg = await readFile(resolved.packageJsonPath, "utf8");
const pkgJson = JSON.parse(pkg);
framework.slug = pkgJson.name;
framework.version = pkgJson.version;
} catch {}
if (resolved ? await isCommonJS(id, resolved.id, resolved) : false) {
const importerPkgJsonPath = (await this.resolve(importer))?.packageJsonPath;
if (importerPkgJsonPath) {
const importerPkgDir = relative(args.repoRootPath, dirname$1(importerPkgJsonPath));
const shimId$1 = `${CJS_SHIM_PREFIX}${importerPkgDir.replace(/\//g, "_")}_${id.replace(/\//g, "_")}`;
shimMeta.set(shimId$1, {
pkgDir: importerPkgDir,
pkgName: id
});
return {
id: shimId$1,
external: false
};
}
const shimId = `${CJS_SHIM_PREFIX}${id.replace(/\//g, "_")}`;
shimMeta.set(shimId, {
pkgDir: "",
pkgName: id
});
return {
id: shimId,
external: false
};
}
return {
id,
external: true
};
}
if (importer && isBareImport(id)) return resolved;
if (resolved && !isNodeModule(resolved)) return resolved;
return resolved;
}
},
load: { async handler(id) {
if (id.startsWith(CJS_SHIM_PREFIX)) {
const meta = shimMeta.get(id);
if (!meta) return { code: `module.exports = require('${id.slice(10)}');` };
const { pkgDir, pkgName } = meta;
return { code: `
import { createRequire } from 'node:module';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
const requireFromContext = createRequire(join(dirname(fileURLToPath(import.meta.url)), '${pkgDir ? join$1("..", pkgDir, "package.json") : "../package.json"}'));
module.exports = requireFromContext('${pkgName}');
`.trim() };
}
return null;
} }
};
};
const runRolldown = () => build$2({
input: args.entrypoint,
write: false,
cwd: args.workPath,
platform: "node",
transform: { define: format === "esm" ? {
__dirname: "import.meta.dirname",
__filename: "import.meta.filename"
} : void 0 },
tsconfig: true,
plugins: [plugin$1()],
output: {
cleanDir: true,
format,
entryFileNames: `[name].${extension}`,
preserveModules: true,
preserveModulesRoot: args.repoRootPath,
sourcemap: false
}
});
const rolldownSpan = args.span?.child("vc.builder.backends.rolldown");
const out = await rolldownSpan?.trace(runRolldown) || await runRolldown();
for (const file of out.output) if (file.type === "chunk") {
if (file.isEntry) handler = file.fileName;
files[file.fileName] = new FileBlob({
data: file.code,
mode: 420
});
}
const outputDir = join$1(args.workPath, outputSetting);
const packageJson = await getPackageJson(args.workPath);
const monorepoWithoutBuildScript = args.config.projectSettings?.monorepoManager && !getScriptName(packageJson, ["build"]);
if (!buildCommandResult || monorepoWithoutBuildScript) {
const buildResult = await build$1({
workPath: args.workPath,
repoRootPath: args.repoRootPath,
out: outputDir,
span
await nft({
...args,
localBuildFiles,
files,
span: rolldownSpan ?? new Span({ name: "vc.builder.backends.nft" }),
ignoreNodeModules: true
});
if (!handler) throw new Error(`Unable to resolve build handler for entrypoint: ${args.entrypoint}`);
return {
files,
handler,
framework,
localBuildFiles
};
};
//#endregion
//#region src/rolldown/util.ts
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";
const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";
//#endregion
//#region src/rolldown/introspection.ts
const require$1 = createRequire(import.meta.url);
// Zod schema for the JSON payload the introspection child process prints
// between the BEGIN/END markers: detected routes plus optional extra
// folders/dependencies to include in the output.
const introspectionSchema = z.object({
routes: z.array(z.object({
src: z.string(),
dest: z.string(),
methods: z.array(z.string())
})),
additionalFolders: z.array(z.string()).optional(),
additionalDeps: z.array(z.string()).optional()
});
const introspection = async (args) => {
const defaultResult$1 = {
routes: [],
additionalFolders: [],
additionalDeps: []
};
if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult$1;
const introspectionSpan = args.span.child("vc.builder.backends.introspection");
const runIntrospection = async () => {
  // Loader hooks injected into the child process so the user's app reports
  // its routes (between the BEGIN/END markers on stdout) instead of serving.
  const rolldownEsmLoaderPath = `file://${require$1.resolve("@vercel/backends/rolldown/esm")}`;
  const rolldownCjsLoaderPath = require$1.resolve("@vercel/backends/rolldown/cjs-hooks");
  const handlerPath = join$1(args.workPath, args.entrypoint);
  const files = args.files;
  // Materialize in-memory FileBlob sources into a temp dir so the spawned
  // process (and its loader hooks) can read them from disk.
  const tmpDir = mkdtempSync(join$1(tmpdir(), "vercel-introspection-"));
  for (const [key, value] of Object.entries(files)) {
    if (!(value instanceof FileBlob) || typeof value.data !== "string") continue;
    const filePath = join$1(tmpDir, key);
    mkdirSync(dirname$1(filePath), { recursive: true });
    writeFileSync(filePath, value.data);
  }
  let introspectionData;
  await new Promise((resolvePromise) => {
    try {
      debug("Spawning introspection process");
      // Stream child stdout to a temp file rather than buffering in memory.
      const outputTempDir = mkdtempSync(join$1(tmpdir(), "introspection-output-"));
      const tempFilePath = join$1(outputTempDir, "output.txt");
      const writeStream = createWriteStream(tempFilePath);
      let streamClosed = false;
      const child = spawn$1("node", [
        "-r",
        rolldownCjsLoaderPath,
        "--import",
        rolldownEsmLoaderPath,
        handlerPath
      ], {
        stdio: ["pipe", "pipe", "pipe"],
        cwd: args.workPath,
        env: {
          ...process.env,
          ...args.meta?.buildEnv,
          ...args.meta?.env,
          VERCEL_INTROSPECTION_HANDLER: handlerPath,
          VERCEL_INTROSPECTION_HANDLER_BUILT: args.handler,
          VERCEL_INTROSPECTION_WORK_PATH: args.workPath,
          VERCEL_INTROSPECTION_REPO_ROOT_PATH: args.repoRootPath,
          VERCEL_INTROSPECTION_TMP_DIR: tmpDir
        }
      });
      child.stdout?.pipe(writeStream);
      let stderrBuffer = "";
      child.stderr?.on("data", (data) => {
        stderrBuffer += data.toString();
      });
      writeStream.on("error", (err) => {
        debug(`Write stream error: ${err.message}`);
      });
      // Escalating kill: SIGTERM after 8s, SIGKILL after 9s.
      const timeout = setTimeout(() => {
        debug("Introspection timeout, killing process with SIGTERM");
        child.kill("SIGTERM");
      }, 8e3);
      const timeout2 = setTimeout(() => {
        debug("Introspection timeout, killing process with SIGKILL");
        child.kill("SIGKILL");
      }, 9e3);
      // Shared teardown for both the error and close paths; rmSync with
      // force:true makes a second invocation harmless.
      const cleanup = () => {
        clearTimeout(timeout);
        clearTimeout(timeout2);
        try {
          rmSync(tmpDir, { recursive: true, force: true });
        } catch (err) {
          debug(`Error deleting tmpDir: ${err}`);
        }
      };
      child.on("error", (err) => {
        cleanup();
        debug(`Loader error: ${err.message}`);
        if (!streamClosed) {
          writeStream.end(() => {
            streamClosed = true;
            try {
              rmSync(outputTempDir, { recursive: true, force: true });
            } catch (cleanupErr) {
              debug(`Error deleting output temp dir: ${cleanupErr}`);
            }
            resolvePromise();
          });
        } else resolvePromise();
      });
      child.on("close", () => {
        cleanup();
        debug("Introspection process closed");
        if (!streamClosed) {
          writeStream.end(() => {
            streamClosed = true;
            let stdoutBuffer;
            try {
              stdoutBuffer = readFileSync(tempFilePath, "utf8");
              const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
              const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
              if (beginIndex !== -1 && endIndex !== -1) {
                const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
                if (introspectionString) {
                  introspectionData = introspectionSchema.parse(JSON.parse(introspectionString));
                  debug("Introspection data parsed successfully");
                }
              } else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
            } catch (error) {
              debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
            } finally {
              try {
                rmSync(outputTempDir, { recursive: true, force: true });
              } catch (err) {
                debug(`Error deleting output temp directory: ${err}`);
              }
              resolvePromise();
            }
          });
        } else resolvePromise();
      });
    } catch (error) {
      debug("Introspection error", error);
      resolvePromise();
    }
  });
  const { handler: handler$1 } = await getBuildSummary(buildResult.rolldownResult.outputDir);
  if (!introspectionData) {
    introspectionSpan.setAttributes({
      "introspection.success": "false",
      "introspection.routes": "0"
    });
    return defaultResult$1;
  }
  // Normalize reported folders to be relative to the work path.
  const additionalFolders = (introspectionData.additionalFolders ?? []).map((val) => {
    if (isAbsolute(val)) return relative(args.workPath, val);
    return val;
  });
  introspectionSpan.setAttributes({
    "introspection.success": "true",
    "introspection.routes": String(introspectionData.routes.length)
  });
  return {
    dir: buildResult.rolldownResult.outputDir,
    handler: handler$1,
    // FIX: a missing comma after `files` previously made this object literal
    // a syntax error.
    files: buildResult.rolldownResult.outputFiles,
    routes: introspectionData.routes,
    additionalFolders,
    additionalDeps: introspectionData.additionalDeps ?? []
  };
};
return introspectionSpan.trace(runIntrospection);
};
//#endregion
//#region src/build.ts
/**
 * Run the user's configured build command (if any) and locate its output.
 * Returns either a pre-built summary (when a `.cervel.json` exists in the
 * output dir) or `{ localBuildFiles, files, handler, outputDir }` for the
 * rest of the pipeline; `files`/`outputDir` stay undefined when no build
 * output was found.
 */
const maybeDoBuildCommand = async (args, downloadResult) => {
  const buildCommandResult = await maybeExecBuildCommand(args, downloadResult);
  const outputSetting = args.config.outputDirectory;
  let outputDir;
  let entrypoint;
  // FIX: the outer condition previously read `buildCommandResult && outputSetting`
  // followed by `if (outputSetting)`, which made the conventional-directory
  // fallback below unreachable dead code.
  if (buildCommandResult) {
    if (outputSetting) {
      // An explicit output directory was configured; honor it.
      const _outputDir = join$1(args.workPath, outputSetting);
      const _entrypoint = await findEntrypoint(_outputDir);
      if (_entrypoint) {
        outputDir = _outputDir;
        entrypoint = _entrypoint;
      }
    } else {
      // No configured output directory: probe the conventional candidates.
      for (const outputDirectory of ["dist", "build", "output"]) {
        const _outputDir = join$1(args.workPath, outputDirectory);
        if (existsSync(_outputDir)) {
          const _entrypoint = await findEntrypoint(_outputDir);
          if (_entrypoint) {
            outputDir = _outputDir;
            entrypoint = _entrypoint;
            break;
          }
        }
      }
    }
  }
  // FIX: guard — `outputDir` can be undefined when nothing was found; the
  // original joined it unconditionally and never closed this `if` block
  // (unbalanced brace), leaving the tail of the function unreachable.
  if (outputDir) {
    const cervelJsonPath = join$1(outputDir, ".cervel.json");
    if (existsSync(cervelJsonPath)) {
      const { handler: handler$1 } = await getBuildSummary(outputDir);
      return {
        dir: outputDir,
        handler: handler$1
      };
    }
  }
  const localBuildFiles = /* @__PURE__ */ new Set();
  let files;
  if (outputDir && entrypoint) {
    files = await glob("**", outputDir);
    for (const file of Object.keys(files)) localBuildFiles.add(join$1(outputDir, file));
  }
  return {
    localBuildFiles,
    files,
    handler: entrypoint,
    outputDir
  };
};
//#endregion
//#region src/typescript.ts
// CommonJS `require` shim for this ES module; used below to resolve the
// local `typescript/bin/tsc` binary relative to the project's workPath.
const require_ = createRequire(import.meta.url);
/**
 * Typecheck the entrypoint inside a tracing span.
 * Returns undefined for non-TypeScript entrypoints, null when no local
 * TypeScript install is found (typecheck skipped), otherwise the result of
 * running `tsc` via doTypeCheck (which rejects on type errors).
 */
const typescript = (args) => {
  const tsSpan = args.span.child("vc.builder.backends.tsCompile");
  return tsSpan.trace(async () => {
    const isTsEntrypoint = [".ts", ".mts", ".cts"].includes(extname$1(args.entrypoint));
    if (!isTsEntrypoint) return;
    const tscPath = resolveTscPath(args);
    if (tscPath) return doTypeCheck(args, tscPath);
    console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck skipped ${Colors.gray("(TypeScript not found)")}`));
    return null;
  });
};
/**
 * Spawn `tsc --noEmit` against the nearest tsconfig (or the bare entrypoint
 * when none exists) and surface its output on failure.
 *
 * Strategy (subject to change):
 * - if no tscPath, the caller skips the typecheck entirely
 * - if a tsconfig is found, pass it via --project
 * - else pass the entrypoint path directly
 *
 * Resolves on exit code 0; rejects with the collected tsc output logged when
 * the check fails, or with the spawn error itself.
 */
async function doTypeCheck(args, tscPath) {
  const cliArgs = [
    tscPath,
    "--noEmit",
    "--pretty",
    "--allowJs",
    "--esModuleInterop",
    "--skipLibCheck"
  ];
  const tsconfig = await findNearestTsconfig(args.workPath);
  if (tsconfig) {
    cliArgs.push("--project", tsconfig);
  } else {
    cliArgs.push(args.entrypoint);
  }
  let collectedStdout = "";
  let collectedStderr = "";
  const child = spawn$1(process.execPath, cliArgs, {
    cwd: args.workPath,
    stdio: ["ignore", "pipe", "pipe"]
  });
  child.stdout?.on("data", (chunk) => {
    collectedStdout += chunk.toString();
  });
  child.stderr?.on("data", (chunk) => {
    collectedStderr += chunk.toString();
  });
  await new Promise((resolve$1, reject) => {
    child.on("error", (err) => {
      reject(err);
    });
    child.on("close", (exitCode) => {
      if (exitCode !== 0) {
        // Prefer stdout (tsc writes diagnostics there); fall back to stderr.
        const output = collectedStdout || collectedStderr;
        if (output) {
          console.error("\nTypeScript type check failed:\n");
          console.error(output);
        }
        reject(/* @__PURE__ */ new Error("TypeScript type check failed"));
        return;
      }
      console.log(Colors.gray(`${Colors.bold(Colors.cyan("✓"))} Typecheck complete`));
      resolve$1();
    });
  });
}
/**
 * Resolve the local `tsc` binary relative to the project's workPath.
 * @returns {string|null} Absolute path to `typescript/bin/tsc`, or null when
 *   TypeScript is not installed (callers treat null as "skip typecheck").
 */
const resolveTscPath = (args) => {
  // FIX: the previous body was corrupted — it contained statements spliced in
  // from an unrelated build step (`findEntrypoint` / `writeFile` on variables
  // not in scope), used `await` in a non-async function, and had a duplicated
  // `catch` clause (all syntax errors). Restored to the resolve-or-null
  // contract that `typescript()` relies on.
  try {
    return require_.resolve("typescript/bin/tsc", { paths: [args.workPath] });
  } catch (e) {
    return null;
  }
};
/**
 * Walk upward from workPath looking for the nearest tsconfig.json.
 * @returns {Promise<string|undefined>} Path to the tsconfig, or undefined
 *   when the filesystem root is reached without finding one.
 */
const findNearestTsconfig = async (workPath) => {
  let currentDir = workPath;
  for (;;) {
    const candidate = join$1(currentDir, "tsconfig.json");
    if (existsSync(candidate)) return candidate;
    // Stop at the root; matches the original recursive termination check.
    if (currentDir === "/") return;
    currentDir = join$1(currentDir, "..");
  }
};
//#endregion
//#region src/introspection/index.ts
// CommonJS `require` shim for this ES module; used below to resolve loader
// entry points and framework package.json files relative to the app dir.
const require = createRequire(import.meta.url);
/**
 * Introspect the built app by spawning it under loader hooks that make it
 * print its routes (as JSON between BEGIN/END markers on stdout) instead of
 * serving. Falls back to a catch-all route set when introspection yields
 * nothing within the timeout.
 */
const introspectApp = async (args) => {
  const { span } = args;
  const introspectionSpan = span.child("vc.builder.backends.introspection");
  if (isExperimentalBackendsWithoutIntrospectionEnabled()) return defaultResult(args);
  const cjsLoaderPath = require.resolve("@vercel/backends/introspection/loaders/cjs");
  const rolldownEsmLoaderPath = `file://${require.resolve("@vercel/backends/introspection/loaders/rolldown-esm")}`;
  const handlerPath = join$1(args.dir, args.handler);
  // Shape of the JSON payload the introspected app emits between the markers.
  const introspectionSchema$1 = z.object({
    frameworkSlug: z.string().optional(),
    routes: z.array(z.object({
      src: z.string(),
      dest: z.string(),
      methods: z.array(z.string())
    })),
    additionalFolders: z.array(z.string()).optional().transform((values) => {
      return values?.map((val) => {
        if (isAbsolute(val)) return relative(args.dir, val);
        return val;
      });
    }),
    additionalDeps: z.array(z.string()).optional()
  });
  let introspectionData;
  await new Promise((resolvePromise) => {
    try {
      debug("Spawning introspection process");
      const child = spawn$1("node", [
        "-r",
        cjsLoaderPath,
        "--import",
        rolldownEsmLoaderPath,
        handlerPath
      ], {
        stdio: ["pipe", "pipe", "pipe"],
        cwd: args.dir,
        env: {
          ...process.env,
          ...args.env
        }
      });
      // Stream child stdout to a temp file rather than buffering in memory.
      const tempDir = mkdtempSync(join$1(tmpdir(), "introspection-"));
      const tempFilePath = join$1(tempDir, "output.txt");
      const writeStream = createWriteStream(tempFilePath);
      let streamClosed = false;
      child.stdout?.pipe(writeStream);
      let stderrBuffer = "";
      child.stderr?.on("data", (data) => {
        stderrBuffer += data.toString();
      });
      writeStream.on("error", (err) => {
        debug(`Write stream error: ${err.message}`);
      });
      // Escalating kill: SIGTERM after 8s, SIGKILL after 9s.
      const timeout = setTimeout(() => {
        debug("Introspection timeout, killing process with SIGTERM");
        child.kill("SIGTERM");
      }, 8e3);
      const timeout2 = setTimeout(() => {
        debug("Introspection timeout, killing process with SIGKILL");
        child.kill("SIGKILL");
      }, 9e3);
      child.on("error", (err) => {
        clearTimeout(timeout);
        clearTimeout(timeout2);
        debug(`Loader error: ${err.message}`);
        if (!streamClosed) {
          writeStream.end(() => {
            streamClosed = true;
            try {
              unlinkSync(tempFilePath);
            } catch (cleanupErr) {
              debug(`Error deleting temp file on error: ${cleanupErr}`);
            }
            resolvePromise(void 0);
          });
        } else resolvePromise(void 0);
      });
      child.on("close", () => {
        clearTimeout(timeout);
        clearTimeout(timeout2);
        debug("Introspection process closed");
        if (!streamClosed) {
          writeStream.end(() => {
            streamClosed = true;
            let stdoutBuffer;
            try {
              stdoutBuffer = readFileSync(tempFilePath, "utf8");
              const beginIndex = stdoutBuffer.indexOf(BEGIN_INTROSPECTION_RESULT);
              const endIndex = stdoutBuffer.indexOf(END_INTROSPECTION_RESULT);
              if (beginIndex !== -1 && endIndex !== -1) {
                const introspectionString = stdoutBuffer.substring(beginIndex + BEGIN_INTROSPECTION_RESULT.length, endIndex);
                if (introspectionString) {
                  introspectionData = introspectionSchema$1.parse(JSON.parse(introspectionString));
                  debug("Introspection data parsed successfully");
                }
              } else debug(`Introspection markers not found.\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
            } catch (error) {
              debug(`Error parsing introspection data: ${error}\nstdout:\n${stdoutBuffer}\nstderr:\n${stderrBuffer}`);
            } finally {
              try {
                rmSync(tempDir, { recursive: true, force: true });
              } catch (err) {
                debug(`Error deleting temp directory: ${err}`);
              }
              resolvePromise(void 0);
            }
          });
        } else resolvePromise(void 0);
      });
    } catch (error) {
      debug("Introspection error", error);
      resolvePromise(void 0);
    }
  });
  const framework = getFramework(args);
  if (!introspectionData) {
    introspectionSpan.setAttributes({
      "introspection.success": "false",
      "introspection.routes": "0"
    });
    return defaultResult(args);
  }
  const routes = [
    { handle: "filesystem" },
    ...introspectionData.routes,
    {
      src: "/(.*)",
      dest: "/"
    }
  ];
  introspectionSpan.setAttributes({
    "introspection.success": "true",
    "introspection.routes": String(introspectionData.routes.length),
    "introspection.framework": introspectionData.frameworkSlug ?? ""
  });
  // FIX: the original returned `dir: outputDir, handler` where neither
  // identifier is in scope here (ReferenceError at runtime) and a comma was
  // missing after `handler`. Mapped them to this function's own inputs —
  // TODO(review): confirm against callers that `args.dir`/`args.handler` are
  // the intended values.
  return {
    dir: args.dir,
    handler: args.handler,
    routes,
    framework,
    additionalFolders: introspectionData.additionalFolders ?? [],
    additionalDeps: introspectionData.additionalDeps ?? []
  };
};
/**
 * Fallback result used when introspection is disabled or yields no data:
 * filesystem first, then route everything to the root lambda.
 */
const defaultResult = (args) => {
  const catchAllRoutes = [
    { handle: "filesystem" },
    {
      src: "/(.*)",
      dest: "/"
    }
  ];
  return {
    routes: catchAllRoutes,
    framework: getFramework(args)
  };
};
/**
 * Determine the framework slug and installed version for the app.
 * Resolves the framework package from the app dir, then walks upward from
 * the resolved file to the nearest package.json to read its version.
 * Any failure degrades to an empty version string rather than throwing.
 */
const getFramework = (args) => {
  try {
    let resolvedVersion;
    if (args.framework) {
      const frameworkLibPath = require.resolve(`${args.framework}`, { paths: [args.dir] });
      // Iterative upward walk (equivalent to the recursive search): stop at
      // the first package.json or when dirname() no longer changes the path.
      let dir = frameworkLibPath;
      let packageJsonPath;
      for (;;) {
        const candidate = join$1(dir, "package.json");
        if (existsSync(candidate)) {
          packageJsonPath = candidate;
          break;
        }
        const parent = dirname$1(dir);
        if (parent === dir) break;
        dir = parent;
      }
      if (packageJsonPath) resolvedVersion = require(packageJsonPath).version;
    }
    return {
      slug: args.framework ?? "",
      version: resolvedVersion ?? ""
    };
  } catch (error) {
    debug(`Error getting framework for ${args.framework}. Setting framework version to empty string.`, error);
    return {
      slug: args.framework ?? "",
      version: ""
    };
  }
};

@@ -981,44 +1437,65 @@ //#endregion

span.setAttributes({ "builder.name": builderName });
const entrypoint = await findEntrypoint(args.workPath);
debug("Entrypoint", entrypoint);
const buildSpan = span.child("vc.builder.backends.build");
const introspectionSpan = span.child("vc.builder.backends.introspectApp");
const [buildResult, introspectionResult] = await Promise.all([buildSpan.trace(() => doBuild(args, downloadResult, buildSpan)), introspectionSpan.trace(() => introspectApp({
handler: entrypoint,
dir: args.workPath,
framework: args.config.framework,
env: {
...args.meta?.env ?? {},
...args.meta?.buildEnv ?? {}
},
span: introspectionSpan
}))]);
const files = buildResult.files;
const { routes, framework } = introspectionResult;
if (routes.length > 2) debug(`Introspection completed successfully with ${routes.length} routes`);
else debug(`Introspection failed to detect routes`);
const handler = buildResult.handler;
if (!files) throw new Error("Unable to trace files for build");
const lambda = new NodejsLambda({
runtime: nodeVersion.runtime,
handler,
files,
shouldAddHelpers: false,
shouldAddSourcemapSupport: true,
framework: {
slug: framework?.slug ?? "",
version: framework?.version ?? ""
},
awsLambdaHandler: "",
shouldDisableAutomaticFetchInstrumentation: process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION === "1"
return buildSpan.trace(async () => {
const entrypoint = await findEntrypointOrThrow(args.workPath);
debug("Entrypoint", entrypoint);
args.entrypoint = entrypoint;
const userBuildResult = await maybeDoBuildCommand(args, downloadResult);
const rolldownResult = await rolldown({
...args,
span: buildSpan
});
const introspectionPromise = introspection({
...args,
span: buildSpan,
files: rolldownResult.files,
handler: rolldownResult.handler
});
const typescriptPromise = typescript({
entrypoint,
workPath: args.workPath,
span: buildSpan
});
const localBuildFiles = userBuildResult?.localBuildFiles.size > 0 ? userBuildResult?.localBuildFiles : rolldownResult.localBuildFiles;
const files = userBuildResult?.files || rolldownResult.files;
const handler = userBuildResult?.handler || rolldownResult.handler;
const nftWorkPath = userBuildResult?.outputDir || args.workPath;
await nft({
...args,
workPath: nftWorkPath,
localBuildFiles,
files,
ignoreNodeModules: false,
span: buildSpan
});
const introspectionResult = await introspectionPromise;
await typescriptPromise;
const lambda = new NodejsLambda({
runtime: nodeVersion.runtime,
handler,
files,
framework: rolldownResult.framework,
shouldAddHelpers: false,
shouldAddSourcemapSupport: true,
awsLambdaHandler: "",
shouldDisableAutomaticFetchInstrumentation: process.env.VERCEL_TRACING_DISABLE_AUTOMATIC_FETCH_INSTRUMENTATION === "1"
});
const routes = [
{ handle: "filesystem" },
...introspectionResult.routes,
{
src: "/(.*)",
dest: "/"
}
];
const output = { index: lambda };
for (const route of routes) if (route.dest) {
if (route.dest === "/") continue;
output[route.dest] = lambda;
}
return {
routes,
output
};
});
const output = { index: lambda };
for (const route of routes) if (route.dest) {
if (route.dest === "/") continue;
output[route.dest] = lambda;
}
return {
routes,
output
};
};

@@ -1030,2 +1507,2 @@ const prepareCache = ({ repoRootPath, workPath }) => {

//#endregion
export { build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };
export { build, build$1 as cervelBuild, serve as cervelServe, findEntrypoint, findEntrypointOrThrow, getBuildSummary, introspectApp, nodeFileTrace, prepareCache, srvxOptions, version };
import { pathToRegexp } from "path-to-regexp";
import { debug } from "@vercel/build-utils";
//#region src/introspection/util.ts
//#region src/rolldown/util.ts
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";

@@ -36,3 +36,2 @@ const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";

if (routes.length > 0) return {
frameworkSlug: "express",
routes,

@@ -39,0 +38,0 @@ additionalFolders,

import { pathToRegexp } from "path-to-regexp";
import { debug } from "@vercel/build-utils";
//#region src/introspection/util.ts
//#region src/rolldown/util.ts
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";

@@ -31,6 +31,3 @@ const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";

const routes = extractRoutes();
if (routes.length > 0) return {
frameworkSlug: "hono",
routes
};
if (routes.length > 0) return { routes };
});

@@ -37,0 +34,0 @@ function extractRoutes() {

@@ -29,3 +29,3 @@ //#region rolldown:runtime

//#region src/introspection/util.ts
//#region src/rolldown/util.ts
const BEGIN_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_BEGIN__\n";

@@ -57,6 +57,3 @@ const END_INTROSPECTION_RESULT = "\n__VERCEL_INTROSPECTION_END__\n";

const routes = extractRoutes$1();
if (routes.length > 0) return {
frameworkSlug: "hono",
routes
};
if (routes.length > 0) return { routes };
});

@@ -104,3 +101,2 @@ function extractRoutes$1() {

if (routes.length > 0) return {
frameworkSlug: "express",
routes,

@@ -107,0 +103,0 @@ additionalFolders,

{
"name": "@vercel/backends",
"version": "0.0.27",
"version": "0.0.28",
"license": "Apache-2.0",

@@ -14,7 +14,5 @@ "main": "./dist/index.mjs",

"./package.json": "./package.json",
"./introspection/loaders/cjs": "./dist/introspection/loaders/cjs.cjs",
"./introspection/loaders/esm": "./dist/introspection/loaders/esm.mjs",
"./introspection/loaders/hooks": "./dist/introspection/loaders/hooks.mjs",
"./introspection/loaders/rolldown-esm": "./dist/introspection/loaders/rolldown-esm.mjs",
"./introspection/loaders/rolldown-hooks": "./dist/introspection/loaders/rolldown-hooks.mjs"
"./rolldown/esm": "./dist/rolldown/esm.mjs",
"./rolldown/hooks": "./dist/rolldown/hooks.mjs",
"./rolldown/cjs-hooks": "./dist/rolldown/cjs-hooks.cjs"
},

@@ -40,3 +38,3 @@ "repository": {

"zod": "3.22.4",
"@vercel/build-utils": "13.3.0"
"@vercel/build-utils": "13.3.1"
},

@@ -43,0 +41,0 @@ "peerDependencies": {