@vercel/remix-builder
Comparing version 2.0.2 to 2.0.3
"use strict"; | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
var desc = Object.getOwnPropertyDescriptor(m, k); | ||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||
desc = { enumerable: true, get: function() { return m[k]; } }; | ||
} | ||
Object.defineProperty(o, k2, desc); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __exportStar = (this && this.__exportStar) || function(m, exports) { | ||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.version = void 0; | ||
exports.version = 2; | ||
__exportStar(require("./build"), exports); | ||
__exportStar(require("./prepare-cache"), exports); | ||
//# sourceMappingURL=index.js.map | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var src_exports = {}; | ||
__export(src_exports, { | ||
version: () => version | ||
}); | ||
module.exports = __toCommonJS(src_exports); | ||
__reExport(src_exports, require("./build"), module.exports); | ||
__reExport(src_exports, require("./prepare-cache"), module.exports); | ||
const version = 2; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
version, | ||
...require("./build"), | ||
...require("./prepare-cache") | ||
}); | ||
//# sourceMappingURL=index.js.map |
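
The `0 && (module.exports = { ... })` block above is intentionally dead code: esbuild emits it so that Node.js (via `cjs-module-lexer`) can statically discover the named exports of the CommonJS bundle, including the names re-exported from `./build` and `./prepare-cache`. A minimal consumer-side sketch of what that enables, assuming the package is installed in a Node.js ESM project:

// esm-consumer.mjs — named imports from the CommonJS bundle work because the
// annotation makes the export names statically analyzable.
import { version, build, prepareCache } from '@vercel/remix-builder';
console.log(version); // 2
console.log(typeof build, typeof prepareCache); // "function" "function"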
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.build = void 0; | ||
const ts_morph_1 = require("ts-morph"); | ||
const fs_1 = require("fs"); | ||
const path_1 = require("path"); | ||
const build_utils_1 = require("@vercel/build-utils"); | ||
const static_config_1 = require("@vercel/static-config"); | ||
const nft_1 = require("@vercel/nft"); | ||
const utils_1 = require("./utils"); | ||
const hydrogen_1 = require("./hydrogen"); | ||
const remixBuilderPkg = JSON.parse((0, fs_1.readFileSync)((0, path_1.join)(__dirname, '../package.json'), 'utf8')); | ||
const remixRunDevForkVersion = remixBuilderPkg.devDependencies['@remix-run/dev']; | ||
const DEFAULTS_PATH = (0, path_1.join)(__dirname, '../defaults'); | ||
const edgeServerSrcPromise = fs_1.promises.readFile((0, path_1.join)(DEFAULTS_PATH, 'server-edge.mjs'), 'utf-8'); | ||
const nodeServerSrcPromise = fs_1.promises.readFile((0, path_1.join)(DEFAULTS_PATH, 'server-node.mjs'), 'utf-8'); | ||
// Minimum supported version of the `@vercel/remix` package | ||
const VERCEL_REMIX_MIN_VERSION = '1.10.0'; | ||
// Minimum supported version of the `@vercel/remix-run-dev` forked compiler | ||
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0'; | ||
// Maximum version of `@vercel/remix-run-dev` fork | ||
// (and also `@vercel/remix` since they get published at the same time) | ||
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(remixRunDevForkVersion.lastIndexOf('@') + 1); | ||
const build = async ({ entrypoint, files, workPath, repoRootPath, config, meta = {}, }) => { | ||
const { installCommand, buildCommand } = config; | ||
await (0, build_utils_1.download)(files, workPath, meta); | ||
const mountpoint = (0, path_1.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, path_1.join)(workPath, mountpoint); | ||
// Run "Install Command" | ||
const nodeVersion = await (0, build_utils_1.getNodeVersion)(entrypointFsDirname, undefined, config, meta); | ||
const { cliType, packageJsonPath, lockfileVersion } = await (0, build_utils_1.scanParentDirs)(entrypointFsDirname); | ||
if (!packageJsonPath) { | ||
throw new Error('Failed to locate `package.json` file in your project'); | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var build_exports = {}; | ||
__export(build_exports, { | ||
build: () => build | ||
}); | ||
module.exports = __toCommonJS(build_exports); | ||
var import_ts_morph = require("ts-morph"); | ||
var import_fs = require("fs"); | ||
var import_path = require("path"); | ||
var import_build_utils = require("@vercel/build-utils"); | ||
var import_static_config = require("@vercel/static-config"); | ||
var import_nft = require("@vercel/nft"); | ||
var import_utils = require("./utils"); | ||
var import_hydrogen = require("./hydrogen"); | ||
const remixBuilderPkg = JSON.parse( | ||
(0, import_fs.readFileSync)((0, import_path.join)(__dirname, "../package.json"), "utf8") | ||
); | ||
const remixRunDevForkVersion = remixBuilderPkg.devDependencies["@remix-run/dev"]; | ||
const DEFAULTS_PATH = (0, import_path.join)(__dirname, "../defaults"); | ||
const edgeServerSrcPromise = import_fs.promises.readFile( | ||
(0, import_path.join)(DEFAULTS_PATH, "server-edge.mjs"), | ||
"utf-8" | ||
); | ||
const nodeServerSrcPromise = import_fs.promises.readFile( | ||
(0, import_path.join)(DEFAULTS_PATH, "server-node.mjs"), | ||
"utf-8" | ||
); | ||
const VERCEL_REMIX_MIN_VERSION = "1.10.0"; | ||
const REMIX_RUN_DEV_MIN_VERSION = "1.15.0"; | ||
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice( | ||
remixRunDevForkVersion.lastIndexOf("@") + 1 | ||
); | ||
const build = async ({ | ||
entrypoint, | ||
files, | ||
workPath, | ||
repoRootPath, | ||
config, | ||
meta = {} | ||
}) => { | ||
const { installCommand, buildCommand } = config; | ||
await (0, import_build_utils.download)(files, workPath, meta); | ||
const mountpoint = (0, import_path.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, import_path.join)(workPath, mountpoint); | ||
const nodeVersion = await (0, import_build_utils.getNodeVersion)( | ||
entrypointFsDirname, | ||
void 0, | ||
config, | ||
meta | ||
); | ||
const { cliType, packageJsonPath, lockfileVersion } = await (0, import_build_utils.scanParentDirs)( | ||
entrypointFsDirname | ||
); | ||
if (!packageJsonPath) { | ||
throw new Error("Failed to locate `package.json` file in your project"); | ||
} | ||
const pkgRaw = await import_fs.promises.readFile(packageJsonPath, "utf8"); | ||
const pkg = JSON.parse(pkgRaw); | ||
const spawnOpts = (0, import_build_utils.getSpawnOptions)(meta, nodeVersion); | ||
if (!spawnOpts.env) { | ||
spawnOpts.env = {}; | ||
} | ||
spawnOpts.env = (0, import_build_utils.getEnvForPackageManager)({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts.env | ||
}); | ||
if (typeof installCommand === "string") { | ||
if (installCommand.trim()) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
await (0, import_build_utils.execCommand)(installCommand, { | ||
...spawnOpts, | ||
cwd: entrypointFsDirname | ||
}); | ||
} else { | ||
console.log(`Skipping "install" command...`); | ||
} | ||
const pkgRaw = await fs_1.promises.readFile(packageJsonPath, 'utf8'); | ||
const pkg = JSON.parse(pkgRaw); | ||
const spawnOpts = (0, build_utils_1.getSpawnOptions)(meta, nodeVersion); | ||
if (!spawnOpts.env) { | ||
spawnOpts.env = {}; | ||
} else { | ||
await (0, import_build_utils.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion); | ||
} | ||
const isHydrogen2 = pkg.dependencies?.["@shopify/remix-oxygen"] || pkg.devDependencies?.["@shopify/remix-oxygen"]; | ||
const remixRunDevPath = await (0, import_utils.ensureResolvable)( | ||
entrypointFsDirname, | ||
repoRootPath, | ||
"@remix-run/dev" | ||
); | ||
const remixRunDevPkg = JSON.parse( | ||
(0, import_fs.readFileSync)((0, import_path.join)(remixRunDevPath, "package.json"), "utf8") | ||
); | ||
const remixVersion = remixRunDevPkg.version; | ||
const remixConfig = await (0, import_utils.chdirAndReadConfig)( | ||
remixRunDevPath, | ||
entrypointFsDirname, | ||
packageJsonPath | ||
); | ||
const { serverEntryPoint, appDirectory } = remixConfig; | ||
const remixRoutes = Object.values(remixConfig.routes); | ||
const depsToAdd = []; | ||
if (!isHydrogen2 && remixRunDevPkg.name !== "@vercel/remix-run-dev") { | ||
const remixDevForkVersion = (0, import_utils.resolveSemverMinMax)( | ||
REMIX_RUN_DEV_MIN_VERSION, | ||
REMIX_RUN_DEV_MAX_VERSION, | ||
remixVersion | ||
); | ||
depsToAdd.push( | ||
`@remix-run/dev@npm:@vercel/remix-run-dev@${remixDevForkVersion}` | ||
); | ||
} | ||
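  // Illustration (assumed semantics; `resolveSemverMinMax` lives in "./utils" and
  // is not shown in this diff): it presumably clamps the user's Remix version into
  // the fork's supported [min, max] range, e.g.:
  //   resolveSemverMinMax("1.15.0", REMIX_RUN_DEV_MAX_VERSION, "1.9.0")  -> "1.15.0"
  //   resolveSemverMinMax("1.15.0", REMIX_RUN_DEV_MAX_VERSION, "1.15.3") -> "1.15.3"
  //   resolveSemverMinMax("1.15.0", REMIX_RUN_DEV_MAX_VERSION, "99.0.0") -> REMIX_RUN_DEV_MAX_VERSION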
  // `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
  // so if either of those files are missing then add our own versions.
  const userEntryServerFile = (0, import_utils.findEntry)(appDirectory, "entry.server");
  if (!userEntryServerFile) {
    await import_fs.promises.copyFile(
      (0, import_path.join)(DEFAULTS_PATH, "entry.server.jsx"),
      (0, import_path.join)(appDirectory, "entry.server.jsx")
    );
    if (!pkg.dependencies["@vercel/remix"]) {
      // Dependency version resolution logic:
      // 1. User's app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
      // 2. User's app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
      //    published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
      // 3. User's app is on something greater than our latest version of the fork -> we install
      //    the latest known published version of `@vercel/remix`.
      const vercelRemixVersion = (0, import_utils.resolveSemverMinMax)(
        VERCEL_REMIX_MIN_VERSION,
        REMIX_RUN_DEV_MAX_VERSION,
        remixVersion
      );
      depsToAdd.push(`@vercel/remix@${vercelRemixVersion}`);
    }
  }
  if (depsToAdd.length) {
    await (0, import_utils.addDependencies)(cliType, depsToAdd, {
      ...spawnOpts,
      cwd: entrypointFsDirname
    });
  }
  const userEntryClientFile = (0, import_utils.findEntry)(
    remixConfig.appDirectory,
    "entry.client"
  );
  if (!userEntryClientFile) {
    await import_fs.promises.copyFile(
      (0, import_path.join)(DEFAULTS_PATH, "entry.client.react.jsx"),
      (0, import_path.join)(appDirectory, "entry.client.jsx")
    );
  }
  let remixConfigWrapped = false;
  let serverEntryPointAbs;
  let originalServerEntryPoint;
  const remixConfigPath = (0, import_utils.findConfig)(entrypointFsDirname, "remix.config");
  const renamedRemixConfigPath = remixConfigPath ? `${remixConfigPath}.original${(0, import_path.extname)(remixConfigPath)}` : void 0;
  // These get populated inside the try/catch below
  let serverBundles;
  const serverBundlesMap = /* @__PURE__ */ new Map();
  const resolvedConfigsMap = /* @__PURE__ */ new Map();
  try {
    // Read the `export const config` (if any) for each route
    const project = new import_ts_morph.Project();
    const staticConfigsMap = /* @__PURE__ */ new Map();
    for (const route of remixRoutes) {
      const routePath = (0, import_path.join)(remixConfig.appDirectory, route.file);
      let staticConfig = (0, import_static_config.getConfig)(project, routePath);
      if (staticConfig && isHydrogen2) {
        console.log(
          "WARN: `export const config` is currently not supported for Hydrogen v2 apps"
        );
        staticConfig = null;
      }
      staticConfigsMap.set(route, staticConfig);
    }
    for (const route of remixRoutes) {
      const config2 = (0, import_utils.getResolvedRouteConfig)(
        route,
        remixConfig.routes,
        staticConfigsMap,
        isHydrogen2
      );
      resolvedConfigsMap.set(route, config2);
    }
    // Figure out which routes belong to which server bundles
    // based on having common static config properties
    for (const route of remixRoutes) {
      if ((0, import_utils.isLayoutRoute)(route.id, remixRoutes))
        continue;
      const config2 = resolvedConfigsMap.get(route);
      if (!config2) {
        throw new Error(`Expected resolved config for "${route.id}"`);
      }
      const hash = (0, import_utils.calculateRouteConfigHash)(config2);
      let routesForHash = serverBundlesMap.get(hash);
      if (!Array.isArray(routesForHash)) {
        routesForHash = [];
        serverBundlesMap.set(hash, routesForHash);
      }
      routesForHash.push(route);
    }
    serverBundles = Array.from(serverBundlesMap.entries()).map(
      ([hash, routes2]) => {
        const runtime = resolvedConfigsMap.get(routes2[0])?.runtime ?? "nodejs";
        return {
          serverBuildPath: isHydrogen2 ? (0, import_path.relative)(entrypointFsDirname, remixConfig.serverBuildPath) : `${(0, import_path.relative)(
            entrypointFsDirname,
            (0, import_path.dirname)(remixConfig.serverBuildPath)
          )}/build-${runtime}-${hash}.js`,
          routes: routes2.map((r) => r.id)
        };
      }
    );
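    // Illustration (hypothetical routes and hash values): routes whose resolved
    // static config hashes to the same value share a single server bundle, e.g.:
    //   routes/index       { runtime: "nodejs" } -> hash "a1b2" -> build-nodejs-a1b2.js
    //   routes/blog.$slug  { runtime: "nodejs" } -> hash "a1b2" -> build-nodejs-a1b2.js
    //   routes/api.edge    { runtime: "edge" }   -> hash "c3d4" -> build-edge-c3d4.js
    // yielding one `serverBundles` entry per distinct config hash (two here).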
    // We need to patch the `remix.config.js` file to force some values necessary
    // for a build that works on either Node.js or the Edge runtime
    if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
      await import_fs.promises.rename(remixConfigPath, renamedRemixConfigPath);
      let patchedConfig;
      // Figure out if the `remix.config` file is using ESM syntax
      if ((0, import_utils.isESM)(renamedRemixConfigPath)) {
        patchedConfig = `import config from './${(0, import_path.basename)(
          renamedRemixConfigPath
        )}';
config.serverBuildTarget = undefined;
@@ -173,5 +229,6 @@ config.serverModuleFormat = 'cjs';
export default config;`;
      } else {
        patchedConfig = `const config = require('./${(0, import_path.basename)(
          renamedRemixConfigPath
        )}');
config.serverBuildTarget = undefined;
@@ -183,300 +240,330 @@ config.serverModuleFormat = 'cjs';
module.exports = config;`;
      }
      await import_fs.promises.writeFile(remixConfigPath, patchedConfig);
      remixConfigWrapped = true;
    }
    // For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
    if (isHydrogen2) {
      if (remixConfig.serverEntryPoint) {
        serverEntryPointAbs = (0, import_path.join)(
          entrypointFsDirname,
          remixConfig.serverEntryPoint
        );
        originalServerEntryPoint = await import_fs.promises.readFile(
          serverEntryPointAbs,
          "utf8"
        );
        const patchedServerEntryPoint = (0, import_hydrogen.patchHydrogenServer)(
          project,
          serverEntryPointAbs
        );
        if (patchedServerEntryPoint) {
          (0, import_build_utils.debug)(
            `Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
          );
          await import_fs.promises.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
        }
      } else {
        console.log('WARN: No "server" field found in Remix config');
      }
    }
    // Make `remix build` output production mode
    spawnOpts.env.NODE_ENV = "production";
    // Run "Build Command"
    if (buildCommand) {
      (0, import_build_utils.debug)(`Executing build command "${buildCommand}"`);
      await (0, import_build_utils.execCommand)(buildCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname
      });
    } else {
      if (hasScript("vercel-build", pkg)) {
        (0, import_build_utils.debug)(`Executing "yarn vercel-build"`);
        await (0, import_build_utils.runPackageJsonScript)(
          entrypointFsDirname,
          "vercel-build",
          spawnOpts
        );
      } else if (hasScript("build", pkg)) {
        (0, import_build_utils.debug)(`Executing "yarn build"`);
        await (0, import_build_utils.runPackageJsonScript)(entrypointFsDirname, "build", spawnOpts);
      } else {
        await (0, import_build_utils.execCommand)("remix build", {
          ...spawnOpts,
          cwd: entrypointFsDirname
        });
      }
    }
  } finally {
    const cleanupOps = [];
    // Clean up our patched `remix.config.js` to be polite
    if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
      cleanupOps.push(
        import_fs.promises.rename(renamedRemixConfigPath, remixConfigPath).then(
          () => (0, import_build_utils.debug)(`Restored original "${(0, import_path.basename)(remixConfigPath)}" file`)
        )
      );
    }
    // Restore original server entrypoint if it was modified (for Hydrogen v2)
    if (serverEntryPointAbs && originalServerEntryPoint) {
      cleanupOps.push(
        import_fs.promises.writeFile(serverEntryPointAbs, originalServerEntryPoint).then(
          () => (0, import_build_utils.debug)(`Restored original "${(0, import_path.basename)(serverEntryPointAbs)}" file`)
        )
      );
    }
    await Promise.all(cleanupOps);
  }
  // This needs to happen before we run NFT to create the Node/Edge functions
  await Promise.all([
    (0, import_utils.ensureResolvable)(
      entrypointFsDirname,
      repoRootPath,
      "@remix-run/server-runtime"
    ),
    !isHydrogen2 ? (0, import_utils.ensureResolvable)(entrypointFsDirname, repoRootPath, "@remix-run/node") : null
  ]);
  const staticDir = (0, import_path.join)(
    remixConfig.assetsBuildDirectory,
    ...remixConfig.publicPath.replace(/^\/|\/$/g, "").split("/").map((_) => "..")
  );
  const [staticFiles, ...functions] = await Promise.all([
    (0, import_build_utils.glob)("**", staticDir),
    ...serverBundles.map((bundle) => {
      const firstRoute = remixConfig.routes[bundle.routes[0]];
      const config2 = resolvedConfigsMap.get(firstRoute) ?? {
        runtime: "nodejs"
      };
      if (config2.runtime === "edge") {
        return createRenderEdgeFunction(
          entrypointFsDirname,
          repoRootPath,
          (0, import_path.join)(entrypointFsDirname, bundle.serverBuildPath),
          serverEntryPoint,
          remixVersion,
          config2
        );
      }
      return createRenderNodeFunction(
        nodeVersion,
        entrypointFsDirname,
        repoRootPath,
        (0, import_path.join)(entrypointFsDirname, bundle.serverBuildPath),
        serverEntryPoint,
        remixVersion,
        config2
      );
    })
  ]);
  const output = staticFiles;
  const routes = [
    {
      src: "^/build/(.*)$",
      headers: { "cache-control": "public, max-age=31536000, immutable" },
      continue: true
    },
    {
      handle: "filesystem"
    }
  ];
  for (const route of remixRoutes) {
    // Layout routes don't get a function / route added
    if ((0, import_utils.isLayoutRoute)(route.id, remixRoutes))
      continue;
    const { path, rePath } = (0, import_utils.getPathFromRoute)(route, remixConfig.routes);
    // If the route is a pathless layout route (at the root level)
    // and doesn't have any sub-routes, then a function should not be created.
    if (!path) {
      continue;
    }
    const funcIndex = serverBundles.findIndex((bundle) => {
      return bundle.routes.includes(route.id);
    });
    const func = functions[funcIndex];
    if (!func) {
      throw new Error(`Could not determine server bundle for "${route.id}"`);
    }
    output[path] = func instanceof import_build_utils.EdgeFunction ? (
      // `EdgeFunction` currently requires the "name" property to be set.
      // Ideally this property will be removed, at which point we can
      // return the same `edgeFunction` instance instead of creating a
      // new one for each page.
      new import_build_utils.EdgeFunction({
        ...func,
        name: path
      })
    ) : func;
    // If this is a dynamic route then add a Vercel route
    const re = (0, import_utils.getRegExpFromPath)(rePath);
    if (re) {
      routes.push({
        src: re.source,
        dest: path
      });
    }
  }
  // Add a 404 path for "not found" pages to be server-side rendered by Remix.
  // Use an Edge Function bundle if one was generated, otherwise use Node.js.
  if (!output["404"]) {
    const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
      (routes2) => {
        const runtime = resolvedConfigsMap.get(routes2[0])?.runtime;
        return runtime === "edge";
      }
    );
    const func = edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
    output["404"] = func instanceof import_build_utils.EdgeFunction ? new import_build_utils.EdgeFunction({ ...func, name: "404" }) : func;
  }
  routes.push({
    src: "/(.*)",
    dest: "/404"
  });
  return { routes, output, framework: { version: remixVersion } };
};
function hasScript(scriptName, pkg) {
  const scripts = pkg && pkg.scripts || {};
  return typeof scripts[scriptName] === "string";
}
async function createRenderNodeFunction(nodeVersion, entrypointDir, rootDir, serverBuildPath, serverEntryPoint, remixVersion, config) {
  const files = {};
  let handler = (0, import_path.relative)(rootDir, serverBuildPath);
  let handlerPath = (0, import_path.join)(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = (0, import_path.basename)(serverBuildPath, ".js");
    handler = (0, import_path.join)((0, import_path.dirname)(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = (0, import_path.join)(rootDir, handler);
    // Copy the `server-node.mjs` file into the "build" directory
    const nodeServerSrc = await nodeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      nodeServerSrc.replace(
        "@remix-run/dev/server-build",
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }
  // Trace the handler with `@vercel/nft`
  const trace = await (0, import_nft.nodeFileTrace)([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir
  });
  for (const warning of trace.warnings) {
    (0, import_build_utils.debug)(`Warning from trace: ${warning.message}`);
  }
  for (const file of trace.fileList) {
    files[file] = await import_build_utils.FileFsRef.fromFsPath({ fsPath: (0, import_path.join)(rootDir, file) });
  }
  const fn = new import_build_utils.NodejsLambda({
    files,
    handler,
    runtime: nodeVersion.runtime,
    shouldAddHelpers: false,
    shouldAddSourcemapSupport: false,
    operationType: "SSR",
    supportsResponseStreaming: true,
    regions: config.regions,
    memory: config.memory,
    maxDuration: config.maxDuration,
    framework: {
      slug: "remix",
      version: remixVersion
    }
  });
  return fn;
}
async function createRenderEdgeFunction(entrypointDir, rootDir, serverBuildPath, serverEntryPoint, remixVersion, config) {
  const files = {};
  let handler = (0, import_path.relative)(rootDir, serverBuildPath);
  let handlerPath = (0, import_path.join)(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = (0, import_path.basename)(serverBuildPath, ".js");
    handler = (0, import_path.join)((0, import_path.dirname)(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = (0, import_path.join)(rootDir, handler);
    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      edgeServerSrc.replace(
        "@remix-run/dev/server-build",
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }
  let remixRunVercelPkgJson;
  // Trace the handler with `@vercel/nft`
  const trace = await (0, import_nft.nodeFileTrace)([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ["edge-light", "browser", "module", "import", "require"],
    async readFile(fsPath) {
      let source;
      try {
        source = await import_fs.promises.readFile(fsPath);
      } catch (err) {
        if (err.code === "ENOENT" || err.code === "EISDIR") {
          return null;
        }
        throw err;
      }
      if ((0, import_path.basename)(fsPath) === "package.json") {
        // For Edge Functions, patch the "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());
        // When `@remix-run/vercel` is detected, we need to modify the `package.json`
        // to include the "browser" field so that the proper Edge entrypoint file
        // is used. This is a temporary stop gap until this PR is merged:
        // https://github.com/remix-run/remix/pull/5537
        if (pkgJson.name === "@remix-run/vercel") {
          pkgJson.browser = "dist/edge.js";
          pkgJson.dependencies["@remix-run/server-runtime"] = pkgJson.dependencies["@remix-run/node"];
          if (!remixRunVercelPkgJson) {
            remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + "\n";
            // Copy in the edge entrypoint so that NFT can properly resolve it
            const vercelEdgeEntrypointPath = (0, import_path.join)(
              DEFAULTS_PATH,
              "vercel-edge-entrypoint.js"
            );
            const vercelEdgeEntrypointDest = (0, import_path.join)(
              (0, import_path.dirname)(fsPath),
              "dist/edge.js"
            );
            await import_fs.promises.copyFile(
              vercelEdgeEntrypointPath,
              vercelEdgeEntrypointDest
            );
          }
        }
        for (const prop of ["browser", "module"]) {
          const val = pkgJson[prop];
          if (typeof val === "string") {
            pkgJson.main = val;
            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    }
  });
  for (const warning of trace.warnings) {
    (0, import_build_utils.debug)(`Warning from trace: ${warning.message}`);
  }
  for (const file of trace.fileList) {
    if (remixRunVercelPkgJson && file.endsWith(`@remix-run${import_path.sep}vercel${import_path.sep}package.json`)) {
      // Use the modified `@remix-run/vercel` package.json which contains the "browser" field
      files[file] = new import_build_utils.FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await import_build_utils.FileFsRef.fromFsPath({ fsPath: (0, import_path.join)(rootDir, file) });
    }
  }
  const fn = new import_build_utils.EdgeFunction({
    files,
    deploymentTarget: "v8-worker",
    name: "render",
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: "remix",
      version: remixVersion
    }
  });
  return fn;
}
async function writeEntrypointFile(path, data, rootDir) {
  try {
    await import_fs.promises.writeFile(path, data);
  } catch (err) {
    if (err.code === "ENOENT") {
      throw new Error(
        `The "${(0, import_path.relative)(
          rootDir,
          (0, import_path.dirname)(path)
        )}" directory does not exist. Please contact support at https://vercel.com/help.`
      );
    }
    throw err;
  }
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  build
});
//# sourceMappingURL=build.js.map
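
Taken together, `build()` returns a routes table plus an output map of static files and render functions. A hedged sketch of the result shape for a hypothetical app with one dynamic route (the route name, regex, and `dest` path format are illustrative assumptions, not taken from this package):

// Sketch of the value returned by build() for a hypothetical app:
const result = {
  routes: [
    // Fingerprinted build assets are cached for a year
    { src: "^/build/(.*)$", headers: { "cache-control": "public, max-age=31536000, immutable" }, continue: true },
    // Static files are served first
    { handle: "filesystem" },
    // A dynamic route mapped to its server bundle's function (illustrative)
    { src: "^/blog/([^/]+?)$", dest: "blog/$slug" },
    // Everything else falls through to the Remix-rendered 404
    { src: "/(.*)", dest: "/404" }
  ],
  output: {
    // "favicon.ico": FileFsRef,   ...static assets from the build directory...
    // "blog/$slug": NodejsLambda or EdgeFunction,
    // "404":        NodejsLambda or EdgeFunction
  },
  framework: { version: "1.15.3" }
};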
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.patchHydrogenServer = void 0; | ||
const path_1 = require("path"); | ||
const ts_morph_1 = require("ts-morph"); | ||
/** | ||
* For Hydrogen v2, the `server.ts` file exports a signature like: | ||
* | ||
* ``` | ||
* export default { | ||
* async fetch( | ||
* request: Request, | ||
* env: Env, | ||
* executionContext: ExecutionContext, | ||
* ): Promise<Response>; | ||
* } | ||
* ``` | ||
* | ||
* Here we parse the AST of that file so that we can: | ||
* | ||
* 1. Convert the signature to be compatible with Vercel Edge functions | ||
* (i.e. `export default (res: Response): Promise<Response>`). | ||
* | ||
* 2. Track usages of the `env` parameter which (which gets removed), | ||
* so that we can create that object based on `process.env`. | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var hydrogen_exports = {}; | ||
__export(hydrogen_exports, { | ||
patchHydrogenServer: () => patchHydrogenServer | ||
}); | ||
module.exports = __toCommonJS(hydrogen_exports); | ||
var import_path = require("path"); | ||
var import_ts_morph = require("ts-morph"); | ||
function patchHydrogenServer(project, serverEntryPoint) { | ||
const sourceFile = project.addSourceFileAtPath(serverEntryPoint); | ||
const defaultExportSymbol = sourceFile.getDescendantsOfKind(ts_morph_1.SyntaxKind.ExportAssignment)[0]; | ||
const envProperties = []; | ||
if (!defaultExportSymbol) { | ||
console.log(`WARN: No default export found in "${(0, path_1.basename)(serverEntryPoint)}"`); | ||
return; | ||
const sourceFile = project.addSourceFileAtPath(serverEntryPoint); | ||
const defaultExportSymbol = sourceFile.getDescendantsOfKind( | ||
import_ts_morph.SyntaxKind.ExportAssignment | ||
)[0]; | ||
const envProperties = []; | ||
if (!defaultExportSymbol) { | ||
console.log( | ||
`WARN: No default export found in "${(0, import_path.basename)(serverEntryPoint)}"` | ||
); | ||
return; | ||
} | ||
const objectLiteral = defaultExportSymbol.getFirstChildByKind( | ||
import_ts_morph.SyntaxKind.ObjectLiteralExpression | ||
); | ||
if (!import_ts_morph.Node.isObjectLiteralExpression(objectLiteral)) { | ||
console.log( | ||
`WARN: Default export in "${(0, import_path.basename)( | ||
serverEntryPoint | ||
)}" does not conform to Oxygen syntax` | ||
); | ||
return; | ||
} | ||
const fetchMethod = objectLiteral.getProperty("fetch"); | ||
if (!fetchMethod || !import_ts_morph.Node.isMethodDeclaration(fetchMethod)) { | ||
console.log( | ||
`WARN: Default export in "${(0, import_path.basename)( | ||
serverEntryPoint | ||
)}" does not conform to Oxygen syntax` | ||
); | ||
return; | ||
} | ||
const parameters = fetchMethod.getParameters(); | ||
const envParam = parameters[1]; | ||
const envParamName = envParam.getName(); | ||
if (envParam) { | ||
fetchMethod.forEachDescendant((node) => { | ||
if (import_ts_morph.Node.isPropertyAccessExpression(node) && node.getExpression().getText() === envParamName) { | ||
envProperties.push(node.getName()); | ||
} | ||
}); | ||
} | ||
fetchMethod.forEachDescendant((node) => { | ||
if (import_ts_morph.Node.isCallExpression(node) && node.getExpression().getText() === "caches.open") { | ||
node.replaceWithText(`undefined /* ${node.getText()} */`); | ||
} | ||
const objectLiteral = defaultExportSymbol.getFirstChildByKind(ts_morph_1.SyntaxKind.ObjectLiteralExpression); | ||
if (!ts_morph_1.Node.isObjectLiteralExpression(objectLiteral)) { | ||
console.log(`WARN: Default export in "${(0, path_1.basename)(serverEntryPoint)}" does not conform to Oxygen syntax`); | ||
return; | ||
} | ||
const fetchMethod = objectLiteral.getProperty('fetch'); | ||
if (!fetchMethod || !ts_morph_1.Node.isMethodDeclaration(fetchMethod)) { | ||
console.log(`WARN: Default export in "${(0, path_1.basename)(serverEntryPoint)}" does not conform to Oxygen syntax`); | ||
return; | ||
} | ||
const parameters = fetchMethod.getParameters(); | ||
// Find usages of the env object within the fetch method | ||
const envParam = parameters[1]; | ||
const envParamName = envParam.getName(); | ||
if (envParam) { | ||
fetchMethod.forEachDescendant(node => { | ||
if (ts_morph_1.Node.isPropertyAccessExpression(node) && | ||
node.getExpression().getText() === envParamName) { | ||
envProperties.push(node.getName()); | ||
} | ||
}); | ||
} | ||
// Vercel does not support the Web Cache API, so find | ||
// and replace `caches.open()` calls with `undefined` | ||
fetchMethod.forEachDescendant(node => { | ||
if (ts_morph_1.Node.isCallExpression(node) && | ||
node.getExpression().getText() === 'caches.open') { | ||
node.replaceWithText(`undefined /* ${node.getText()} */`); | ||
} | ||
}); | ||
// Remove the 'env' parameter to match Vercel's Edge signature | ||
parameters.splice(1, 1); | ||
// Construct the new function with the parameters and body of the original fetch method | ||
const newFunction = `export default async function(${parameters | ||
.map(p => p.getText()) | ||
.join(', ')}) ${fetchMethod.getBody().getText()}`; | ||
defaultExportSymbol.replaceWithText(newFunction); | ||
const defaultEnvVars = { | ||
SESSION_SECRET: 'foobar', | ||
PUBLIC_STORE_DOMAIN: 'mock.shop', | ||
}; | ||
const envCode = `const env = { ${envProperties | ||
.map(name => `${name}: process.env.${name}`) | ||
.join(', ')} };\n${Object.entries(defaultEnvVars) | ||
.map(([k, v]) => `if (!env.${k}) { env.${k} = ${JSON.stringify(v)}; console.warn('Warning: ${JSON.stringify(k)} env var not set - using default value ${JSON.stringify(v)}'); }`) | ||
.join('\n')}`; | ||
const updatedCodeString = sourceFile.getFullText(); | ||
return `${envCode}\n${updatedCodeString}`; | ||
}); | ||
parameters.splice(1, 1); | ||
const newFunction = `export default async function(${parameters.map((p) => p.getText()).join(", ")}) ${fetchMethod.getBody().getText()}`; | ||
defaultExportSymbol.replaceWithText(newFunction); | ||
const defaultEnvVars = { | ||
SESSION_SECRET: "foobar", | ||
PUBLIC_STORE_DOMAIN: "mock.shop" | ||
}; | ||
const envCode = `const env = { ${envProperties.map((name) => `${name}: process.env.${name}`).join(", ")} }; | ||
${Object.entries(defaultEnvVars).map( | ||
([k, v]) => `if (!env.${k}) { env.${k} = ${JSON.stringify( | ||
v | ||
)}; console.warn('Warning: ${JSON.stringify( | ||
k | ||
)} env var not set - using default value ${JSON.stringify(v)}'); }` | ||
).join("\n")}`; | ||
const updatedCodeString = sourceFile.getFullText(); | ||
return `${envCode} | ||
${updatedCodeString}`; | ||
} | ||
exports.patchHydrogenServer = patchHydrogenServer; | ||
//# sourceMappingURL=hydrogen.js.map | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
patchHydrogenServer | ||
}); | ||
//# sourceMappingURL=hydrogen.js.map |
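
A concrete before/after sketch of this transform (the input file and its `handleRequest` helper are hypothetical; the output shape follows the templates above):

// Before — a typical Hydrogen v2 (Oxygen) `server.ts`, illustrative:
//
//   export default {
//     async fetch(request, env, executionContext) {
//       const cache = await caches.open('hydrogen');
//       return handleRequest(request, {
//         session: env.SESSION_SECRET,
//         storeDomain: env.PUBLIC_STORE_DOMAIN,
//         cache,
//       });
//     },
//   };

// After — what `patchHydrogenServer` would return for that input:
const env = { SESSION_SECRET: process.env.SESSION_SECRET, PUBLIC_STORE_DOMAIN: process.env.PUBLIC_STORE_DOMAIN };
if (!env.SESSION_SECRET) { env.SESSION_SECRET = "foobar"; console.warn('Warning: "SESSION_SECRET" env var not set - using default value "foobar"'); }
if (!env.PUBLIC_STORE_DOMAIN) { env.PUBLIC_STORE_DOMAIN = "mock.shop"; console.warn('Warning: "PUBLIC_STORE_DOMAIN" env var not set - using default value "mock.shop"'); }
export default async function(request, executionContext) {
  const cache = await undefined /* caches.open('hydrogen') */;
  return handleRequest(request, {
    session: env.SESSION_SECRET,
    storeDomain: env.PUBLIC_STORE_DOMAIN,
    cache,
  });
}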
"use strict"; | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
var desc = Object.getOwnPropertyDescriptor(m, k); | ||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||
desc = { enumerable: true, get: function() { return m[k]; } }; | ||
} | ||
Object.defineProperty(o, k2, desc); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __exportStar = (this && this.__exportStar) || function(m, exports) { | ||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.version = void 0; | ||
exports.version = 2; | ||
__exportStar(require("./build"), exports); | ||
__exportStar(require("./prepare-cache"), exports); | ||
//# sourceMappingURL=index.js.map | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var src_exports = {}; | ||
__export(src_exports, { | ||
version: () => version | ||
}); | ||
module.exports = __toCommonJS(src_exports); | ||
__reExport(src_exports, require("./build"), module.exports); | ||
__reExport(src_exports, require("./prepare-cache"), module.exports); | ||
const version = 2; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
version, | ||
...require("./build"), | ||
...require("./prepare-cache") | ||
}); | ||
//# sourceMappingURL=index.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.prepareCache = void 0; | ||
const build_utils_1 = require("@vercel/build-utils"); | ||
const path_1 = require("path"); | ||
const utils_1 = require("./utils"); | ||
const prepareCache = async ({ entrypoint, repoRootPath, workPath, }) => { | ||
const root = repoRootPath || workPath; | ||
const mountpoint = (0, path_1.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, path_1.join)(workPath, mountpoint); | ||
const packageJsonPath = (0, path_1.join)(entrypointFsDirname, 'package.json'); | ||
const remixRunDevPath = (0, path_1.dirname)(utils_1._require.resolve('@remix-run/dev/package.json', { | ||
paths: [entrypointFsDirname], | ||
})); | ||
const remixConfig = await (0, utils_1.chdirAndReadConfig)(remixRunDevPath, entrypointFsDirname, packageJsonPath); | ||
const [nodeModulesFiles, cacheDirFiles] = await Promise.all([ | ||
// Cache `node_modules` | ||
(0, build_utils_1.glob)('**/node_modules/**', root), | ||
// Cache the Remix "cacheDirectory" (typically `.cache`) | ||
(0, build_utils_1.glob)((0, path_1.relative)(root, (0, path_1.join)(remixConfig.cacheDirectory, '**')), root), | ||
]); | ||
return { ...nodeModulesFiles, ...cacheDirFiles }; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
exports.prepareCache = prepareCache; | ||
//# sourceMappingURL=prepare-cache.js.map | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var prepare_cache_exports = {}; | ||
__export(prepare_cache_exports, { | ||
prepareCache: () => prepareCache | ||
}); | ||
module.exports = __toCommonJS(prepare_cache_exports); | ||
var import_build_utils = require("@vercel/build-utils"); | ||
var import_path = require("path"); | ||
var import_utils = require("./utils"); | ||
const prepareCache = async ({ | ||
entrypoint, | ||
repoRootPath, | ||
workPath | ||
}) => { | ||
const root = repoRootPath || workPath; | ||
const mountpoint = (0, import_path.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, import_path.join)(workPath, mountpoint); | ||
const packageJsonPath = (0, import_path.join)(entrypointFsDirname, "package.json"); | ||
const remixRunDevPath = (0, import_path.dirname)( | ||
import_utils._require.resolve("@remix-run/dev/package.json", { | ||
paths: [entrypointFsDirname] | ||
}) | ||
); | ||
const remixConfig = await (0, import_utils.chdirAndReadConfig)( | ||
remixRunDevPath, | ||
entrypointFsDirname, | ||
packageJsonPath | ||
); | ||
const [nodeModulesFiles, cacheDirFiles] = await Promise.all([ | ||
// Cache `node_modules` | ||
(0, import_build_utils.glob)("**/node_modules/**", root), | ||
// Cache the Remix "cacheDirectory" (typically `.cache`) | ||
(0, import_build_utils.glob)((0, import_path.relative)(root, (0, import_path.join)(remixConfig.cacheDirectory, "**")), root) | ||
]); | ||
return { ...nodeModulesFiles, ...cacheDirFiles }; | ||
}; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
prepareCache | ||
}); | ||
//# sourceMappingURL=prepare-cache.js.map |
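
Functionally, `prepareCache()` is unchanged between the two compilations: it globs the project's `node_modules` plus the Remix `cacheDirectory` and returns the merged file map for Vercel to restore on the next build. A minimal sketch of that logic in isolation (the `collectCache` wrapper is hypothetical; `glob()` is the real `@vercel/build-utils` export used above and resolves to an object keyed by relative file path):

const { glob } = require('@vercel/build-utils');
const { join, relative } = require('path');

// Merge both globs into one cache map; later spreads win on key conflicts.
async function collectCache(root, cacheDirectory) {
  const [nodeModulesFiles, cacheDirFiles] = await Promise.all([
    glob('**/node_modules/**', root),
    glob(relative(root, join(cacheDirectory, '**')), root),
  ]);
  return { ...nodeModulesFiles, ...cacheDirFiles };
}
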
"use strict"; | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
var desc = Object.getOwnPropertyDescriptor(m, k); | ||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||
desc = { enumerable: true, get: function() { return m[k]; } }; | ||
} | ||
Object.defineProperty(o, k2, desc); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | ||
Object.defineProperty(o, "default", { enumerable: true, value: v }); | ||
}) : function(o, v) { | ||
o["default"] = v; | ||
}); | ||
var __importStar = (this && this.__importStar) || function (mod) { | ||
if (mod && mod.__esModule) return mod; | ||
var result = {}; | ||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | ||
__setModuleDefault(result, mod); | ||
return result; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var _a; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.isESM = exports.ensureResolvable = exports.resolveSemverMinMax = exports.addDependencies = exports.chdirAndReadConfig = exports.syncEnv = exports.getRegExpFromPath = exports.getPathFromRoute = exports.getRouteIterator = exports.isLayoutRoute = exports.calculateRouteConfigHash = exports.getResolvedRouteConfig = exports.findConfig = exports.findEntry = exports._require = void 0; | ||
const semver_1 = __importDefault(require("semver")); | ||
const child_process_1 = require("child_process"); | ||
const fs_1 = require("fs"); | ||
const path_1 = require("path"); | ||
const path_to_regexp_1 = require("path-to-regexp"); | ||
const build_utils_1 = require("@vercel/build-utils"); | ||
const build_utils_2 = require("@vercel/build-utils"); | ||
exports._require = eval('require'); | ||
const SPLAT_PATH = '/:params*'; | ||
const entryExts = ['.js', '.jsx', '.ts', '.tsx']; | ||
function findEntry(dir, basename) { | ||
for (const ext of entryExts) { | ||
const file = (0, path_1.resolve)(dir, basename + ext); | ||
if ((0, fs_1.existsSync)(file)) | ||
return (0, path_1.relative)(dir, file); | ||
} | ||
return undefined; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var utils_exports = {}; | ||
__export(utils_exports, { | ||
_require: () => _require, | ||
addDependencies: () => addDependencies, | ||
calculateRouteConfigHash: () => calculateRouteConfigHash, | ||
chdirAndReadConfig: () => chdirAndReadConfig, | ||
ensureResolvable: () => ensureResolvable, | ||
findConfig: () => findConfig, | ||
findEntry: () => findEntry, | ||
getPathFromRoute: () => getPathFromRoute, | ||
getRegExpFromPath: () => getRegExpFromPath, | ||
getResolvedRouteConfig: () => getResolvedRouteConfig, | ||
getRouteIterator: () => getRouteIterator, | ||
isESM: () => isESM, | ||
isLayoutRoute: () => isLayoutRoute, | ||
resolveSemverMinMax: () => resolveSemverMinMax, | ||
syncEnv: () => syncEnv | ||
}); | ||
module.exports = __toCommonJS(utils_exports); | ||
var import_semver = __toESM(require("semver")); | ||
var import_child_process = require("child_process"); | ||
var import_fs = require("fs"); | ||
var import_path = require("path"); | ||
var import_path_to_regexp = require("path-to-regexp"); | ||
var import_build_utils = require("@vercel/build-utils"); | ||
var import_build_utils2 = require("@vercel/build-utils"); | ||
const _require = eval("require"); | ||
const SPLAT_PATH = "/:params*"; | ||
const entryExts = [".js", ".jsx", ".ts", ".tsx"]; | ||
function findEntry(dir, basename2) { | ||
for (const ext of entryExts) { | ||
const file = (0, import_path.resolve)(dir, basename2 + ext); | ||
if ((0, import_fs.existsSync)(file)) | ||
return (0, import_path.relative)(dir, file); | ||
} | ||
return void 0; | ||
} | ||
exports.findEntry = findEntry; | ||
const configExts = ['.js', '.cjs', '.mjs']; | ||
function findConfig(dir, basename) { | ||
for (const ext of configExts) { | ||
const name = basename + ext; | ||
const file = (0, path_1.join)(dir, name); | ||
if ((0, fs_1.existsSync)(file)) | ||
return file; | ||
} | ||
return undefined; | ||
const configExts = [".js", ".cjs", ".mjs"]; | ||
function findConfig(dir, basename2) { | ||
for (const ext of configExts) { | ||
const name = basename2 + ext; | ||
const file = (0, import_path.join)(dir, name); | ||
if ((0, import_fs.existsSync)(file)) | ||
return file; | ||
} | ||
return void 0; | ||
} | ||
exports.findConfig = findConfig; | ||
function isEdgeRuntime(runtime) { | ||
return runtime === 'edge' || runtime === 'experimental-edge'; | ||
return runtime === "edge" || runtime === "experimental-edge"; | ||
} | ||
function getResolvedRouteConfig(route, routes, configs, isHydrogen2) { | ||
let runtime; | ||
let regions; | ||
let maxDuration; | ||
let memory; | ||
for (const currentRoute of getRouteIterator(route, routes)) { | ||
const staticConfig = configs.get(currentRoute); | ||
if (staticConfig) { | ||
if (typeof runtime === 'undefined' && staticConfig.runtime) { | ||
runtime = isEdgeRuntime(staticConfig.runtime) ? 'edge' : 'nodejs'; | ||
} | ||
if (typeof regions === 'undefined') { | ||
regions = staticConfig.regions; | ||
} | ||
if (typeof maxDuration === 'undefined') { | ||
maxDuration = staticConfig.maxDuration; | ||
} | ||
if (typeof memory === 'undefined') { | ||
memory = staticConfig.memory; | ||
} | ||
} | ||
let runtime; | ||
let regions; | ||
let maxDuration; | ||
let memory; | ||
for (const currentRoute of getRouteIterator(route, routes)) { | ||
const staticConfig = configs.get(currentRoute); | ||
if (staticConfig) { | ||
if (typeof runtime === "undefined" && staticConfig.runtime) { | ||
runtime = isEdgeRuntime(staticConfig.runtime) ? "edge" : "nodejs"; | ||
} | ||
if (typeof regions === "undefined") { | ||
regions = staticConfig.regions; | ||
} | ||
if (typeof maxDuration === "undefined") { | ||
maxDuration = staticConfig.maxDuration; | ||
} | ||
if (typeof memory === "undefined") { | ||
memory = staticConfig.memory; | ||
} | ||
} | ||
if (Array.isArray(regions)) { | ||
regions = Array.from(new Set(regions)).sort(); | ||
} | ||
if (isHydrogen2 || runtime === 'edge') { | ||
return { runtime: 'edge', regions }; | ||
} | ||
if (regions && !Array.isArray(regions)) { | ||
throw new Error(`"regions" for route "${route.id}" must be an array of strings`); | ||
} | ||
return { runtime: 'nodejs', regions, maxDuration, memory }; | ||
} | ||
if (Array.isArray(regions)) { | ||
regions = Array.from(new Set(regions)).sort(); | ||
} | ||
if (isHydrogen2 || runtime === "edge") { | ||
return { runtime: "edge", regions }; | ||
} | ||
if (regions && !Array.isArray(regions)) { | ||
throw new Error( | ||
`"regions" for route "${route.id}" must be an array of strings` | ||
); | ||
} | ||
return { runtime: "nodejs", regions, maxDuration, memory }; | ||
} | ||
exports.getResolvedRouteConfig = getResolvedRouteConfig; | ||
function calculateRouteConfigHash(config) { | ||
const str = JSON.stringify(config); | ||
return Buffer.from(str).toString('base64url'); | ||
const str = JSON.stringify(config); | ||
return Buffer.from(str).toString("base64url"); | ||
} | ||
exports.calculateRouteConfigHash = calculateRouteConfigHash; | ||
function isLayoutRoute(routeId, routes) { | ||
return routes.some(r => r.parentId === routeId); | ||
return routes.some((r) => r.parentId === routeId); | ||
} | ||
exports.isLayoutRoute = isLayoutRoute; | ||
function* getRouteIterator(route, routes) { | ||
let currentRoute = route; | ||
do { | ||
yield currentRoute; | ||
if (currentRoute.parentId) { | ||
currentRoute = routes[currentRoute.parentId]; | ||
} | ||
else { | ||
break; | ||
} | ||
} while (currentRoute); | ||
let currentRoute = route; | ||
do { | ||
yield currentRoute; | ||
if (currentRoute.parentId) { | ||
currentRoute = routes[currentRoute.parentId]; | ||
} else { | ||
break; | ||
} | ||
} while (currentRoute); | ||
} | ||
exports.getRouteIterator = getRouteIterator; | ||
function getPathFromRoute(route, routes) { | ||
if (route.id === 'root' || | ||
(route.parentId === 'root' && !route.path && route.index)) { | ||
return { path: 'index', rePath: '/index' }; | ||
} | ||
const pathParts = []; | ||
const rePathParts = []; | ||
for (const currentRoute of getRouteIterator(route, routes)) { | ||
if (!currentRoute.path) | ||
continue; | ||
const currentRouteParts = currentRoute.path.split('/').reverse(); | ||
for (const part of currentRouteParts) { | ||
if (part.endsWith('?')) { | ||
if (part.startsWith(':')) { | ||
// Optional path parameter | ||
pathParts.push(`(${part.substring(0, part.length - 1)})`); | ||
rePathParts.push(part); | ||
} | ||
else { | ||
// Optional static segment | ||
const p = `(${part.substring(0, part.length - 1)})`; | ||
pathParts.push(p); | ||
rePathParts.push(`${p}?`); | ||
} | ||
} | ||
else { | ||
pathParts.push(part); | ||
rePathParts.push(part); | ||
} | ||
if (route.id === "root" || route.parentId === "root" && !route.path && route.index) { | ||
return { path: "index", rePath: "/index" }; | ||
} | ||
const pathParts = []; | ||
const rePathParts = []; | ||
for (const currentRoute of getRouteIterator(route, routes)) { | ||
if (!currentRoute.path) | ||
continue; | ||
const currentRouteParts = currentRoute.path.split("/").reverse(); | ||
for (const part of currentRouteParts) { | ||
if (part.endsWith("?")) { | ||
if (part.startsWith(":")) { | ||
pathParts.push(`(${part.substring(0, part.length - 1)})`); | ||
rePathParts.push(part); | ||
} else { | ||
const p = `(${part.substring(0, part.length - 1)})`; | ||
pathParts.push(p); | ||
rePathParts.push(`${p}?`); | ||
} | ||
} else { | ||
pathParts.push(part); | ||
rePathParts.push(part); | ||
} | ||
} | ||
const path = pathParts.reverse().join('/'); | ||
// Replace "/*" at the end to handle "splat routes" | ||
let rePath = rePathParts.reverse().join('/'); | ||
rePath = | ||
rePath === '*' ? SPLAT_PATH : `/${rePath.replace(/\/\*$/, SPLAT_PATH)}`; | ||
return { path, rePath }; | ||
} | ||
const path = pathParts.reverse().join("/"); | ||
let rePath = rePathParts.reverse().join("/"); | ||
rePath = rePath === "*" ? SPLAT_PATH : `/${rePath.replace(/\/\*$/, SPLAT_PATH)}`; | ||
return { path, rePath }; | ||
} | ||
exports.getPathFromRoute = getPathFromRoute; | ||
function getRegExpFromPath(rePath) { | ||
const keys = []; | ||
const re = (0, path_to_regexp_1.pathToRegexp)(rePath, keys); | ||
return keys.length > 0 ? re : false; | ||
const keys = []; | ||
const re = (0, import_path_to_regexp.pathToRegexp)(rePath, keys); | ||
return keys.length > 0 ? re : false; | ||
} | ||
exports.getRegExpFromPath = getRegExpFromPath; | ||
/** | ||
* Updates the `dest` process.env object to match the `source` one. | ||
* A function is returned to restore the the `dest` env back to how | ||
* it was originally. | ||
*/ | ||
function syncEnv(source, dest) { | ||
const originalDest = { ...dest }; | ||
Object.assign(dest, source); | ||
for (const key of Object.keys(dest)) { | ||
if (!(key in source)) { | ||
delete dest[key]; | ||
} | ||
const originalDest = { ...dest }; | ||
Object.assign(dest, source); | ||
for (const key of Object.keys(dest)) { | ||
if (!(key in source)) { | ||
delete dest[key]; | ||
} | ||
return () => syncEnv(originalDest, dest); | ||
} | ||
return () => syncEnv(originalDest, dest); | ||
} | ||
exports.syncEnv = syncEnv; | ||
async function chdirAndReadConfig(remixRunDevPath, dir, packageJsonPath) { | ||
const { readConfig } = await (_a = (0, path_1.join)(remixRunDevPath, 'dist/config.js'), Promise.resolve().then(() => __importStar(require(_a)))); | ||
const originalCwd = process.cwd(); | ||
// As of Remix v1.14.0, reading the config may trigger adding | ||
// "isbot" as a dependency, and `npm`/`pnpm`/`yarn` may be invoked. | ||
// We want to prevent that behavior, so trick `readConfig()` | ||
// into thinking that "isbot" is already installed. | ||
let modifiedPackageJson = false; | ||
const pkgRaw = await fs_1.promises.readFile(packageJsonPath, 'utf8'); | ||
const pkg = JSON.parse(pkgRaw); | ||
if (!pkg.dependencies?.['isbot']) { | ||
pkg.dependencies.isbot = 'latest'; | ||
await fs_1.promises.writeFile(packageJsonPath, JSON.stringify(pkg)); | ||
modifiedPackageJson = true; | ||
const { readConfig } = await import((0, import_path.join)(remixRunDevPath, "dist/config.js")); | ||
const originalCwd = process.cwd(); | ||
let modifiedPackageJson = false; | ||
const pkgRaw = await import_fs.promises.readFile(packageJsonPath, "utf8"); | ||
const pkg = JSON.parse(pkgRaw); | ||
if (!pkg.dependencies?.["isbot"]) { | ||
pkg.dependencies.isbot = "latest"; | ||
await import_fs.promises.writeFile(packageJsonPath, JSON.stringify(pkg)); | ||
modifiedPackageJson = true; | ||
} | ||
const warn = console.warn; | ||
console.warn = import_build_utils.debug; | ||
let remixConfig; | ||
try { | ||
process.chdir(dir); | ||
remixConfig = await readConfig(dir); | ||
} finally { | ||
console.warn = warn; | ||
process.chdir(originalCwd); | ||
if (modifiedPackageJson) { | ||
await import_fs.promises.writeFile(packageJsonPath, pkgRaw); | ||
} | ||
// Suppress any warnings emitted from `readConfig()` to avoid | ||
// printing them > 1 time. They will already be printed during | ||
// `remix build` when invoking the Build Command. | ||
const warn = console.warn; | ||
console.warn = build_utils_1.debug; | ||
let remixConfig; | ||
try { | ||
process.chdir(dir); | ||
remixConfig = await readConfig(dir); | ||
} | ||
finally { | ||
console.warn = warn; | ||
process.chdir(originalCwd); | ||
if (modifiedPackageJson) { | ||
await fs_1.promises.writeFile(packageJsonPath, pkgRaw); | ||
} | ||
} | ||
return remixConfig; | ||
} | ||
return remixConfig; | ||
} | ||
exports.chdirAndReadConfig = chdirAndReadConfig; | ||
/** | ||
* Runs `npm i ${name}` / `pnpm i ${name}` / `yarn add ${name}`. | ||
*/ | ||
function addDependencies(cliType, names, opts = {}) { | ||
(0, build_utils_1.debug)('Installing additional dependencies:'); | ||
for (const name of names) { | ||
(0, build_utils_1.debug)(` - ${name}`); | ||
(0, import_build_utils.debug)("Installing additional dependencies:"); | ||
for (const name of names) { | ||
(0, import_build_utils.debug)(` - ${name}`); | ||
} | ||
const args = []; | ||
if (cliType === "npm" || cliType === "pnpm") { | ||
args.push("install"); | ||
if (opts.saveDev) { | ||
args.push("--save-dev"); | ||
} | ||
const args = []; | ||
if (cliType === 'npm' || cliType === 'pnpm') { | ||
args.push('install'); | ||
if (opts.saveDev) { | ||
args.push('--save-dev'); | ||
} | ||
} else { | ||
args.push("add"); | ||
if (opts.saveDev) { | ||
args.push("--dev"); | ||
} | ||
else { | ||
// 'yarn' | ||
args.push('add'); | ||
if (opts.saveDev) { | ||
args.push('--dev'); | ||
} | ||
const yarnVersion = (0, child_process_1.execSync)('yarn -v', { encoding: 'utf8' }).trim(); | ||
const isYarnV1 = semver_1.default.satisfies(yarnVersion, '1'); | ||
if (isYarnV1) { | ||
// Ignoring workspace check is only needed on Yarn v1 | ||
args.push('--ignore-workspace-root-check'); | ||
} | ||
const yarnVersion = (0, import_child_process.execSync)("yarn -v", { encoding: "utf8" }).trim(); | ||
const isYarnV1 = import_semver.default.satisfies(yarnVersion, "1"); | ||
if (isYarnV1) { | ||
args.push("--ignore-workspace-root-check"); | ||
} | ||
// Don't fail if pnpm is being run at the workspace root | ||
if (cliType === 'pnpm' && opts.cwd) { | ||
if ((0, fs_1.existsSync)((0, path_1.join)(opts.cwd, 'pnpm-workspace.yaml'))) { | ||
args.push('--workspace-root'); | ||
} | ||
} | ||
if (cliType === "pnpm" && opts.cwd) { | ||
if ((0, import_fs.existsSync)((0, import_path.join)(opts.cwd, "pnpm-workspace.yaml"))) { | ||
args.push("--workspace-root"); | ||
} | ||
return (0, build_utils_1.spawnAsync)(cliType, args.concat(names), opts); | ||
} | ||
return (0, import_build_utils.spawnAsync)(cliType, args.concat(names), opts); | ||
} | ||
exports.addDependencies = addDependencies; | ||
function resolveSemverMinMax(min, max, version) { | ||
const floored = semver_1.default.intersects(version, `>= ${min}`) ? version : min; | ||
return semver_1.default.intersects(floored, `<= ${max}`) ? floored : max; | ||
const floored = import_semver.default.intersects(version, `>= ${min}`) ? version : min; | ||
return import_semver.default.intersects(floored, `<= ${max}`) ? floored : max; | ||
} | ||
exports.resolveSemverMinMax = resolveSemverMinMax; | ||
async function ensureResolvable(start, base, pkgName) { | ||
try { | ||
const resolvedPkgPath = exports._require.resolve(`${pkgName}/package.json`, { | ||
paths: [start], | ||
}); | ||
const resolvedPath = (0, path_1.dirname)(resolvedPkgPath); | ||
if (!(0, path_1.relative)(base, resolvedPath).startsWith(`..${path_1.sep}`)) { | ||
// Resolved path is within the root of the project, so all good | ||
(0, build_utils_1.debug)(`"${pkgName}" resolved to '${resolvedPath}'`); | ||
return resolvedPath; | ||
} | ||
try { | ||
const resolvedPkgPath = _require.resolve(`${pkgName}/package.json`, { | ||
paths: [start] | ||
}); | ||
const resolvedPath = (0, import_path.dirname)(resolvedPkgPath); | ||
if (!(0, import_path.relative)(base, resolvedPath).startsWith(`..${import_path.sep}`)) { | ||
(0, import_build_utils.debug)(`"${pkgName}" resolved to '${resolvedPath}'`); | ||
return resolvedPath; | ||
} | ||
catch (err) { | ||
if (err.code !== 'MODULE_NOT_FOUND') { | ||
throw err; | ||
} | ||
} catch (err) { | ||
if (err.code !== "MODULE_NOT_FOUND") { | ||
throw err; | ||
} | ||
// If we got to here then `pkgName` was not resolvable up to the root | ||
// of the project. Try a couple symlink tricks, otherwise we'll bail. | ||
// Attempt to find the package in `node_modules/.pnpm` (pnpm) | ||
const pnpmDir = await (0, build_utils_2.walkParentDirs)({ | ||
base, | ||
start, | ||
filename: 'node_modules/.pnpm', | ||
}); | ||
if (pnpmDir) { | ||
const prefix = `${pkgName.replace('/', '+')}@`; | ||
const packages = await fs_1.promises.readdir(pnpmDir); | ||
const match = packages.find(p => p.startsWith(prefix)); | ||
if (match) { | ||
const pkgDir = (0, path_1.join)(pnpmDir, match, 'node_modules', pkgName); | ||
await ensureSymlink(pkgDir, (0, path_1.join)(start, 'node_modules'), pkgName); | ||
return pkgDir; | ||
} | ||
} | ||
const pnpmDir = await (0, import_build_utils2.walkParentDirs)({ | ||
base, | ||
start, | ||
filename: "node_modules/.pnpm" | ||
}); | ||
if (pnpmDir) { | ||
const prefix = `${pkgName.replace("/", "+")}@`; | ||
const packages = await import_fs.promises.readdir(pnpmDir); | ||
const match = packages.find((p) => p.startsWith(prefix)); | ||
if (match) { | ||
const pkgDir = (0, import_path.join)(pnpmDir, match, "node_modules", pkgName); | ||
await ensureSymlink(pkgDir, (0, import_path.join)(start, "node_modules"), pkgName); | ||
return pkgDir; | ||
} | ||
// Attempt to find the package in `node_modules/.store` (npm 9+ linked mode) | ||
const npmDir = await (0, build_utils_2.walkParentDirs)({ | ||
base, | ||
start, | ||
filename: 'node_modules/.store', | ||
}); | ||
if (npmDir) { | ||
const prefix = `${(0, path_1.basename)(pkgName)}@`; | ||
const prefixDir = (0, path_1.join)(npmDir, (0, path_1.dirname)(pkgName)); | ||
const packages = await fs_1.promises.readdir(prefixDir); | ||
const match = packages.find(p => p.startsWith(prefix)); | ||
if (match) { | ||
const pkgDir = (0, path_1.join)(prefixDir, match, 'node_modules', pkgName); | ||
await ensureSymlink(pkgDir, (0, path_1.join)(start, 'node_modules'), pkgName); | ||
return pkgDir; | ||
} | ||
} | ||
const npmDir = await (0, import_build_utils2.walkParentDirs)({ | ||
base, | ||
start, | ||
filename: "node_modules/.store" | ||
}); | ||
if (npmDir) { | ||
const prefix = `${(0, import_path.basename)(pkgName)}@`; | ||
const prefixDir = (0, import_path.join)(npmDir, (0, import_path.dirname)(pkgName)); | ||
const packages = await import_fs.promises.readdir(prefixDir); | ||
const match = packages.find((p) => p.startsWith(prefix)); | ||
if (match) { | ||
const pkgDir = (0, import_path.join)(prefixDir, match, "node_modules", pkgName); | ||
await ensureSymlink(pkgDir, (0, import_path.join)(start, "node_modules"), pkgName); | ||
return pkgDir; | ||
} | ||
throw new Error(`Failed to resolve "${pkgName}". To fix this error, add "${pkgName}" to "dependencies" in your \`package.json\` file.`); | ||
} | ||
throw new Error( | ||
`Failed to resolve "${pkgName}". To fix this error, add "${pkgName}" to "dependencies" in your \`package.json\` file.` | ||
); | ||
} | ||
exports.ensureResolvable = ensureResolvable; | ||
async function ensureSymlink(target, nodeModulesDir, pkgName) { | ||
const symlinkPath = (0, path_1.join)(nodeModulesDir, pkgName); | ||
const symlinkDir = (0, path_1.dirname)(symlinkPath); | ||
const relativeTarget = (0, path_1.relative)(symlinkDir, target); | ||
try { | ||
const existingTarget = await fs_1.promises.readlink(symlinkPath); | ||
if (existingTarget === relativeTarget) { | ||
// Symlink is already the expected value, so do nothing | ||
return; | ||
} | ||
else { | ||
// If a symlink already exists then delete it if the target doesn't match | ||
await fs_1.promises.unlink(symlinkPath); | ||
} | ||
const symlinkPath = (0, import_path.join)(nodeModulesDir, pkgName); | ||
const symlinkDir = (0, import_path.dirname)(symlinkPath); | ||
const relativeTarget = (0, import_path.relative)(symlinkDir, target); | ||
try { | ||
const existingTarget = await import_fs.promises.readlink(symlinkPath); | ||
if (existingTarget === relativeTarget) { | ||
return; | ||
} else { | ||
await import_fs.promises.unlink(symlinkPath); | ||
} | ||
catch (err) { | ||
// Ignore when path does not exist or is not a symlink | ||
if (err.code !== 'ENOENT' && err.code !== 'EINVAL') { | ||
throw err; | ||
} | ||
} catch (err) { | ||
if (err.code !== "ENOENT" && err.code !== "EINVAL") { | ||
throw err; | ||
} | ||
await fs_1.promises.symlink(relativeTarget, symlinkPath); | ||
(0, build_utils_1.debug)(`Created symlink for "${pkgName}"`); | ||
} | ||
await import_fs.promises.symlink(relativeTarget, symlinkPath); | ||
(0, import_build_utils.debug)(`Created symlink for "${pkgName}"`); | ||
} | ||
function isESM(path) { | ||
// Figure out if the `remix.config` file is using ESM syntax | ||
let isESM = false; | ||
try { | ||
(0, exports._require)(path); | ||
} | ||
catch (err) { | ||
isESM = err.code === 'ERR_REQUIRE_ESM'; | ||
} | ||
return isESM; | ||
let isESM2 = false; | ||
try { | ||
_require(path); | ||
} catch (err) { | ||
isESM2 = err.code === "ERR_REQUIRE_ESM"; | ||
} | ||
return isESM2; | ||
} | ||
exports.isESM = isESM; | ||
//# sourceMappingURL=utils.js.map | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
_require, | ||
addDependencies, | ||
calculateRouteConfigHash, | ||
chdirAndReadConfig, | ||
ensureResolvable, | ||
findConfig, | ||
findEntry, | ||
getPathFromRoute, | ||
getRegExpFromPath, | ||
getResolvedRouteConfig, | ||
getRouteIterator, | ||
isESM, | ||
isLayoutRoute, | ||
resolveSemverMinMax, | ||
syncEnv | ||
}); | ||
//# sourceMappingURL=utils.js.map |
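
One behavioral note on the utilities above: `resolveSemverMinMax()` clamps a requested semver range between a supported minimum and maximum using `semver.intersects()`. A small standalone sketch of the same logic (the example version strings are hypothetical):

const semver = require('semver');

// Keep `version` if it can satisfy `>= min`, else fall back to `min`;
// then repeat the check against `max`.
function resolveSemverMinMax(min, max, version) {
  const floored = semver.intersects(version, `>= ${min}`) ? version : min;
  return semver.intersects(floored, `<= ${max}`) ? floored : max;
}

resolveSemverMinMax('1.16.0', '1.19.0', '^1.17.0'); // => '^1.17.0' (in range)
resolveSemverMinMax('1.16.0', '1.19.0', '1.2.3');   // => '1.16.0' (clamped up)
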
{
"name": "@vercel/remix-builder",
"version": "2.0.2",
"version": "2.0.3",
"license": "Apache-2.0",
@@ -17,7 +17,7 @@ "main": "./dist/index.js",
"dependencies": {
"@vercel/build-utils": "7.1.0",
"@vercel/build-utils": "7.1.1",
"@vercel/nft": "0.22.5",
"@vercel/static-config": "3.0.0",
"path-to-regexp": "6.2.1",
"semver": "7.3.8",
"semver": "7.5.2",
"ts-morph": "12.0.0"
@@ -33,3 +33,3 @@ },
"scripts": {
"build": "node build.js",
"build": "node ../../utils/build.mjs",
"test": "jest --reporters=default --reporters=jest-junit --env node --verbose --bail --runInBand",
@@ -36,0 +36,0 @@ "test-unit": "pnpm test test/unit.*test.*",
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package

Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
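
The "Dynamic require" alert most likely refers to the `const _require = eval("require")` pattern in utils.js above: wrapping `require` in `eval` hides the call from bundlers and static analysis so module resolution happens against the real filesystem at runtime. That is a deliberate builder technique, but scanners conservatively flag it as dynamic code execution. An illustration of the pattern (the specifier is just an example):

// `eval` prevents a bundler from statically rewriting this require,
// so it resolves against the deployed filesystem at runtime.
const _require = eval('require');
const remixDevPkg = _require('@remix-run/dev/package.json');
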
+ Added @vercel/build-utils@7.1.1 (transitive)
+ Added semver@7.5.2 (transitive)
- Removed @vercel/build-utils@7.1.0 (transitive)
- Removed semver@7.3.8 (transitive)
Updated @vercel/build-utils@7.1.1
Updated semver@7.5.2