@vercel/redwood
Comparing version 2.0.1 to 2.0.2
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.prepareCache = exports.build = exports.version = void 0; | ||
const path_1 = require("path"); | ||
const fs_1 = require("fs"); | ||
const semver_1 = require("semver"); | ||
const build_utils_1 = require("@vercel/build-utils"); | ||
const nft_1 = require("@vercel/nft"); | ||
const routing_utils_1 = require("@vercel/routing-utils"); | ||
// Do not change this version for RW specific config, | ||
// it refers to Vercels builder version | ||
exports.version = 2; | ||
const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) => { | ||
await (0, build_utils_1.download)(files, workPath, meta); | ||
const prefixedEnvs = (0, build_utils_1.getPrefixedEnvVars)({ | ||
envPrefix: 'REDWOOD_ENV_', | ||
envs: process.env, | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
// src/index.ts | ||
var src_exports = {}; | ||
__export(src_exports, { | ||
build: () => build, | ||
prepareCache: () => prepareCache, | ||
version: () => version | ||
}); | ||
module.exports = __toCommonJS(src_exports); | ||
var import_path = require("path"); | ||
var import_fs = require("fs"); | ||
var import_semver = require("semver"); | ||
var import_build_utils = require("@vercel/build-utils"); | ||
var import_nft = require("@vercel/nft"); | ||
var import_routing_utils = require("@vercel/routing-utils"); | ||
var version = 2; | ||
var build = async ({ | ||
workPath, | ||
files, | ||
entrypoint, | ||
meta = {}, | ||
config = {} | ||
}) => { | ||
await (0, import_build_utils.download)(files, workPath, meta); | ||
const prefixedEnvs = (0, import_build_utils.getPrefixedEnvVars)({ | ||
envPrefix: "REDWOOD_ENV_", | ||
envs: process.env | ||
}); | ||
for (const [key, value] of Object.entries(prefixedEnvs)) { | ||
process.env[key] = value; | ||
} | ||
const { installCommand, buildCommand } = config; | ||
const mountpoint = (0, import_path.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, import_path.join)(workPath, mountpoint); | ||
const nodeVersion = await (0, import_build_utils.getNodeVersion)( | ||
entrypointFsDirname, | ||
void 0, | ||
config, | ||
meta | ||
); | ||
const spawnOpts = (0, import_build_utils.getSpawnOptions)(meta, nodeVersion); | ||
if (!spawnOpts.env) { | ||
spawnOpts.env = {}; | ||
} | ||
const { cliType, lockfileVersion } = await (0, import_build_utils.scanParentDirs)( | ||
entrypointFsDirname | ||
); | ||
spawnOpts.env = (0, import_build_utils.getEnvForPackageManager)({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts.env || {} | ||
}); | ||
if (typeof installCommand === "string") { | ||
if (installCommand.trim()) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
await (0, import_build_utils.execCommand)(installCommand, { | ||
...spawnOpts, | ||
cwd: entrypointFsDirname | ||
}); | ||
} else { | ||
console.log(`Skipping "install" command...`); | ||
} | ||
} else { | ||
await (0, import_build_utils.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion); | ||
} | ||
if (meta.isDev) { | ||
throw new Error("Detected `@vercel/redwood` dev but this is not supported"); | ||
} | ||
const pkg = await (0, import_build_utils.readConfigFile)((0, import_path.join)(workPath, "package.json")); | ||
const toml = await (0, import_build_utils.readConfigFile)( | ||
(0, import_path.join)(workPath, "redwood.toml") | ||
); | ||
if (buildCommand) { | ||
(0, import_build_utils.debug)(`Executing build command "${buildCommand}"`); | ||
await (0, import_build_utils.execCommand)(buildCommand, { | ||
...spawnOpts, | ||
cwd: workPath | ||
}); | ||
for (const [key, value] of Object.entries(prefixedEnvs)) { | ||
process.env[key] = value; | ||
} else if (hasScript("vercel-build", pkg)) { | ||
(0, import_build_utils.debug)(`Executing "yarn vercel-build"`); | ||
await (0, import_build_utils.runPackageJsonScript)(workPath, "vercel-build", spawnOpts); | ||
} else if (hasScript("build", pkg)) { | ||
(0, import_build_utils.debug)(`Executing "yarn build"`); | ||
await (0, import_build_utils.runPackageJsonScript)(workPath, "build", spawnOpts); | ||
} else { | ||
const { devDependencies = {} } = pkg || {}; | ||
const versionRange = devDependencies["@redwoodjs/core"]; | ||
let cmd; | ||
if (!versionRange || !(0, import_semver.validRange)(versionRange)) { | ||
console.log( | ||
"WARNING: Unable to detect RedwoodJS version in package.json devDependencies" | ||
); | ||
cmd = "yarn rw deploy vercel"; | ||
} else if ((0, import_semver.intersects)(versionRange, "<0.25.0")) { | ||
cmd = "yarn rw build && yarn rw db up --no-db-client --auto-approve && yarn rw dataMigrate up"; | ||
} else { | ||
cmd = "yarn rw deploy vercel"; | ||
} | ||
const { installCommand, buildCommand } = config; | ||
const mountpoint = (0, path_1.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, path_1.join)(workPath, mountpoint); | ||
const nodeVersion = await (0, build_utils_1.getNodeVersion)(entrypointFsDirname, undefined, config, meta); | ||
const spawnOpts = (0, build_utils_1.getSpawnOptions)(meta, nodeVersion); | ||
if (!spawnOpts.env) { | ||
spawnOpts.env = {}; | ||
} | ||
const { cliType, lockfileVersion } = await (0, build_utils_1.scanParentDirs)(entrypointFsDirname); | ||
spawnOpts.env = (0, build_utils_1.getEnvForPackageManager)({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts.env || {}, | ||
await (0, import_build_utils.execCommand)(cmd, { | ||
...spawnOpts, | ||
cwd: workPath | ||
}); | ||
if (typeof installCommand === 'string') { | ||
if (installCommand.trim()) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
await (0, build_utils_1.execCommand)(installCommand, { | ||
...spawnOpts, | ||
cwd: entrypointFsDirname, | ||
}); | ||
} | ||
else { | ||
console.log(`Skipping "install" command...`); | ||
} | ||
} | ||
const apiDir = toml?.web?.apiProxyPath?.replace(/^\//, "") ?? "api"; | ||
const apiDistPath = (0, import_path.join)(workPath, "api", "dist", "functions"); | ||
const webDistPath = (0, import_path.join)(workPath, "web", "dist"); | ||
const lambdaOutputs = {}; | ||
const webDistFiles = await (0, import_build_utils.glob)("**", webDistPath); | ||
const staticOutputs = {}; | ||
for (const [fileName, fileFsRef] of Object.entries(webDistFiles)) { | ||
const parsedPath = (0, import_path.parse)(fileFsRef.fsPath); | ||
if (parsedPath.ext !== ".html") { | ||
staticOutputs[fileName] = fileFsRef; | ||
} else { | ||
const fileNameWithoutExtension = (0, import_path.basename)(fileName, ".html"); | ||
const pathWithoutHtmlExtension = (0, import_path.join)( | ||
parsedPath.dir, | ||
fileNameWithoutExtension | ||
); | ||
fileFsRef.contentType = "text/html; charset=utf-8"; | ||
staticOutputs[(0, import_path.relative)(webDistPath, pathWithoutHtmlExtension)] = fileFsRef; | ||
} | ||
else { | ||
await (0, build_utils_1.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion); | ||
} | ||
if (meta.isDev) { | ||
throw new Error('Detected `@vercel/redwood` dev but this is not supported'); | ||
} | ||
const pkg = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'package.json')); | ||
const toml = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'redwood.toml')); | ||
if (buildCommand) { | ||
(0, build_utils_1.debug)(`Executing build command "${buildCommand}"`); | ||
await (0, build_utils_1.execCommand)(buildCommand, { | ||
...spawnOpts, | ||
cwd: workPath, | ||
}); | ||
} | ||
else if (hasScript('vercel-build', pkg)) { | ||
(0, build_utils_1.debug)(`Executing "yarn vercel-build"`); | ||
await (0, build_utils_1.runPackageJsonScript)(workPath, 'vercel-build', spawnOpts); | ||
} | ||
else if (hasScript('build', pkg)) { | ||
(0, build_utils_1.debug)(`Executing "yarn build"`); | ||
await (0, build_utils_1.runPackageJsonScript)(workPath, 'build', spawnOpts); | ||
} | ||
else { | ||
const { devDependencies = {} } = pkg || {}; | ||
const versionRange = devDependencies['@redwoodjs/core']; | ||
let cmd; | ||
if (!versionRange || !(0, semver_1.validRange)(versionRange)) { | ||
console.log('WARNING: Unable to detect RedwoodJS version in package.json devDependencies'); | ||
cmd = 'yarn rw deploy vercel'; // Assume 0.25.0 and newer | ||
} | ||
const functionFiles = { | ||
...await (0, import_build_utils.glob)("*.js", apiDistPath), | ||
// top-level | ||
...await (0, import_build_utils.glob)("*/*.js", apiDistPath) | ||
// one-level deep | ||
}; | ||
const sourceCache = /* @__PURE__ */ new Map(); | ||
const fsCache = /* @__PURE__ */ new Map(); | ||
for (const [funcName, fileFsRef] of Object.entries(functionFiles)) { | ||
const outputName = (0, import_path.join)(apiDir, (0, import_path.parse)(funcName).name); | ||
const absEntrypoint = fileFsRef.fsPath; | ||
const relativeEntrypoint = (0, import_path.relative)(workPath, absEntrypoint); | ||
const awsLambdaHandler = getAWSLambdaHandler(relativeEntrypoint, "handler"); | ||
const sourceFile = relativeEntrypoint.replace("/dist/", "/src/"); | ||
const { fileList, esmFileList, warnings } = await (0, import_nft.nodeFileTrace)( | ||
[absEntrypoint], | ||
{ | ||
base: workPath, | ||
processCwd: workPath, | ||
ts: true, | ||
mixedModules: true, | ||
ignore: config.excludeFiles, | ||
async readFile(fsPath) { | ||
const relPath = (0, import_path.relative)(workPath, fsPath); | ||
const cached = sourceCache.get(relPath); | ||
if (cached) | ||
return cached.toString(); | ||
if (cached === null) | ||
return null; | ||
try { | ||
const source = (0, import_fs.readFileSync)(fsPath); | ||
const { mode } = (0, import_fs.lstatSync)(fsPath); | ||
let entry; | ||
if ((0, import_build_utils.isSymbolicLink)(mode)) { | ||
entry = new import_build_utils.FileFsRef({ fsPath, mode }); | ||
} else { | ||
entry = new import_build_utils.FileBlob({ data: source, mode }); | ||
} | ||
fsCache.set(relPath, entry); | ||
sourceCache.set(relPath, source); | ||
return source.toString(); | ||
} catch (e) { | ||
if (e.code === "ENOENT" || e.code === "EISDIR") { | ||
sourceCache.set(relPath, null); | ||
return null; | ||
} | ||
throw e; | ||
} | ||
} | ||
else if ((0, semver_1.intersects)(versionRange, '<0.25.0')) { | ||
// older than 0.25.0 | ||
cmd = | ||
'yarn rw build && yarn rw db up --no-db-client --auto-approve && yarn rw dataMigrate up'; | ||
} | ||
else { | ||
// 0.25.0 and newer | ||
cmd = 'yarn rw deploy vercel'; | ||
} | ||
await (0, build_utils_1.execCommand)(cmd, { | ||
...spawnOpts, | ||
cwd: workPath, | ||
}); | ||
} | ||
); | ||
for (const warning of warnings) { | ||
(0, import_build_utils.debug)(`Warning from trace: ${warning.message}`); | ||
} | ||
const apiDir = toml?.web?.apiProxyPath?.replace(/^\//, '') ?? 'api'; | ||
const apiDistPath = (0, path_1.join)(workPath, 'api', 'dist', 'functions'); | ||
const webDistPath = (0, path_1.join)(workPath, 'web', 'dist'); | ||
const lambdaOutputs = {}; | ||
// Strip out the .html extensions | ||
// And populate staticOutputs map with updated paths and contentType | ||
const webDistFiles = await (0, build_utils_1.glob)('**', webDistPath); | ||
const staticOutputs = {}; | ||
for (const [fileName, fileFsRef] of Object.entries(webDistFiles)) { | ||
const parsedPath = (0, path_1.parse)(fileFsRef.fsPath); | ||
if (parsedPath.ext !== '.html') { | ||
// No need to transform non-html files | ||
staticOutputs[fileName] = fileFsRef; | ||
} | ||
else { | ||
const fileNameWithoutExtension = (0, path_1.basename)(fileName, '.html'); | ||
const pathWithoutHtmlExtension = (0, path_1.join)(parsedPath.dir, fileNameWithoutExtension); | ||
fileFsRef.contentType = 'text/html; charset=utf-8'; | ||
// @NOTE: Filename is relative to webDistPath | ||
// e.g. {'./200': fsRef} | ||
staticOutputs[(0, path_1.relative)(webDistPath, pathWithoutHtmlExtension)] = | ||
fileFsRef; | ||
} | ||
const lambdaFiles = {}; | ||
const allFiles = [...fileList, ...esmFileList]; | ||
for (const filePath of allFiles) { | ||
lambdaFiles[filePath] = await import_build_utils.FileFsRef.fromFsPath({ | ||
fsPath: (0, import_path.join)(workPath, filePath) | ||
}); | ||
} | ||
// Each file in the `functions` dir will become a lambda | ||
// Also supports nested functions like: | ||
// ├── functions | ||
// │ ├── bazinga | ||
// │ │ ├── bazinga.js | ||
// │ ├── graphql.js | ||
const functionFiles = { | ||
...(await (0, build_utils_1.glob)('*.js', apiDistPath)), | ||
...(await (0, build_utils_1.glob)('*/*.js', apiDistPath)), // one-level deep | ||
}; | ||
const sourceCache = new Map(); | ||
const fsCache = new Map(); | ||
for (const [funcName, fileFsRef] of Object.entries(functionFiles)) { | ||
const outputName = (0, path_1.join)(apiDir, (0, path_1.parse)(funcName).name); // remove `.js` extension | ||
const absEntrypoint = fileFsRef.fsPath; | ||
const relativeEntrypoint = (0, path_1.relative)(workPath, absEntrypoint); | ||
const awsLambdaHandler = getAWSLambdaHandler(relativeEntrypoint, 'handler'); | ||
const sourceFile = relativeEntrypoint.replace('/dist/', '/src/'); | ||
const { fileList, esmFileList, warnings } = await (0, nft_1.nodeFileTrace)([absEntrypoint], { | ||
base: workPath, | ||
processCwd: workPath, | ||
ts: true, | ||
mixedModules: true, | ||
ignore: config.excludeFiles, | ||
async readFile(fsPath) { | ||
const relPath = (0, path_1.relative)(workPath, fsPath); | ||
const cached = sourceCache.get(relPath); | ||
if (cached) | ||
return cached.toString(); | ||
// null represents a not found | ||
if (cached === null) | ||
return null; | ||
try { | ||
const source = (0, fs_1.readFileSync)(fsPath); | ||
const { mode } = (0, fs_1.lstatSync)(fsPath); | ||
let entry; | ||
if ((0, build_utils_1.isSymbolicLink)(mode)) { | ||
entry = new build_utils_1.FileFsRef({ fsPath, mode }); | ||
} | ||
else { | ||
entry = new build_utils_1.FileBlob({ data: source, mode }); | ||
} | ||
fsCache.set(relPath, entry); | ||
sourceCache.set(relPath, source); | ||
return source.toString(); | ||
} | ||
catch (e) { | ||
if (e.code === 'ENOENT' || e.code === 'EISDIR') { | ||
sourceCache.set(relPath, null); | ||
return null; | ||
} | ||
throw e; | ||
} | ||
}, | ||
}); | ||
for (const warning of warnings) { | ||
(0, build_utils_1.debug)(`Warning from trace: ${warning.message}`); | ||
} | ||
const lambdaFiles = {}; | ||
const allFiles = [...fileList, ...esmFileList]; | ||
for (const filePath of allFiles) { | ||
lambdaFiles[filePath] = await build_utils_1.FileFsRef.fromFsPath({ | ||
fsPath: (0, path_1.join)(workPath, filePath), | ||
}); | ||
} | ||
lambdaFiles[(0, path_1.relative)(workPath, fileFsRef.fsPath)] = fileFsRef; | ||
const lambdaOptions = await (0, build_utils_1.getLambdaOptionsFromFunction)({ | ||
sourceFile, | ||
config, | ||
}); | ||
const lambda = new build_utils_1.NodejsLambda({ | ||
files: lambdaFiles, | ||
handler: relativeEntrypoint, | ||
runtime: nodeVersion.runtime, | ||
shouldAddHelpers: false, | ||
shouldAddSourcemapSupport: false, | ||
awsLambdaHandler, | ||
...lambdaOptions, | ||
}); | ||
lambdaOutputs[outputName] = lambda; | ||
} | ||
// Older versions of redwood did not create 200.html automatically | ||
// From v0.50.0+ 200.html is always generated as part of web build | ||
// Note that in builder post-processing, we remove the .html extension | ||
const fallbackHtmlPage = (0, fs_1.existsSync)((0, path_1.join)(webDistPath, '200.html')) | ||
? '/200' | ||
: '/index'; | ||
const defaultRoutesConfig = (0, routing_utils_1.getTransformedRoutes)({ | ||
// this makes sure we send back 200.html for unprerendered pages | ||
rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }], | ||
cleanUrls: true, | ||
trailingSlash: false, | ||
lambdaFiles[(0, import_path.relative)(workPath, fileFsRef.fsPath)] = fileFsRef; | ||
const lambdaOptions = await (0, import_build_utils.getLambdaOptionsFromFunction)({ | ||
sourceFile, | ||
config | ||
}); | ||
if (defaultRoutesConfig.error) { | ||
throw new Error(defaultRoutesConfig.error.message); | ||
} | ||
return { | ||
output: { ...staticOutputs, ...lambdaOutputs }, | ||
routes: defaultRoutesConfig.routes, | ||
}; | ||
const lambda = new import_build_utils.NodejsLambda({ | ||
files: lambdaFiles, | ||
handler: relativeEntrypoint, | ||
runtime: nodeVersion.runtime, | ||
shouldAddHelpers: false, | ||
shouldAddSourcemapSupport: false, | ||
awsLambdaHandler, | ||
...lambdaOptions | ||
}); | ||
lambdaOutputs[outputName] = lambda; | ||
} | ||
const fallbackHtmlPage = (0, import_fs.existsSync)((0, import_path.join)(webDistPath, "200.html")) ? "/200" : "/index"; | ||
const defaultRoutesConfig = (0, import_routing_utils.getTransformedRoutes)({ | ||
// this makes sure we send back 200.html for unprerendered pages | ||
rewrites: [{ source: "/(.*)", destination: fallbackHtmlPage }], | ||
cleanUrls: true, | ||
trailingSlash: false | ||
}); | ||
if (defaultRoutesConfig.error) { | ||
throw new Error(defaultRoutesConfig.error.message); | ||
} | ||
return { | ||
output: { ...staticOutputs, ...lambdaOutputs }, | ||
routes: defaultRoutesConfig.routes | ||
}; | ||
}; | ||
exports.build = build; | ||
function getAWSLambdaHandler(filePath, handlerName) { | ||
const { dir, name } = (0, path_1.parse)(filePath); | ||
return `${dir}${dir ? path_1.sep : ''}${name}.${handlerName}`; | ||
const { dir, name } = (0, import_path.parse)(filePath); | ||
return `${dir}${dir ? import_path.sep : ""}${name}.${handlerName}`; | ||
} | ||
function hasScript(scriptName, pkg) { | ||
const scripts = (pkg && pkg.scripts) || {}; | ||
return typeof scripts[scriptName] === 'string'; | ||
const scripts = pkg && pkg.scripts || {}; | ||
return typeof scripts[scriptName] === "string"; | ||
} | ||
const prepareCache = ({ repoRootPath, workPath }) => { | ||
return (0, build_utils_1.glob)('**/node_modules/**', repoRootPath || workPath); | ||
var prepareCache = ({ repoRootPath, workPath }) => { | ||
return (0, import_build_utils.glob)("**/node_modules/**", repoRootPath || workPath); | ||
}; | ||
exports.prepareCache = prepareCache; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
build, | ||
prepareCache, | ||
version | ||
}); |
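For reference, a minimal sketch of how the builder names its Lambda outputs (POSIX paths; the project root and function names below are hypothetical, and the helper is copied from the bundle above). Each file under api/dist/functions becomes an output route under apiDir, while awsLambdaHandler points at the compiled entrypoint's exported handler:

// Sketch: output naming for Redwood API functions (sample paths are hypothetical).
const { join, parse, relative } = require("path");
const { sep } = require("path");

// Copied from dist/index.js above.
function getAWSLambdaHandler(filePath, handlerName) {
  const { dir, name } = parse(filePath);
  return `${dir}${dir ? sep : ""}${name}.${handlerName}`;
}

const workPath = "/tmp/redwood-app"; // hypothetical project root
const apiDir = "api";                // fallback when redwood.toml sets no apiProxyPath

// Keys mirror the glob results relative to api/dist/functions
// (top-level files and files one level deep).
for (const funcName of ["graphql.js", "bazinga/bazinga.js"]) {
  const outputName = join(apiDir, parse(funcName).name); // strips ".js"
  const relativeEntrypoint = relative(
    workPath,
    join(workPath, "api", "dist", "functions", funcName)
  );
  console.log(outputName, "->", getAWSLambdaHandler(relativeEntrypoint, "handler"));
}
// api/graphql -> api/dist/functions/graphql.handler
// api/bazinga -> api/dist/functions/bazinga/bazinga.handler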
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.prepareCache = exports.build = exports.version = void 0; | ||
const path_1 = require("path"); | ||
const fs_1 = require("fs"); | ||
const semver_1 = require("semver"); | ||
const build_utils_1 = require("@vercel/build-utils"); | ||
const nft_1 = require("@vercel/nft"); | ||
const routing_utils_1 = require("@vercel/routing-utils"); | ||
// Do not change this version for RW specific config, | ||
// it refers to Vercels builder version | ||
exports.version = 2; | ||
const build = async ({ workPath, files, entrypoint, meta = {}, config = {}, }) => { | ||
await (0, build_utils_1.download)(files, workPath, meta); | ||
const prefixedEnvs = (0, build_utils_1.getPrefixedEnvVars)({ | ||
envPrefix: 'REDWOOD_ENV_', | ||
envs: process.env, | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
// src/index.ts | ||
var src_exports = {}; | ||
__export(src_exports, { | ||
build: () => build, | ||
prepareCache: () => prepareCache, | ||
version: () => version | ||
}); | ||
module.exports = __toCommonJS(src_exports); | ||
var import_path = require("path"); | ||
var import_fs = require("fs"); | ||
var import_semver = require("semver"); | ||
var import_build_utils = require("@vercel/build-utils"); | ||
var import_nft = require("@vercel/nft"); | ||
var import_routing_utils = require("@vercel/routing-utils"); | ||
var version = 2; | ||
var build = async ({ | ||
workPath, | ||
files, | ||
entrypoint, | ||
meta = {}, | ||
config = {} | ||
}) => { | ||
await (0, import_build_utils.download)(files, workPath, meta); | ||
const prefixedEnvs = (0, import_build_utils.getPrefixedEnvVars)({ | ||
envPrefix: "REDWOOD_ENV_", | ||
envs: process.env | ||
}); | ||
for (const [key, value] of Object.entries(prefixedEnvs)) { | ||
process.env[key] = value; | ||
} | ||
const { installCommand, buildCommand } = config; | ||
const mountpoint = (0, import_path.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, import_path.join)(workPath, mountpoint); | ||
const nodeVersion = await (0, import_build_utils.getNodeVersion)( | ||
entrypointFsDirname, | ||
void 0, | ||
config, | ||
meta | ||
); | ||
const spawnOpts = (0, import_build_utils.getSpawnOptions)(meta, nodeVersion); | ||
if (!spawnOpts.env) { | ||
spawnOpts.env = {}; | ||
} | ||
const { cliType, lockfileVersion } = await (0, import_build_utils.scanParentDirs)( | ||
entrypointFsDirname | ||
); | ||
spawnOpts.env = (0, import_build_utils.getEnvForPackageManager)({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts.env || {} | ||
}); | ||
if (typeof installCommand === "string") { | ||
if (installCommand.trim()) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
await (0, import_build_utils.execCommand)(installCommand, { | ||
...spawnOpts, | ||
cwd: entrypointFsDirname | ||
}); | ||
} else { | ||
console.log(`Skipping "install" command...`); | ||
} | ||
} else { | ||
await (0, import_build_utils.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion); | ||
} | ||
if (meta.isDev) { | ||
throw new Error("Detected `@vercel/redwood` dev but this is not supported"); | ||
} | ||
const pkg = await (0, import_build_utils.readConfigFile)((0, import_path.join)(workPath, "package.json")); | ||
const toml = await (0, import_build_utils.readConfigFile)( | ||
(0, import_path.join)(workPath, "redwood.toml") | ||
); | ||
if (buildCommand) { | ||
(0, import_build_utils.debug)(`Executing build command "${buildCommand}"`); | ||
await (0, import_build_utils.execCommand)(buildCommand, { | ||
...spawnOpts, | ||
cwd: workPath | ||
}); | ||
for (const [key, value] of Object.entries(prefixedEnvs)) { | ||
process.env[key] = value; | ||
} else if (hasScript("vercel-build", pkg)) { | ||
(0, import_build_utils.debug)(`Executing "yarn vercel-build"`); | ||
await (0, import_build_utils.runPackageJsonScript)(workPath, "vercel-build", spawnOpts); | ||
} else if (hasScript("build", pkg)) { | ||
(0, import_build_utils.debug)(`Executing "yarn build"`); | ||
await (0, import_build_utils.runPackageJsonScript)(workPath, "build", spawnOpts); | ||
} else { | ||
const { devDependencies = {} } = pkg || {}; | ||
const versionRange = devDependencies["@redwoodjs/core"]; | ||
let cmd; | ||
if (!versionRange || !(0, import_semver.validRange)(versionRange)) { | ||
console.log( | ||
"WARNING: Unable to detect RedwoodJS version in package.json devDependencies" | ||
); | ||
cmd = "yarn rw deploy vercel"; | ||
} else if ((0, import_semver.intersects)(versionRange, "<0.25.0")) { | ||
cmd = "yarn rw build && yarn rw db up --no-db-client --auto-approve && yarn rw dataMigrate up"; | ||
} else { | ||
cmd = "yarn rw deploy vercel"; | ||
} | ||
const { installCommand, buildCommand } = config; | ||
const mountpoint = (0, path_1.dirname)(entrypoint); | ||
const entrypointFsDirname = (0, path_1.join)(workPath, mountpoint); | ||
const nodeVersion = await (0, build_utils_1.getNodeVersion)(entrypointFsDirname, undefined, config, meta); | ||
const spawnOpts = (0, build_utils_1.getSpawnOptions)(meta, nodeVersion); | ||
if (!spawnOpts.env) { | ||
spawnOpts.env = {}; | ||
} | ||
const { cliType, lockfileVersion } = await (0, build_utils_1.scanParentDirs)(entrypointFsDirname); | ||
spawnOpts.env = (0, build_utils_1.getEnvForPackageManager)({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts.env || {}, | ||
await (0, import_build_utils.execCommand)(cmd, { | ||
...spawnOpts, | ||
cwd: workPath | ||
}); | ||
if (typeof installCommand === 'string') { | ||
if (installCommand.trim()) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
await (0, build_utils_1.execCommand)(installCommand, { | ||
...spawnOpts, | ||
cwd: entrypointFsDirname, | ||
}); | ||
} | ||
else { | ||
console.log(`Skipping "install" command...`); | ||
} | ||
} | ||
const apiDir = toml?.web?.apiProxyPath?.replace(/^\//, "") ?? "api"; | ||
const apiDistPath = (0, import_path.join)(workPath, "api", "dist", "functions"); | ||
const webDistPath = (0, import_path.join)(workPath, "web", "dist"); | ||
const lambdaOutputs = {}; | ||
const webDistFiles = await (0, import_build_utils.glob)("**", webDistPath); | ||
const staticOutputs = {}; | ||
for (const [fileName, fileFsRef] of Object.entries(webDistFiles)) { | ||
const parsedPath = (0, import_path.parse)(fileFsRef.fsPath); | ||
if (parsedPath.ext !== ".html") { | ||
staticOutputs[fileName] = fileFsRef; | ||
} else { | ||
const fileNameWithoutExtension = (0, import_path.basename)(fileName, ".html"); | ||
const pathWithoutHtmlExtension = (0, import_path.join)( | ||
parsedPath.dir, | ||
fileNameWithoutExtension | ||
); | ||
fileFsRef.contentType = "text/html; charset=utf-8"; | ||
staticOutputs[(0, import_path.relative)(webDistPath, pathWithoutHtmlExtension)] = fileFsRef; | ||
} | ||
else { | ||
await (0, build_utils_1.runNpmInstall)(entrypointFsDirname, [], spawnOpts, meta, nodeVersion); | ||
} | ||
if (meta.isDev) { | ||
throw new Error('Detected `@vercel/redwood` dev but this is not supported'); | ||
} | ||
const pkg = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'package.json')); | ||
const toml = await (0, build_utils_1.readConfigFile)((0, path_1.join)(workPath, 'redwood.toml')); | ||
if (buildCommand) { | ||
(0, build_utils_1.debug)(`Executing build command "${buildCommand}"`); | ||
await (0, build_utils_1.execCommand)(buildCommand, { | ||
...spawnOpts, | ||
cwd: workPath, | ||
}); | ||
} | ||
else if (hasScript('vercel-build', pkg)) { | ||
(0, build_utils_1.debug)(`Executing "yarn vercel-build"`); | ||
await (0, build_utils_1.runPackageJsonScript)(workPath, 'vercel-build', spawnOpts); | ||
} | ||
else if (hasScript('build', pkg)) { | ||
(0, build_utils_1.debug)(`Executing "yarn build"`); | ||
await (0, build_utils_1.runPackageJsonScript)(workPath, 'build', spawnOpts); | ||
} | ||
else { | ||
const { devDependencies = {} } = pkg || {}; | ||
const versionRange = devDependencies['@redwoodjs/core']; | ||
let cmd; | ||
if (!versionRange || !(0, semver_1.validRange)(versionRange)) { | ||
console.log('WARNING: Unable to detect RedwoodJS version in package.json devDependencies'); | ||
cmd = 'yarn rw deploy vercel'; // Assume 0.25.0 and newer | ||
} | ||
const functionFiles = { | ||
...await (0, import_build_utils.glob)("*.js", apiDistPath), | ||
// top-level | ||
...await (0, import_build_utils.glob)("*/*.js", apiDistPath) | ||
// one-level deep | ||
}; | ||
const sourceCache = /* @__PURE__ */ new Map(); | ||
const fsCache = /* @__PURE__ */ new Map(); | ||
for (const [funcName, fileFsRef] of Object.entries(functionFiles)) { | ||
const outputName = (0, import_path.join)(apiDir, (0, import_path.parse)(funcName).name); | ||
const absEntrypoint = fileFsRef.fsPath; | ||
const relativeEntrypoint = (0, import_path.relative)(workPath, absEntrypoint); | ||
const awsLambdaHandler = getAWSLambdaHandler(relativeEntrypoint, "handler"); | ||
const sourceFile = relativeEntrypoint.replace("/dist/", "/src/"); | ||
const { fileList, esmFileList, warnings } = await (0, import_nft.nodeFileTrace)( | ||
[absEntrypoint], | ||
{ | ||
base: workPath, | ||
processCwd: workPath, | ||
ts: true, | ||
mixedModules: true, | ||
ignore: config.excludeFiles, | ||
async readFile(fsPath) { | ||
const relPath = (0, import_path.relative)(workPath, fsPath); | ||
const cached = sourceCache.get(relPath); | ||
if (cached) | ||
return cached.toString(); | ||
if (cached === null) | ||
return null; | ||
try { | ||
const source = (0, import_fs.readFileSync)(fsPath); | ||
const { mode } = (0, import_fs.lstatSync)(fsPath); | ||
let entry; | ||
if ((0, import_build_utils.isSymbolicLink)(mode)) { | ||
entry = new import_build_utils.FileFsRef({ fsPath, mode }); | ||
} else { | ||
entry = new import_build_utils.FileBlob({ data: source, mode }); | ||
} | ||
fsCache.set(relPath, entry); | ||
sourceCache.set(relPath, source); | ||
return source.toString(); | ||
} catch (e) { | ||
if (e.code === "ENOENT" || e.code === "EISDIR") { | ||
sourceCache.set(relPath, null); | ||
return null; | ||
} | ||
throw e; | ||
} | ||
} | ||
else if ((0, semver_1.intersects)(versionRange, '<0.25.0')) { | ||
// older than 0.25.0 | ||
cmd = | ||
'yarn rw build && yarn rw db up --no-db-client --auto-approve && yarn rw dataMigrate up'; | ||
} | ||
else { | ||
// 0.25.0 and newer | ||
cmd = 'yarn rw deploy vercel'; | ||
} | ||
await (0, build_utils_1.execCommand)(cmd, { | ||
...spawnOpts, | ||
cwd: workPath, | ||
}); | ||
} | ||
); | ||
for (const warning of warnings) { | ||
(0, import_build_utils.debug)(`Warning from trace: ${warning.message}`); | ||
} | ||
const apiDir = toml?.web?.apiProxyPath?.replace(/^\//, '') ?? 'api'; | ||
const apiDistPath = (0, path_1.join)(workPath, 'api', 'dist', 'functions'); | ||
const webDistPath = (0, path_1.join)(workPath, 'web', 'dist'); | ||
const lambdaOutputs = {}; | ||
// Strip out the .html extensions | ||
// And populate staticOutputs map with updated paths and contentType | ||
const webDistFiles = await (0, build_utils_1.glob)('**', webDistPath); | ||
const staticOutputs = {}; | ||
for (const [fileName, fileFsRef] of Object.entries(webDistFiles)) { | ||
const parsedPath = (0, path_1.parse)(fileFsRef.fsPath); | ||
if (parsedPath.ext !== '.html') { | ||
// No need to transform non-html files | ||
staticOutputs[fileName] = fileFsRef; | ||
} | ||
else { | ||
const fileNameWithoutExtension = (0, path_1.basename)(fileName, '.html'); | ||
const pathWithoutHtmlExtension = (0, path_1.join)(parsedPath.dir, fileNameWithoutExtension); | ||
fileFsRef.contentType = 'text/html; charset=utf-8'; | ||
// @NOTE: Filename is relative to webDistPath | ||
// e.g. {'./200': fsRef} | ||
staticOutputs[(0, path_1.relative)(webDistPath, pathWithoutHtmlExtension)] = | ||
fileFsRef; | ||
} | ||
const lambdaFiles = {}; | ||
const allFiles = [...fileList, ...esmFileList]; | ||
for (const filePath of allFiles) { | ||
lambdaFiles[filePath] = await import_build_utils.FileFsRef.fromFsPath({ | ||
fsPath: (0, import_path.join)(workPath, filePath) | ||
}); | ||
} | ||
// Each file in the `functions` dir will become a lambda | ||
// Also supports nested functions like: | ||
// ├── functions | ||
// │ ├── bazinga | ||
// │ │ ├── bazinga.js | ||
// │ ├── graphql.js | ||
const functionFiles = { | ||
...(await (0, build_utils_1.glob)('*.js', apiDistPath)), | ||
...(await (0, build_utils_1.glob)('*/*.js', apiDistPath)), // one-level deep | ||
}; | ||
const sourceCache = new Map(); | ||
const fsCache = new Map(); | ||
for (const [funcName, fileFsRef] of Object.entries(functionFiles)) { | ||
const outputName = (0, path_1.join)(apiDir, (0, path_1.parse)(funcName).name); // remove `.js` extension | ||
const absEntrypoint = fileFsRef.fsPath; | ||
const relativeEntrypoint = (0, path_1.relative)(workPath, absEntrypoint); | ||
const awsLambdaHandler = getAWSLambdaHandler(relativeEntrypoint, 'handler'); | ||
const sourceFile = relativeEntrypoint.replace('/dist/', '/src/'); | ||
const { fileList, esmFileList, warnings } = await (0, nft_1.nodeFileTrace)([absEntrypoint], { | ||
base: workPath, | ||
processCwd: workPath, | ||
ts: true, | ||
mixedModules: true, | ||
ignore: config.excludeFiles, | ||
async readFile(fsPath) { | ||
const relPath = (0, path_1.relative)(workPath, fsPath); | ||
const cached = sourceCache.get(relPath); | ||
if (cached) | ||
return cached.toString(); | ||
// null represents a not found | ||
if (cached === null) | ||
return null; | ||
try { | ||
const source = (0, fs_1.readFileSync)(fsPath); | ||
const { mode } = (0, fs_1.lstatSync)(fsPath); | ||
let entry; | ||
if ((0, build_utils_1.isSymbolicLink)(mode)) { | ||
entry = new build_utils_1.FileFsRef({ fsPath, mode }); | ||
} | ||
else { | ||
entry = new build_utils_1.FileBlob({ data: source, mode }); | ||
} | ||
fsCache.set(relPath, entry); | ||
sourceCache.set(relPath, source); | ||
return source.toString(); | ||
} | ||
catch (e) { | ||
if (e.code === 'ENOENT' || e.code === 'EISDIR') { | ||
sourceCache.set(relPath, null); | ||
return null; | ||
} | ||
throw e; | ||
} | ||
}, | ||
}); | ||
for (const warning of warnings) { | ||
(0, build_utils_1.debug)(`Warning from trace: ${warning.message}`); | ||
} | ||
const lambdaFiles = {}; | ||
const allFiles = [...fileList, ...esmFileList]; | ||
for (const filePath of allFiles) { | ||
lambdaFiles[filePath] = await build_utils_1.FileFsRef.fromFsPath({ | ||
fsPath: (0, path_1.join)(workPath, filePath), | ||
}); | ||
} | ||
lambdaFiles[(0, path_1.relative)(workPath, fileFsRef.fsPath)] = fileFsRef; | ||
const lambdaOptions = await (0, build_utils_1.getLambdaOptionsFromFunction)({ | ||
sourceFile, | ||
config, | ||
}); | ||
const lambda = new build_utils_1.NodejsLambda({ | ||
files: lambdaFiles, | ||
handler: relativeEntrypoint, | ||
runtime: nodeVersion.runtime, | ||
shouldAddHelpers: false, | ||
shouldAddSourcemapSupport: false, | ||
awsLambdaHandler, | ||
...lambdaOptions, | ||
}); | ||
lambdaOutputs[outputName] = lambda; | ||
} | ||
// Older versions of redwood did not create 200.html automatically | ||
// From v0.50.0+ 200.html is always generated as part of web build | ||
// Note that in builder post-processing, we remove the .html extension | ||
const fallbackHtmlPage = (0, fs_1.existsSync)((0, path_1.join)(webDistPath, '200.html')) | ||
? '/200' | ||
: '/index'; | ||
const defaultRoutesConfig = (0, routing_utils_1.getTransformedRoutes)({ | ||
// this makes sure we send back 200.html for unprerendered pages | ||
rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }], | ||
cleanUrls: true, | ||
trailingSlash: false, | ||
lambdaFiles[(0, import_path.relative)(workPath, fileFsRef.fsPath)] = fileFsRef; | ||
const lambdaOptions = await (0, import_build_utils.getLambdaOptionsFromFunction)({ | ||
sourceFile, | ||
config | ||
}); | ||
if (defaultRoutesConfig.error) { | ||
throw new Error(defaultRoutesConfig.error.message); | ||
} | ||
return { | ||
output: { ...staticOutputs, ...lambdaOutputs }, | ||
routes: defaultRoutesConfig.routes, | ||
}; | ||
const lambda = new import_build_utils.NodejsLambda({ | ||
files: lambdaFiles, | ||
handler: relativeEntrypoint, | ||
runtime: nodeVersion.runtime, | ||
shouldAddHelpers: false, | ||
shouldAddSourcemapSupport: false, | ||
awsLambdaHandler, | ||
...lambdaOptions | ||
}); | ||
lambdaOutputs[outputName] = lambda; | ||
} | ||
const fallbackHtmlPage = (0, import_fs.existsSync)((0, import_path.join)(webDistPath, "200.html")) ? "/200" : "/index"; | ||
const defaultRoutesConfig = (0, import_routing_utils.getTransformedRoutes)({ | ||
// this makes sure we send back 200.html for unprerendered pages | ||
rewrites: [{ source: "/(.*)", destination: fallbackHtmlPage }], | ||
cleanUrls: true, | ||
trailingSlash: false | ||
}); | ||
if (defaultRoutesConfig.error) { | ||
throw new Error(defaultRoutesConfig.error.message); | ||
} | ||
return { | ||
output: { ...staticOutputs, ...lambdaOutputs }, | ||
routes: defaultRoutesConfig.routes | ||
}; | ||
}; | ||
exports.build = build; | ||
function getAWSLambdaHandler(filePath, handlerName) { | ||
const { dir, name } = (0, path_1.parse)(filePath); | ||
return `${dir}${dir ? path_1.sep : ''}${name}.${handlerName}`; | ||
const { dir, name } = (0, import_path.parse)(filePath); | ||
return `${dir}${dir ? import_path.sep : ""}${name}.${handlerName}`; | ||
} | ||
function hasScript(scriptName, pkg) { | ||
const scripts = (pkg && pkg.scripts) || {}; | ||
return typeof scripts[scriptName] === 'string'; | ||
const scripts = pkg && pkg.scripts || {}; | ||
return typeof scripts[scriptName] === "string"; | ||
} | ||
const prepareCache = ({ repoRootPath, workPath }) => { | ||
return (0, build_utils_1.glob)('**/node_modules/**', repoRootPath || workPath); | ||
var prepareCache = ({ repoRootPath, workPath }) => { | ||
return (0, import_build_utils.glob)("**/node_modules/**", repoRootPath || workPath); | ||
}; | ||
exports.prepareCache = prepareCache; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
build, | ||
prepareCache, | ||
version | ||
}); |
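The apiDir prefix itself is derived from redwood.toml: a leading slash is stripped from web.apiProxyPath, falling back to "api". A one-line sketch (the toml value here is made up):

// Sketch: deriving apiDir from a parsed redwood.toml (value is hypothetical).
const toml = { web: { apiProxyPath: "/serverless" } };
const apiDir = toml?.web?.apiProxyPath?.replace(/^\//, "") ?? "api";
console.log(apiDir); // "serverless"; a missing file or field falls back to "api"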
package.json

{
  "name": "@vercel/redwood",
- "version": "2.0.1",
+ "version": "2.0.2",
  "main": "./dist/index.js",
@@ -24,3 +24,3 @@ "license": "Apache-2.0",
  "@types/semver": "6.0.0",
- "@vercel/build-utils": "7.1.1",
+ "@vercel/build-utils": "7.2.0",
  "execa": "3.2.0",
@@ -31,3 +31,3 @@ "fs-extra": "11.1.0",
  "scripts": {
- "build": "node build.js",
+ "build": "node ../../utils/build-builder.mjs",
  "test-e2e": "pnpm test test/test.js",
  "test": "jest --reporters=default --reporters=jest-junit --env node --verbose --bail --runInBand",
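Alongside the version bump, @vercel/build-utils moves from 7.1.1 to 7.2.0, and the build script now delegates to a shared ../../utils/build-builder.mjs helper in the monorepo instead of a per-package build.js — presumably the esbuild bundling step reflected in the new dist/index.js above.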
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package