@vercel/build-utils
Advanced tools
Comparing version 7.1.0 to 7.1.1

# @vercel/build-utils

## 7.1.1

### Patch Changes

- add descriptions to NodeVersion properties ([#10403](https://github.com/vercel/vercel/pull/10403))
- Updated semver dependency ([#10411](https://github.com/vercel/vercel/pull/10411))

## 7.1.0
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.cloneEnv = void 0; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var clone_env_exports = {}; | ||
__export(clone_env_exports, { | ||
cloneEnv: () => cloneEnv | ||
}); | ||
module.exports = __toCommonJS(clone_env_exports); | ||
const { hasOwnProperty } = Object.prototype; | ||
/**
 * Clones zero or more objects into a single new object while ensuring that the
 * `PATH` environment variable is defined when the `PATH` or `Path` environment
 * variables are defined.
 *
 * @param {...(Object|undefined|null)} envs Objects and/or `process.env` to clone and merge
 * @returns {Object} The new object
 */
function cloneEnv(...envs) {
  return envs.reduce((obj, env) => {
    if (env === undefined || env === null) {
      return obj;
    }
    // mixin the env first
    Object.assign(obj, env);
    if (Object.prototype.hasOwnProperty.call(env, "Path")) {
      // the system path is called `Path` on Windows and Node.js will
      // automatically return the system path when accessing `PATH`,
      // however we lose this proxied value when we destructure and
      // thus we must explicitly copy it, but we must also remove the
      // `Path` property since we can't have both a `PATH` and `Path`
      if (obj.Path !== undefined) {
        obj.PATH = obj.Path;
      }
      delete obj.Path;
    }
    return obj;
  }, {});
}
exports.cloneEnv = cloneEnv;
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  cloneEnv
});
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const get_platform_env_1 = require("./get-platform-env"); | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var debug_exports = {}; | ||
__export(debug_exports, { | ||
default: () => debug | ||
}); | ||
module.exports = __toCommonJS(debug_exports); | ||
var import_get_platform_env = require("./get-platform-env"); | ||
/**
 * Conditional build-time logger.
 *
 * Logs unprefixed when the `BUILDER_DEBUG` platform env var is set; otherwise
 * logs with the `VERCEL_DEBUG_PREFIX` prefix when that env var is set; stays
 * silent when neither is set.
 *
 * @param {*} message First value passed to `console.log`
 * @param {...*} additional Extra values forwarded to `console.log`
 */
function debug(message, ...additional) {
  if ((0, import_get_platform_env.getPlatformEnv)("BUILDER_DEBUG")) {
    console.log(message, ...additional);
  } else if (process.env.VERCEL_DEBUG_PREFIX) {
    console.log(`${process.env.VERCEL_DEBUG_PREFIX}${message}`, ...additional);
  }
}
exports.default = debug;
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.EdgeFunction = void 0; | ||
/** | ||
* An Edge Functions output | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var edge_function_exports = {}; | ||
__export(edge_function_exports, { | ||
EdgeFunction: () => EdgeFunction | ||
}); | ||
module.exports = __toCommonJS(edge_function_exports); | ||
/**
 * An Edge Functions build output: a simple value object whose fields
 * mirror the `params` passed to the constructor.
 */
class EdgeFunction {
  constructor(params) {
    // discriminator used by consumers of build outputs
    this.type = "EdgeFunction";
    this.name = params.name;
    this.deploymentTarget = params.deploymentTarget;
    this.entrypoint = params.entrypoint;
    this.files = params.files;
    this.assets = params.assets;
    this.regions = params.regions;
    this.framework = params.framework;
  }
}
exports.EdgeFunction = EdgeFunction;
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  EdgeFunction
});
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getPrettyError = exports.NowBuildError = void 0; | ||
/** | ||
* This error should be thrown from a Builder in | ||
* order to stop the build and print a message. | ||
* This is necessary to avoid printing a stack trace. | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var errors_exports = {}; | ||
__export(errors_exports, { | ||
NowBuildError: () => NowBuildError, | ||
getPrettyError: () => getPrettyError | ||
}); | ||
module.exports = __toCommonJS(errors_exports); | ||
/**
 * This error should be thrown from a Builder in order to stop the build
 * and print a message. Setting `hideStackTrace` avoids printing a stack trace.
 */
class NowBuildError extends Error {
  constructor({ message, code, link, action }) {
    super(message);
    // signal to the CLI that the stack trace should be suppressed
    this.hideStackTrace = true;
    this.code = code;
    this.link = link;
    this.action = action;
  }
}
exports.NowBuildError = NowBuildError;
/**
 * Formats an Ajv validation error object into a user-facing `NowBuildError`
 * with a documentation link anchored to the offending top-level property.
 *
 * @param {Object} obj Ajv error (`dataPath`, `params`, `message`)
 * @returns {NowBuildError} never throws — falls back to a generic error
 */
function getPrettyError(obj) {
  const docsUrl = "https://vercel.com/docs/concepts/projects/project-configuration";
  try {
    const { dataPath, params, message: ajvMessage } = obj;
    const prop = getTopLevelPropertyName(dataPath);
    let message = dataPath && dataPath.startsWith(".") ? `\`${dataPath.slice(1)}\` ` : "";
    if (params && typeof params.additionalProperty === "string") {
      const suggestion = getSuggestion(prop, params.additionalProperty);
      message += `should NOT have additional property \`${params.additionalProperty}\`. ${suggestion}`;
    } else if (params && typeof params.missingProperty === "string") {
      message += `missing required property \`${params.missingProperty}\`.`;
    } else {
      message += `${ajvMessage}.`;
    }
    return new NowBuildError({
      code: "INVALID_VERCEL_CONFIG",
      message,
      link: prop ? `${docsUrl}#${prop.toLowerCase()}` : docsUrl,
      action: "View Documentation"
    });
  } catch (e) {
    // defensive: a malformed Ajv error object must not crash error reporting
    return new NowBuildError({
      code: "INVALID_VERCEL_CONFIG",
      message: `Failed to validate configuration.`,
      link: docsUrl,
      action: "View Documentation"
    });
  }
}
exports.getPrettyError = getPrettyError;
/**
 * Get the top level property from the dataPath.
 * `.cleanUrls` => `cleanUrls`
 * `.headers[0].source` => `headers`
 * `.headers[0].headers[0]` => `headers`
 * `` => ``
 */
function getTopLevelPropertyName(dataPath) {
  if (dataPath && dataPath.startsWith(".")) {
    const lastIndex = dataPath.indexOf("[");
    return lastIndex > -1 ? dataPath.slice(1, lastIndex) : dataPath.slice(1);
  }
  return "";
}
// Known typos per top-level config property, used to build the
// "Did you mean `...`?" hint for `additionalProperty` validation errors.
// The "" key applies to properties at the root of the config object.
const mapTypoToSuggestion = {
  "": {
    builder: "builds",
    "build.env": '{ "build": { "env": {"name": "value"} } }',
    "builds.env": '{ "build": { "env": {"name": "value"} } }'
  },
  rewrites: { src: "source", dest: "destination" },
  redirects: { src: "source", dest: "destination", status: "statusCode" },
  headers: { src: "source", header: "headers" },
  routes: {
    source: "src",
    destination: "dest",
    header: "headers",
    method: "methods"
  }
};
/**
 * Returns a "Did you mean `...`?" hint for a known typo under the given
 * top-level property, or "Please remove it." when no correction is known.
 *
 * @param {string} topLevelProp Top-level config property ("" for root)
 * @param {string} additionalProperty The unexpected property name
 * @returns {string} Suggestion sentence appended to the error message
 */
function getSuggestion(topLevelProp, additionalProperty) {
  const choices = mapTypoToSuggestion[topLevelProp];
  const choice = choices ? choices[additionalProperty] : undefined;
  return choice ? `Did you mean \`${choice}\`?` : "Please remove it.";
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  NowBuildError,
  getPrettyError
});
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const assert_1 = __importDefault(require("assert")); | ||
const into_stream_1 = __importDefault(require("into-stream")); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var file_blob_exports = {}; | ||
__export(file_blob_exports, { | ||
default: () => FileBlob | ||
}); | ||
module.exports = __toCommonJS(file_blob_exports); | ||
var import_assert = __toESM(require("assert")); | ||
var import_into_stream = __toESM(require("into-stream")); | ||
/**
 * A build-output "file" whose contents live in memory as a string or Buffer.
 */
class FileBlob {
  /**
   * @param {Object} opts
   * @param {number} [opts.mode=33188] POSIX file mode (33188 === 0o100644)
   * @param {string} [opts.contentType]
   * @param {string|Buffer} opts.data File contents
   */
  constructor({ mode = 33188, contentType, data }) {
    (0, import_assert.default)(typeof mode === "number");
    (0, import_assert.default)(typeof data === "string" || Buffer.isBuffer(data));
    this.type = "FileBlob";
    this.mode = mode;
    this.contentType = contentType;
    this.data = data;
  }
  /** Buffers an entire readable stream into memory and wraps it in a FileBlob. */
  static async fromStream({
    mode = 33188,
    contentType,
    stream
  }) {
    (0, import_assert.default)(typeof mode === "number");
    (0, import_assert.default)(typeof stream.pipe === "function"); // is-stream
    const chunks = [];
    await new Promise((resolve, reject) => {
      stream.on("data", (chunk) => chunks.push(Buffer.from(chunk)));
      stream.on("error", (error) => reject(error));
      stream.on("end", () => resolve());
    });
    const data = Buffer.concat(chunks);
    return new FileBlob({ mode, contentType, data });
  }
  async toStreamAsync() {
    return this.toStream();
  }
  toStream() {
    return (0, import_into_stream.default)(this.data);
  }
}
exports.default = FileBlob;
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const assert_1 = __importDefault(require("assert")); | ||
const fs_extra_1 = __importDefault(require("fs-extra")); | ||
const multistream_1 = __importDefault(require("multistream")); | ||
const path_1 = __importDefault(require("path")); | ||
const async_sema_1 = __importDefault(require("async-sema")); | ||
const semaToPreventEMFILE = new async_sema_1.default(20); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var file_fs_ref_exports = {}; | ||
__export(file_fs_ref_exports, { | ||
default: () => file_fs_ref_default | ||
}); | ||
module.exports = __toCommonJS(file_fs_ref_exports); | ||
var import_assert = __toESM(require("assert")); | ||
var import_fs_extra = __toESM(require("fs-extra")); | ||
var import_multistream = __toESM(require("multistream")); | ||
var import_path = __toESM(require("path")); | ||
var import_async_sema = __toESM(require("async-sema")); | ||
const semaToPreventEMFILE = new import_async_sema.default(20); | ||
/**
 * A build-output "file" backed by a path on the local filesystem.
 */
class FileFsRef {
  /**
   * @param {Object} opts
   * @param {number} [opts.mode=33188] POSIX file mode (33188 === 0o100644)
   * @param {string} [opts.contentType]
   * @param {string} opts.fsPath Absolute path of the file on disk
   */
  constructor({ mode = 33188, contentType, fsPath }) {
    (0, import_assert.default)(typeof mode === "number");
    (0, import_assert.default)(typeof fsPath === "string");
    this.type = "FileFsRef";
    this.mode = mode;
    this.contentType = contentType;
    this.fsPath = fsPath;
  }
  /** Creates a FileFsRef, reading the mode from disk when not provided. */
  static async fromFsPath({
    mode,
    contentType,
    fsPath
  }) {
    let m = mode;
    if (!m) {
      const stat = await import_fs_extra.default.lstat(fsPath);
      m = stat.mode;
    }
    return new FileFsRef({ mode: m, contentType, fsPath });
  }
  /** Writes a readable stream to `fsPath` and returns a ref to the result. */
  static async fromStream({
    mode = 33188,
    contentType,
    stream,
    fsPath
  }) {
    (0, import_assert.default)(typeof mode === "number");
    (0, import_assert.default)(typeof stream.pipe === "function"); // is-stream
    (0, import_assert.default)(typeof fsPath === "string");
    await import_fs_extra.default.mkdirp(import_path.default.dirname(fsPath));
    await new Promise((resolve, reject) => {
      const dest = import_fs_extra.default.createWriteStream(fsPath, {
        mode: mode & 511 // keep only the permission bits (0o777)
      });
      stream.pipe(dest);
      stream.on("error", reject);
      dest.on("finish", resolve);
      dest.on("error", reject);
    });
    return new FileFsRef({ mode, contentType, fsPath });
  }
  async toStreamAsync() {
    // acquire the semaphore so we never hold too many open fds at once
    await semaToPreventEMFILE.acquire();
    const release = () => semaToPreventEMFILE.release();
    const stream = import_fs_extra.default.createReadStream(this.fsPath);
    stream.on("close", release);
    stream.on("error", release);
    return stream;
  }
  toStream() {
    let flag = false;
    // multistream bridges the async stream factory into a sync Readable
    return (0, import_multistream.default)((cb) => {
      if (flag)
        return cb(null, null);
      flag = true;
      this.toStreamAsync().then((stream) => {
        cb(null, stream);
      }).catch((error) => {
        cb(error, null);
      });
    });
  }
}
exports.default = FileFsRef;
var file_fs_ref_default = FileFsRef;
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const assert_1 = __importDefault(require("assert")); | ||
const node_fetch_1 = __importDefault(require("node-fetch")); | ||
const multistream_1 = __importDefault(require("multistream")); | ||
const async_retry_1 = __importDefault(require("async-retry")); | ||
const async_sema_1 = __importDefault(require("async-sema")); | ||
const semaToDownloadFromS3 = new async_sema_1.default(5); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var file_ref_exports = {}; | ||
__export(file_ref_exports, { | ||
default: () => FileRef | ||
}); | ||
module.exports = __toCommonJS(file_ref_exports); | ||
var import_assert = __toESM(require("assert")); | ||
var import_node_fetch = __toESM(require("node-fetch")); | ||
var import_multistream = __toESM(require("multistream")); | ||
var import_async_retry = __toESM(require("async-retry")); | ||
var import_async_sema = __toESM(require("async-sema")); | ||
const semaToDownloadFromS3 = new import_async_sema.default(5); | ||
/**
 * Error used with `async-retry`: setting `bail = true` tells the retry
 * loop to stop retrying immediately.
 */
class BailableError extends Error {
  constructor(...args) {
    super(...args);
    this.bail = false;
  }
}
/**
 * A build-output "file" addressed by content digest and downloaded from
 * S3/CloudFront on demand.
 */
class FileRef {
  /**
   * @param {Object} opts
   * @param {number} [opts.mode=33188] POSIX file mode (33188 === 0o100644)
   * @param {string} opts.digest e.g. `sha:24be087eef9fac01d61b30a725c1a10d7b45a256`
   * @param {string} [opts.contentType]
   * @param {boolean} [opts.mutable=false]
   */
  constructor({
    mode = 33188,
    digest,
    contentType,
    mutable = false
  }) {
    (0, import_assert.default)(typeof mode === "number");
    (0, import_assert.default)(typeof digest === "string");
    this.type = "FileRef";
    this.mode = mode;
    this.digest = digest;
    this.contentType = contentType;
    this.mutable = mutable;
  }
  async toStreamAsync() {
    let url = "";
    // digest looks like `sha:24be087eef9fac01d61b30a725c1a10d7b45a256`
    const [digestType, digestHash] = this.digest.split(":");
    if (digestType === "sha") {
      // The CloudFront URL edge caches the `now-files` S3 bucket to prevent
      // overloading it. Mutable files cannot be cached.
      url = this.mutable ? `https://now-files.s3.amazonaws.com/${digestHash}` : `https://dmmcy0pwk6bqi.cloudfront.net/${digestHash}`;
    } else if (digestType === "sha+ephemeral") {
      // This URL is currently only used for cache files that constantly
      // change. We shouldn't cache it on CloudFront because it'd always be a
      // MISS.
      url = `https://now-ephemeral-files.s3.amazonaws.com/${digestHash}`;
    } else {
      throw new Error("Expected digest to be sha");
    }
    await semaToDownloadFromS3.acquire();
    try {
      return await (0, import_async_retry.default)(
        async () => {
          const resp = await (0, import_node_fetch.default)(url);
          if (!resp.ok) {
            const error = new BailableError(
              `download: ${resp.status} ${resp.statusText} for ${url}`
            );
            // 403 will never succeed on retry; bail immediately
            if (resp.status === 403)
              error.bail = true;
            throw error;
          }
          return resp.body;
        },
        { factor: 1, retries: 3 }
      );
    } finally {
      semaToDownloadFromS3.release();
    }
  }
  toStream() {
    let flag = false;
    // multistream bridges the async stream factory into a sync Readable
    return (0, import_multistream.default)((cb) => {
      if (flag)
        return cb(null, null);
      flag = true;
      this.toStreamAsync().then((stream) => {
        cb(null, stream);
      }).catch((error) => {
        cb(error, null);
      });
    });
  }
}
exports.default = FileRef;
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.downloadFile = exports.isSymbolicLink = exports.isDirectory = void 0; | ||
const path_1 = __importDefault(require("path")); | ||
const debug_1 = __importDefault(require("../debug")); | ||
const file_fs_ref_1 = __importDefault(require("../file-fs-ref")); | ||
const fs_extra_1 = require("fs-extra"); | ||
const stream_to_buffer_1 = __importDefault(require("./stream-to-buffer")); | ||
const S_IFDIR = 16384; /* 0040000 directory */ | ||
const S_IFLNK = 40960; /* 0120000 symbolic link */ | ||
const S_IFMT = 61440; /* 0170000 type of file */ | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var download_exports = {}; | ||
__export(download_exports, { | ||
default: () => download, | ||
downloadFile: () => downloadFile, | ||
isDirectory: () => isDirectory, | ||
isSymbolicLink: () => isSymbolicLink | ||
}); | ||
module.exports = __toCommonJS(download_exports); | ||
var import_path = __toESM(require("path")); | ||
var import_debug = __toESM(require("../debug")); | ||
var import_file_fs_ref = __toESM(require("../file-fs-ref")); | ||
var import_fs_extra = require("fs-extra"); | ||
var import_stream_to_buffer = __toESM(require("./stream-to-buffer")); | ||
const S_IFDIR = 16384; | ||
const S_IFLNK = 40960; | ||
const S_IFMT = 61440; | ||
function isDirectory(mode) { | ||
return (mode & S_IFMT) === S_IFDIR; | ||
return (mode & S_IFMT) === S_IFDIR; | ||
} | ||
exports.isDirectory = isDirectory; | ||
function isSymbolicLink(mode) { | ||
return (mode & S_IFMT) === S_IFLNK; | ||
return (mode & S_IFMT) === S_IFLNK; | ||
} | ||
exports.isSymbolicLink = isSymbolicLink; | ||
async function prepareSymlinkTarget(file, fsPath) { | ||
const mkdirPromise = (0, fs_extra_1.mkdirp)(path_1.default.dirname(fsPath)); | ||
if (file.type === 'FileFsRef') { | ||
const [target] = await Promise.all([(0, fs_extra_1.readlink)(file.fsPath), mkdirPromise]); | ||
return target; | ||
} | ||
if (file.type === 'FileRef' || file.type === 'FileBlob') { | ||
const targetPathBufferPromise = (0, stream_to_buffer_1.default)(await file.toStreamAsync()); | ||
const [targetPathBuffer] = await Promise.all([ | ||
targetPathBufferPromise, | ||
mkdirPromise, | ||
]); | ||
return targetPathBuffer.toString('utf8'); | ||
} | ||
throw new Error(`file.type "${file.type}" not supported for symlink`); | ||
const mkdirPromise = (0, import_fs_extra.mkdirp)(import_path.default.dirname(fsPath)); | ||
if (file.type === "FileFsRef") { | ||
const [target] = await Promise.all([(0, import_fs_extra.readlink)(file.fsPath), mkdirPromise]); | ||
return target; | ||
} | ||
if (file.type === "FileRef" || file.type === "FileBlob") { | ||
const targetPathBufferPromise = (0, import_stream_to_buffer.default)(await file.toStreamAsync()); | ||
const [targetPathBuffer] = await Promise.all([ | ||
targetPathBufferPromise, | ||
mkdirPromise | ||
]); | ||
return targetPathBuffer.toString("utf8"); | ||
} | ||
throw new Error( | ||
`file.type "${file.type}" not supported for symlink` | ||
); | ||
} | ||
async function downloadFile(file, fsPath) { | ||
const { mode } = file; | ||
if (isDirectory(mode)) { | ||
await (0, fs_extra_1.mkdirp)(fsPath); | ||
await (0, fs_extra_1.chmod)(fsPath, mode); | ||
return file_fs_ref_1.default.fromFsPath({ mode, fsPath }); | ||
} | ||
// If the source is a symlink, try to create it instead of copying the file. | ||
// Note: creating symlinks on Windows requires admin priviliges or symlinks | ||
// enabled in the group policy. We may want to improve the error message. | ||
if (isSymbolicLink(mode)) { | ||
const target = await prepareSymlinkTarget(file, fsPath); | ||
await (0, fs_extra_1.symlink)(target, fsPath); | ||
return file_fs_ref_1.default.fromFsPath({ mode, fsPath }); | ||
} | ||
const stream = file.toStream(); | ||
return file_fs_ref_1.default.fromStream({ mode, stream, fsPath }); | ||
const { mode } = file; | ||
if (isDirectory(mode)) { | ||
await (0, import_fs_extra.mkdirp)(fsPath); | ||
await (0, import_fs_extra.chmod)(fsPath, mode); | ||
return import_file_fs_ref.default.fromFsPath({ mode, fsPath }); | ||
} | ||
if (isSymbolicLink(mode)) { | ||
const target = await prepareSymlinkTarget(file, fsPath); | ||
await (0, import_fs_extra.symlink)(target, fsPath); | ||
return import_file_fs_ref.default.fromFsPath({ mode, fsPath }); | ||
} | ||
const stream = file.toStream(); | ||
return import_file_fs_ref.default.fromStream({ mode, stream, fsPath }); | ||
} | ||
exports.downloadFile = downloadFile; | ||
async function removeFile(basePath, fileMatched) { | ||
const file = path_1.default.join(basePath, fileMatched); | ||
await (0, fs_extra_1.remove)(file); | ||
const file = import_path.default.join(basePath, fileMatched); | ||
await (0, import_fs_extra.remove)(file); | ||
} | ||
async function download(files, basePath, meta) { | ||
const { isDev = false, skipDownload = false, filesChanged = null, filesRemoved = null, } = meta || {}; | ||
if (isDev || skipDownload) { | ||
// In `vercel dev`, the `download()` function is a no-op because | ||
// the `basePath` matches the `cwd` of the dev server, so the | ||
// source files are already available. | ||
return files; | ||
} | ||
(0, debug_1.default)('Downloading deployment source files...'); | ||
const start = Date.now(); | ||
const files2 = {}; | ||
const filenames = Object.keys(files); | ||
await Promise.all(filenames.map(async (name) => { | ||
// If the file does not exist anymore, remove it. | ||
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) { | ||
await removeFile(basePath, name); | ||
return; | ||
const { | ||
isDev = false, | ||
skipDownload = false, | ||
filesChanged = null, | ||
filesRemoved = null | ||
} = meta || {}; | ||
if (isDev || skipDownload) { | ||
return files; | ||
} | ||
(0, import_debug.default)("Downloading deployment source files..."); | ||
const start = Date.now(); | ||
const files2 = {}; | ||
const filenames = Object.keys(files); | ||
await Promise.all( | ||
filenames.map(async (name) => { | ||
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) { | ||
await removeFile(basePath, name); | ||
return; | ||
} | ||
if (Array.isArray(filesChanged) && !filesChanged.includes(name)) { | ||
return; | ||
} | ||
const parts = name.split("/"); | ||
for (let i = 1; i < parts.length; i++) { | ||
const dir = parts.slice(0, i).join("/"); | ||
const parent = files[dir]; | ||
if (parent && isSymbolicLink(parent.mode)) { | ||
console.warn( | ||
`Warning: file "${name}" is within a symlinked directory "${dir}" and will be ignored` | ||
); | ||
return; | ||
} | ||
// If a file didn't change, do not re-download it. | ||
if (Array.isArray(filesChanged) && !filesChanged.includes(name)) { | ||
return; | ||
} | ||
// Some builders resolve symlinks and return both | ||
// a file, node_modules/<symlink>/package.json, and | ||
// node_modules/<symlink>, a symlink. | ||
// Removing the file matches how the yazl lambda zip | ||
// behaves so we can use download() with `vercel build`. | ||
const parts = name.split('/'); | ||
for (let i = 1; i < parts.length; i++) { | ||
const dir = parts.slice(0, i).join('/'); | ||
const parent = files[dir]; | ||
if (parent && isSymbolicLink(parent.mode)) { | ||
console.warn(`Warning: file "${name}" is within a symlinked directory "${dir}" and will be ignored`); | ||
return; | ||
} | ||
} | ||
const file = files[name]; | ||
const fsPath = path_1.default.join(basePath, name); | ||
files2[name] = await downloadFile(file, fsPath); | ||
})); | ||
const duration = Date.now() - start; | ||
(0, debug_1.default)(`Downloaded ${filenames.length} source files: ${duration}ms`); | ||
return files2; | ||
} | ||
const file = files[name]; | ||
const fsPath = import_path.default.join(basePath, name); | ||
files2[name] = await downloadFile(file, fsPath); | ||
}) | ||
); | ||
const duration = Date.now() - start; | ||
(0, import_debug.default)(`Downloaded ${filenames.length} source files: ${duration}ms`); | ||
return files2; | ||
} | ||
exports.default = download; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
downloadFile, | ||
isDirectory, | ||
isSymbolicLink | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const path_1 = require("path"); | ||
const os_1 = require("os"); | ||
const fs_extra_1 = require("fs-extra"); | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var get_writable_directory_exports = {}; | ||
__export(get_writable_directory_exports, { | ||
default: () => getWritableDirectory | ||
}); | ||
module.exports = __toCommonJS(get_writable_directory_exports); | ||
var import_path = require("path"); | ||
var import_os = require("os"); | ||
var import_fs_extra = require("fs-extra"); | ||
async function getWritableDirectory() { | ||
const name = Math.floor(Math.random() * 0x7fffffff).toString(16); | ||
const directory = (0, path_1.join)((0, os_1.tmpdir)(), name); | ||
await (0, fs_extra_1.mkdirp)(directory); | ||
return directory; | ||
const name = Math.floor(Math.random() * 2147483647).toString(16); | ||
const directory = (0, import_path.join)((0, import_os.tmpdir)(), name); | ||
await (0, import_fs_extra.mkdirp)(directory); | ||
return directory; | ||
} | ||
exports.default = getWritableDirectory; |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const path_1 = __importDefault(require("path")); | ||
const assert_1 = __importDefault(require("assert")); | ||
const glob_1 = __importDefault(require("glob")); | ||
const util_1 = require("util"); | ||
const fs_extra_1 = require("fs-extra"); | ||
const normalize_path_1 = require("./normalize-path"); | ||
const file_fs_ref_1 = __importDefault(require("../file-fs-ref")); | ||
const vanillaGlob = (0, util_1.promisify)(glob_1.default); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var glob_exports = {}; | ||
__export(glob_exports, { | ||
default: () => glob | ||
}); | ||
module.exports = __toCommonJS(glob_exports); | ||
var import_path = __toESM(require("path")); | ||
var import_assert = __toESM(require("assert")); | ||
var import_glob = __toESM(require("glob")); | ||
var import_util = require("util"); | ||
var import_fs_extra = require("fs-extra"); | ||
var import_normalize_path = require("./normalize-path"); | ||
var import_file_fs_ref = __toESM(require("../file-fs-ref")); | ||
const vanillaGlob = (0, import_util.promisify)(import_glob.default); | ||
async function glob(pattern, opts, mountpoint) { | ||
const options = typeof opts === 'string' ? { cwd: opts } : opts; | ||
if (!options.cwd) { | ||
throw new Error('Second argument (basePath) must be specified for names of resulting files'); | ||
const options = typeof opts === "string" ? { cwd: opts } : opts; | ||
if (!options.cwd) { | ||
throw new Error( | ||
"Second argument (basePath) must be specified for names of resulting files" | ||
); | ||
} | ||
if (!import_path.default.isAbsolute(options.cwd)) { | ||
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`); | ||
} | ||
const results = {}; | ||
const statCache = {}; | ||
const symlinks = {}; | ||
const files = await vanillaGlob(pattern, { | ||
...options, | ||
symlinks, | ||
statCache, | ||
stat: true, | ||
dot: true | ||
}); | ||
const dirs = /* @__PURE__ */ new Set(); | ||
const dirsWithEntries = /* @__PURE__ */ new Set(); | ||
for (const relativePath of files) { | ||
const absPath = import_path.default.join(options.cwd, relativePath); | ||
const fsPath = (0, import_normalize_path.normalizePath)(absPath); | ||
let stat = statCache[fsPath]; | ||
(0, import_assert.default)( | ||
stat, | ||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})` | ||
); | ||
const isSymlink = symlinks[fsPath]; | ||
if (options.follow && (isSymlink || (await (0, import_fs_extra.lstat)(fsPath)).isSymbolicLink())) { | ||
const target = await (0, import_fs_extra.readlink)(absPath); | ||
const absTarget = import_path.default.resolve(import_path.default.dirname(absPath), target); | ||
if (import_path.default.relative(options.cwd, absTarget).startsWith(`..${import_path.default.sep}`)) { | ||
continue; | ||
} | ||
} | ||
if (!path_1.default.isAbsolute(options.cwd)) { | ||
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`); | ||
if (isSymlink || stat.isFile() || stat.isDirectory()) { | ||
if (isSymlink) { | ||
stat = await (0, import_fs_extra.lstat)(absPath); | ||
} | ||
const dirname = import_path.default.dirname(relativePath); | ||
dirsWithEntries.add(dirname); | ||
if (stat.isDirectory()) { | ||
dirs.add(relativePath); | ||
continue; | ||
} | ||
let finalPath = relativePath; | ||
if (mountpoint) { | ||
finalPath = import_path.default.join(mountpoint, finalPath); | ||
} | ||
results[finalPath] = new import_file_fs_ref.default({ mode: stat.mode, fsPath }); | ||
} | ||
const results = {}; | ||
const statCache = {}; | ||
const symlinks = {}; | ||
const files = await vanillaGlob(pattern, { | ||
...options, | ||
symlinks, | ||
statCache, | ||
stat: true, | ||
dot: true, | ||
}); | ||
const dirs = new Set(); | ||
const dirsWithEntries = new Set(); | ||
for (const relativePath of files) { | ||
const absPath = path_1.default.join(options.cwd, relativePath); | ||
const fsPath = (0, normalize_path_1.normalizePath)(absPath); | ||
let stat = statCache[fsPath]; | ||
(0, assert_1.default)(stat, `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`); | ||
const isSymlink = symlinks[fsPath]; | ||
// When `follow` mode is enabled, ensure that the entry is not a symlink | ||
// that points to outside of `cwd` | ||
if (options.follow && | ||
(isSymlink || (await (0, fs_extra_1.lstat)(fsPath)).isSymbolicLink())) { | ||
const target = await (0, fs_extra_1.readlink)(absPath); | ||
const absTarget = path_1.default.resolve(path_1.default.dirname(absPath), target); | ||
if (path_1.default.relative(options.cwd, absTarget).startsWith(`..${path_1.default.sep}`)) { | ||
continue; | ||
} | ||
} | ||
if (isSymlink || stat.isFile() || stat.isDirectory()) { | ||
if (isSymlink) { | ||
stat = await (0, fs_extra_1.lstat)(absPath); | ||
} | ||
// Some bookkeeping to track which directories already have entries within | ||
const dirname = path_1.default.dirname(relativePath); | ||
dirsWithEntries.add(dirname); | ||
if (stat.isDirectory()) { | ||
dirs.add(relativePath); | ||
continue; | ||
} | ||
let finalPath = relativePath; | ||
if (mountpoint) { | ||
finalPath = path_1.default.join(mountpoint, finalPath); | ||
} | ||
results[finalPath] = new file_fs_ref_1.default({ mode: stat.mode, fsPath }); | ||
} | ||
} | ||
if (options.includeDirectories) { | ||
for (const relativePath of dirs) { | ||
if (dirsWithEntries.has(relativePath)) | ||
continue; | ||
let finalPath = relativePath; | ||
if (mountpoint) { | ||
finalPath = import_path.default.join(mountpoint, finalPath); | ||
} | ||
const fsPath = (0, import_normalize_path.normalizePath)(import_path.default.join(options.cwd, relativePath)); | ||
const stat = statCache[fsPath]; | ||
results[finalPath] = new import_file_fs_ref.default({ mode: stat.mode, fsPath }); | ||
} | ||
// Add empty directory entries | ||
if (options.includeDirectories) { | ||
for (const relativePath of dirs) { | ||
if (dirsWithEntries.has(relativePath)) | ||
continue; | ||
let finalPath = relativePath; | ||
if (mountpoint) { | ||
finalPath = path_1.default.join(mountpoint, finalPath); | ||
} | ||
const fsPath = (0, normalize_path_1.normalizePath)(path_1.default.join(options.cwd, relativePath)); | ||
const stat = statCache[fsPath]; | ||
results[finalPath] = new file_fs_ref_1.default({ mode: stat.mode, fsPath }); | ||
} | ||
} | ||
return results; | ||
} | ||
return results; | ||
} | ||
exports.default = glob; |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getSupportedNodeVersion = exports.getDiscontinuedNodeVersions = exports.getLatestNodeVersion = void 0; | ||
const semver_1 = require("semver"); | ||
const errors_1 = require("../errors"); | ||
const debug_1 = __importDefault(require("../debug")); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var node_version_exports = {}; | ||
__export(node_version_exports, { | ||
getDiscontinuedNodeVersions: () => getDiscontinuedNodeVersions, | ||
getLatestNodeVersion: () => getLatestNodeVersion, | ||
getSupportedNodeVersion: () => getSupportedNodeVersion | ||
}); | ||
module.exports = __toCommonJS(node_version_exports); | ||
var import_semver = require("semver"); | ||
var import_errors = require("../errors"); | ||
var import_debug = __toESM(require("../debug")); | ||
function getOptions() { | ||
const options = [ | ||
{ major: 18, range: '18.x', runtime: 'nodejs18.x' }, | ||
{ | ||
major: 16, | ||
range: '16.x', | ||
runtime: 'nodejs16.x', | ||
discontinueDate: new Date('2024-02-06'), | ||
}, | ||
{ | ||
major: 14, | ||
range: '14.x', | ||
runtime: 'nodejs14.x', | ||
discontinueDate: new Date('2023-08-15'), | ||
}, | ||
{ | ||
major: 12, | ||
range: '12.x', | ||
runtime: 'nodejs12.x', | ||
discontinueDate: new Date('2022-10-03'), | ||
}, | ||
{ | ||
major: 10, | ||
range: '10.x', | ||
runtime: 'nodejs10.x', | ||
discontinueDate: new Date('2021-04-20'), | ||
}, | ||
{ | ||
major: 8, | ||
range: '8.10.x', | ||
runtime: 'nodejs8.10', | ||
discontinueDate: new Date('2020-01-06'), | ||
}, | ||
]; | ||
return options; | ||
const options = [ | ||
{ major: 18, range: "18.x", runtime: "nodejs18.x" }, | ||
{ | ||
major: 16, | ||
range: "16.x", | ||
runtime: "nodejs16.x", | ||
discontinueDate: /* @__PURE__ */ new Date("2024-02-06") | ||
}, | ||
{ | ||
major: 14, | ||
range: "14.x", | ||
runtime: "nodejs14.x", | ||
discontinueDate: /* @__PURE__ */ new Date("2023-08-15") | ||
}, | ||
{ | ||
major: 12, | ||
range: "12.x", | ||
runtime: "nodejs12.x", | ||
discontinueDate: /* @__PURE__ */ new Date("2022-10-03") | ||
}, | ||
{ | ||
major: 10, | ||
range: "10.x", | ||
runtime: "nodejs10.x", | ||
discontinueDate: /* @__PURE__ */ new Date("2021-04-20") | ||
}, | ||
{ | ||
major: 8, | ||
range: "8.10.x", | ||
runtime: "nodejs8.10", | ||
discontinueDate: /* @__PURE__ */ new Date("2020-01-06") | ||
} | ||
]; | ||
return options; | ||
} | ||
function getHint(isAuto = false) { | ||
const { major, range } = getLatestNodeVersion(); | ||
return isAuto | ||
? `Please set Node.js Version to ${range} in your Project Settings to use Node.js ${major}.` | ||
: `Please set "engines": { "node": "${range}" } in your \`package.json\` file to use Node.js ${major}.`; | ||
const { major, range } = getLatestNodeVersion(); | ||
return isAuto ? `Please set Node.js Version to ${range} in your Project Settings to use Node.js ${major}.` : `Please set "engines": { "node": "${range}" } in your \`package.json\` file to use Node.js ${major}.`; | ||
} | ||
function getLatestNodeVersion() { | ||
return getOptions()[0]; | ||
return getOptions()[0]; | ||
} | ||
exports.getLatestNodeVersion = getLatestNodeVersion; | ||
function getDiscontinuedNodeVersions() { | ||
return getOptions().filter(isDiscontinued); | ||
return getOptions().filter(isDiscontinued); | ||
} | ||
exports.getDiscontinuedNodeVersions = getDiscontinuedNodeVersions; | ||
async function getSupportedNodeVersion(engineRange, isAuto = false) { | ||
let selection = getLatestNodeVersion(); | ||
if (engineRange) { | ||
const found = (0, semver_1.validRange)(engineRange) && | ||
getOptions().some(o => { | ||
// the array is already in order so return the first | ||
// match which will be the newest version of node | ||
selection = o; | ||
return (0, semver_1.intersects)(o.range, engineRange); | ||
}); | ||
if (!found) { | ||
throw new errors_1.NowBuildError({ | ||
code: 'BUILD_UTILS_NODE_VERSION_INVALID', | ||
link: 'http://vercel.link/node-version', | ||
message: `Found invalid Node.js Version: "${engineRange}". ${getHint(isAuto)}`, | ||
}); | ||
} | ||
let selection = getLatestNodeVersion(); | ||
if (engineRange) { | ||
const found = (0, import_semver.validRange)(engineRange) && getOptions().some((o) => { | ||
selection = o; | ||
return (0, import_semver.intersects)(o.range, engineRange); | ||
}); | ||
if (!found) { | ||
throw new import_errors.NowBuildError({ | ||
code: "BUILD_UTILS_NODE_VERSION_INVALID", | ||
link: "http://vercel.link/node-version", | ||
message: `Found invalid Node.js Version: "${engineRange}". ${getHint( | ||
isAuto | ||
)}` | ||
}); | ||
} | ||
if (isDiscontinued(selection)) { | ||
const intro = `Node.js Version "${selection.range}" is discontinued and must be upgraded.`; | ||
throw new errors_1.NowBuildError({ | ||
code: 'BUILD_UTILS_NODE_VERSION_DISCONTINUED', | ||
link: 'http://vercel.link/node-version', | ||
message: `${intro} ${getHint(isAuto)}`, | ||
}); | ||
} | ||
(0, debug_1.default)(`Selected Node.js ${selection.range}`); | ||
if (selection.discontinueDate) { | ||
const d = selection.discontinueDate.toISOString().split('T')[0]; | ||
console.warn(`Error: Node.js version ${selection.range} has reached End-of-Life. Deployments created on or after ${d} will fail to build. ${getHint(isAuto)}`); | ||
} | ||
return selection; | ||
} | ||
if (isDiscontinued(selection)) { | ||
const intro = `Node.js Version "${selection.range}" is discontinued and must be upgraded.`; | ||
throw new import_errors.NowBuildError({ | ||
code: "BUILD_UTILS_NODE_VERSION_DISCONTINUED", | ||
link: "http://vercel.link/node-version", | ||
message: `${intro} ${getHint(isAuto)}` | ||
}); | ||
} | ||
(0, import_debug.default)(`Selected Node.js ${selection.range}`); | ||
if (selection.discontinueDate) { | ||
const d = selection.discontinueDate.toISOString().split("T")[0]; | ||
console.warn( | ||
`Error: Node.js version ${selection.range} has reached End-of-Life. Deployments created on or after ${d} will fail to build. ${getHint( | ||
isAuto | ||
)}` | ||
); | ||
} | ||
return selection; | ||
} | ||
exports.getSupportedNodeVersion = getSupportedNodeVersion; | ||
function isDiscontinued({ discontinueDate }) { | ||
const today = Date.now(); | ||
return discontinueDate !== undefined && discontinueDate.getTime() <= today; | ||
const today = Date.now(); | ||
return discontinueDate !== void 0 && discontinueDate.getTime() <= today; | ||
} | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
getDiscontinuedNodeVersions, | ||
getLatestNodeVersion, | ||
getSupportedNodeVersion | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.normalizePath = void 0; | ||
const isWin = process.platform === 'win32'; | ||
/** | ||
* Convert Windows separators to Unix separators. | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var normalize_path_exports = {}; | ||
__export(normalize_path_exports, { | ||
normalizePath: () => normalizePath | ||
}); | ||
module.exports = __toCommonJS(normalize_path_exports); | ||
const isWin = process.platform === "win32"; | ||
function normalizePath(p) { | ||
return isWin ? p.replace(/\\/g, '/') : p; | ||
return isWin ? p.replace(/\\/g, "/") : p; | ||
} | ||
exports.normalizePath = normalizePath; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
normalizePath | ||
}); |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.readConfigFile = void 0; | ||
const js_yaml_1 = __importDefault(require("js-yaml")); | ||
const toml_1 = __importDefault(require("@iarna/toml")); | ||
const fs_extra_1 = require("fs-extra"); | ||
const error_utils_1 = require("@vercel/error-utils"); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var read_config_file_exports = {}; | ||
__export(read_config_file_exports, { | ||
readConfigFile: () => readConfigFile | ||
}); | ||
module.exports = __toCommonJS(read_config_file_exports); | ||
var import_js_yaml = __toESM(require("js-yaml")); | ||
var import_toml = __toESM(require("@iarna/toml")); | ||
var import_fs_extra = require("fs-extra"); | ||
var import_error_utils = require("@vercel/error-utils"); | ||
async function readFileOrNull(file) { | ||
try { | ||
const data = await (0, fs_extra_1.readFile)(file); | ||
return data; | ||
try { | ||
const data = await (0, import_fs_extra.readFile)(file); | ||
return data; | ||
} catch (error) { | ||
if (!(0, import_error_utils.isErrnoException)(error)) { | ||
throw error; | ||
} | ||
catch (error) { | ||
if (!(0, error_utils_1.isErrnoException)(error)) { | ||
throw error; | ||
} | ||
if (error.code !== 'ENOENT') { | ||
throw error; | ||
} | ||
if (error.code !== "ENOENT") { | ||
throw error; | ||
} | ||
return null; | ||
} | ||
return null; | ||
} | ||
async function readConfigFile(files) { | ||
files = Array.isArray(files) ? files : [files]; | ||
for (const name of files) { | ||
const data = await readFileOrNull(name); | ||
if (data) { | ||
const str = data.toString('utf8'); | ||
try { | ||
if (name.endsWith('.json')) { | ||
return JSON.parse(str); | ||
} | ||
else if (name.endsWith('.toml')) { | ||
return toml_1.default.parse(str); | ||
} | ||
else if (name.endsWith('.yaml') || name.endsWith('.yml')) { | ||
return js_yaml_1.default.safeLoad(str, { filename: name }); | ||
} | ||
} | ||
catch (error) { | ||
console.log(`Error while parsing config file: "${name}"`); | ||
} | ||
files = Array.isArray(files) ? files : [files]; | ||
for (const name of files) { | ||
const data = await readFileOrNull(name); | ||
if (data) { | ||
const str = data.toString("utf8"); | ||
try { | ||
if (name.endsWith(".json")) { | ||
return JSON.parse(str); | ||
} else if (name.endsWith(".toml")) { | ||
return import_toml.default.parse(str); | ||
} else if (name.endsWith(".yaml") || name.endsWith(".yml")) { | ||
return import_js_yaml.default.safeLoad(str, { filename: name }); | ||
} | ||
} catch (error) { | ||
console.log(`Error while parsing config file: "${name}"`); | ||
} | ||
} | ||
return null; | ||
} | ||
return null; | ||
} | ||
exports.readConfigFile = readConfigFile; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
readConfigFile | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
/** | ||
* Renames the keys of a `Files` map. | ||
* | ||
* @param files A map of filenames to `File` instances | ||
* @param delegate A function that returns the new filename | ||
* @returns A new file map with the renamed filenames | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var rename_exports = {}; | ||
__export(rename_exports, { | ||
default: () => rename | ||
}); | ||
module.exports = __toCommonJS(rename_exports); | ||
function rename(files, delegate) { | ||
const result = {}; | ||
for (const [name, file] of Object.entries(files)) { | ||
result[delegate(name)] = file; | ||
} | ||
return result; | ||
const result = {}; | ||
for (const [name, file] of Object.entries(files)) { | ||
result[delegate(name)] = file; | ||
} | ||
return result; | ||
} | ||
exports.default = rename; |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.installDependencies = exports.getScriptName = exports.runPipInstall = exports.runBundleInstall = exports.runPackageJsonScript = exports.runCustomInstallCommand = exports.getEnvForPackageManager = exports.runNpmInstall = exports.walkParentDirs = exports.scanParentDirs = exports.getNodeVersion = exports.getSpawnOptions = exports.runShellScript = exports.getNodeBinPaths = exports.getNodeBinPath = exports.traverseUpDirectories = exports.execCommand = exports.spawnCommand = exports.spawnAsync = void 0; | ||
const assert_1 = __importDefault(require("assert")); | ||
const fs_extra_1 = __importDefault(require("fs-extra")); | ||
const path_1 = __importDefault(require("path")); | ||
const async_sema_1 = __importDefault(require("async-sema")); | ||
const cross_spawn_1 = __importDefault(require("cross-spawn")); | ||
const semver_1 = require("semver"); | ||
const util_1 = require("util"); | ||
const debug_1 = __importDefault(require("../debug")); | ||
const errors_1 = require("../errors"); | ||
const node_version_1 = require("./node-version"); | ||
const read_config_file_1 = require("./read-config-file"); | ||
const clone_env_1 = require("../clone-env"); | ||
// Only allow one `runNpmInstall()` invocation to run concurrently | ||
const runNpmInstallSema = new async_sema_1.default(1); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var run_user_scripts_exports = {}; | ||
__export(run_user_scripts_exports, { | ||
execCommand: () => execCommand, | ||
getEnvForPackageManager: () => getEnvForPackageManager, | ||
getNodeBinPath: () => getNodeBinPath, | ||
getNodeBinPaths: () => getNodeBinPaths, | ||
getNodeVersion: () => getNodeVersion, | ||
getScriptName: () => getScriptName, | ||
getSpawnOptions: () => getSpawnOptions, | ||
installDependencies: () => installDependencies, | ||
runBundleInstall: () => runBundleInstall, | ||
runCustomInstallCommand: () => runCustomInstallCommand, | ||
runNpmInstall: () => runNpmInstall, | ||
runPackageJsonScript: () => runPackageJsonScript, | ||
runPipInstall: () => runPipInstall, | ||
runShellScript: () => runShellScript, | ||
scanParentDirs: () => scanParentDirs, | ||
spawnAsync: () => spawnAsync, | ||
spawnCommand: () => spawnCommand, | ||
traverseUpDirectories: () => traverseUpDirectories, | ||
walkParentDirs: () => walkParentDirs | ||
}); | ||
module.exports = __toCommonJS(run_user_scripts_exports); | ||
var import_assert = __toESM(require("assert")); | ||
var import_fs_extra = __toESM(require("fs-extra")); | ||
var import_path = __toESM(require("path")); | ||
var import_async_sema = __toESM(require("async-sema")); | ||
var import_cross_spawn = __toESM(require("cross-spawn")); | ||
var import_semver = require("semver"); | ||
var import_util = require("util"); | ||
var import_debug = __toESM(require("../debug")); | ||
var import_errors = require("../errors"); | ||
var import_node_version = require("./node-version"); | ||
var import_read_config_file = require("./read-config-file"); | ||
var import_clone_env = require("../clone-env"); | ||
const runNpmInstallSema = new import_async_sema.default(1); | ||
function spawnAsync(command, args, opts = {}) { | ||
return new Promise((resolve, reject) => { | ||
const stderrLogs = []; | ||
opts = { stdio: 'inherit', ...opts }; | ||
const child = (0, cross_spawn_1.default)(command, args, opts); | ||
if (opts.stdio === 'pipe' && child.stderr) { | ||
child.stderr.on('data', data => stderrLogs.push(data)); | ||
} | ||
child.on('error', reject); | ||
child.on('close', (code, signal) => { | ||
if (code === 0 || opts.ignoreNon0Exit) { | ||
return resolve(); | ||
} | ||
const cmd = opts.prettyCommand | ||
? `Command "${opts.prettyCommand}"` | ||
: 'Command'; | ||
reject(new errors_1.NowBuildError({ | ||
code: `BUILD_UTILS_SPAWN_${code || signal}`, | ||
message: opts.stdio === 'inherit' | ||
? `${cmd} exited with ${code || signal}` | ||
: stderrLogs.map(line => line.toString()).join(''), | ||
})); | ||
}); | ||
return new Promise((resolve, reject) => { | ||
const stderrLogs = []; | ||
opts = { stdio: "inherit", ...opts }; | ||
const child = (0, import_cross_spawn.default)(command, args, opts); | ||
if (opts.stdio === "pipe" && child.stderr) { | ||
child.stderr.on("data", (data) => stderrLogs.push(data)); | ||
} | ||
child.on("error", reject); | ||
child.on("close", (code, signal) => { | ||
if (code === 0 || opts.ignoreNon0Exit) { | ||
return resolve(); | ||
} | ||
const cmd = opts.prettyCommand ? `Command "${opts.prettyCommand}"` : "Command"; | ||
reject( | ||
new import_errors.NowBuildError({ | ||
code: `BUILD_UTILS_SPAWN_${code || signal}`, | ||
message: opts.stdio === "inherit" ? `${cmd} exited with ${code || signal}` : stderrLogs.map((line) => line.toString()).join("") | ||
}) | ||
); | ||
}); | ||
}); | ||
} | ||
exports.spawnAsync = spawnAsync; | ||
function spawnCommand(command, options = {}) { | ||
const opts = { ...options, prettyCommand: command }; | ||
if (process.platform === 'win32') { | ||
return (0, cross_spawn_1.default)('cmd.exe', ['/C', command], opts); | ||
} | ||
return (0, cross_spawn_1.default)('sh', ['-c', command], opts); | ||
const opts = { ...options, prettyCommand: command }; | ||
if (process.platform === "win32") { | ||
return (0, import_cross_spawn.default)("cmd.exe", ["/C", command], opts); | ||
} | ||
return (0, import_cross_spawn.default)("sh", ["-c", command], opts); | ||
} | ||
exports.spawnCommand = spawnCommand; | ||
async function execCommand(command, options = {}) { | ||
const opts = { ...options, prettyCommand: command }; | ||
if (process.platform === 'win32') { | ||
await spawnAsync('cmd.exe', ['/C', command], opts); | ||
} | ||
else { | ||
await spawnAsync('sh', ['-c', command], opts); | ||
} | ||
return true; | ||
const opts = { ...options, prettyCommand: command }; | ||
if (process.platform === "win32") { | ||
await spawnAsync("cmd.exe", ["/C", command], opts); | ||
} else { | ||
await spawnAsync("sh", ["-c", command], opts); | ||
} | ||
return true; | ||
} | ||
exports.execCommand = execCommand; | ||
function* traverseUpDirectories({ start, base, }) { | ||
let current = path_1.default.normalize(start); | ||
const normalizedRoot = base ? path_1.default.normalize(base) : undefined; | ||
while (current) { | ||
yield current; | ||
if (current === normalizedRoot) | ||
break; | ||
// Go up one directory | ||
const next = path_1.default.join(current, '..'); | ||
current = next === current ? undefined : next; | ||
} | ||
function* traverseUpDirectories({ | ||
start, | ||
base | ||
}) { | ||
let current = import_path.default.normalize(start); | ||
const normalizedRoot = base ? import_path.default.normalize(base) : void 0; | ||
while (current) { | ||
yield current; | ||
if (current === normalizedRoot) | ||
break; | ||
const next = import_path.default.join(current, ".."); | ||
current = next === current ? void 0 : next; | ||
} | ||
} | ||
exports.traverseUpDirectories = traverseUpDirectories; | ||
/** | ||
* @deprecated Use `getNodeBinPaths()` instead. | ||
*/ | ||
async function getNodeBinPath({ cwd, }) { | ||
const { lockfilePath } = await scanParentDirs(cwd); | ||
const dir = path_1.default.dirname(lockfilePath || cwd); | ||
return path_1.default.join(dir, 'node_modules', '.bin'); | ||
async function getNodeBinPath({ | ||
cwd | ||
}) { | ||
const { lockfilePath } = await scanParentDirs(cwd); | ||
const dir = import_path.default.dirname(lockfilePath || cwd); | ||
return import_path.default.join(dir, "node_modules", ".bin"); | ||
} | ||
exports.getNodeBinPath = getNodeBinPath; | ||
function getNodeBinPaths({ start, base, }) { | ||
return Array.from(traverseUpDirectories({ start, base })).map(dir => path_1.default.join(dir, 'node_modules/.bin')); | ||
function getNodeBinPaths({ | ||
start, | ||
base | ||
}) { | ||
return Array.from(traverseUpDirectories({ start, base })).map( | ||
(dir) => import_path.default.join(dir, "node_modules/.bin") | ||
); | ||
} | ||
exports.getNodeBinPaths = getNodeBinPaths; | ||
async function chmodPlusX(fsPath) { | ||
const s = await fs_extra_1.default.stat(fsPath); | ||
const newMode = s.mode | 64 | 8 | 1; // eslint-disable-line no-bitwise | ||
if (s.mode === newMode) | ||
return; | ||
const base8 = newMode.toString(8).slice(-3); | ||
await fs_extra_1.default.chmod(fsPath, base8); | ||
const s = await import_fs_extra.default.stat(fsPath); | ||
const newMode = s.mode | 64 | 8 | 1; | ||
if (s.mode === newMode) | ||
return; | ||
const base8 = newMode.toString(8).slice(-3); | ||
await import_fs_extra.default.chmod(fsPath, base8); | ||
} | ||
async function runShellScript(fsPath, args = [], spawnOpts) { | ||
(0, assert_1.default)(path_1.default.isAbsolute(fsPath)); | ||
const destPath = path_1.default.dirname(fsPath); | ||
await chmodPlusX(fsPath); | ||
const command = `./${path_1.default.basename(fsPath)}`; | ||
await spawnAsync(command, args, { | ||
...spawnOpts, | ||
cwd: destPath, | ||
prettyCommand: command, | ||
}); | ||
return true; | ||
(0, import_assert.default)(import_path.default.isAbsolute(fsPath)); | ||
const destPath = import_path.default.dirname(fsPath); | ||
await chmodPlusX(fsPath); | ||
const command = `./${import_path.default.basename(fsPath)}`; | ||
await spawnAsync(command, args, { | ||
...spawnOpts, | ||
cwd: destPath, | ||
prettyCommand: command | ||
}); | ||
return true; | ||
} | ||
exports.runShellScript = runShellScript; | ||
function getSpawnOptions(meta, nodeVersion) { | ||
const opts = { | ||
env: (0, clone_env_1.cloneEnv)(process.env), | ||
}; | ||
if (!meta.isDev) { | ||
let found = false; | ||
const oldPath = opts.env.PATH || process.env.PATH || ''; | ||
const pathSegments = oldPath.split(path_1.default.delimiter).map(segment => { | ||
if (/^\/node[0-9]+\/bin/.test(segment)) { | ||
found = true; | ||
return `/node${nodeVersion.major}/bin`; | ||
} | ||
return segment; | ||
}); | ||
if (!found) { | ||
// If we didn't find & replace, prepend at beginning of PATH | ||
pathSegments.unshift(`/node${nodeVersion.major}/bin`); | ||
} | ||
opts.env.PATH = pathSegments.filter(Boolean).join(path_1.default.delimiter); | ||
const opts = { | ||
env: (0, import_clone_env.cloneEnv)(process.env) | ||
}; | ||
if (!meta.isDev) { | ||
let found = false; | ||
const oldPath = opts.env.PATH || process.env.PATH || ""; | ||
const pathSegments = oldPath.split(import_path.default.delimiter).map((segment) => { | ||
if (/^\/node[0-9]+\/bin/.test(segment)) { | ||
found = true; | ||
return `/node${nodeVersion.major}/bin`; | ||
} | ||
return segment; | ||
}); | ||
if (!found) { | ||
pathSegments.unshift(`/node${nodeVersion.major}/bin`); | ||
} | ||
return opts; | ||
opts.env.PATH = pathSegments.filter(Boolean).join(import_path.default.delimiter); | ||
} | ||
return opts; | ||
} | ||
exports.getSpawnOptions = getSpawnOptions; | ||
async function getNodeVersion(destPath, nodeVersionFallback = process.env.VERCEL_PROJECT_SETTINGS_NODE_VERSION, config = {}, meta = {}) { | ||
const latest = (0, node_version_1.getLatestNodeVersion)(); | ||
if (meta.isDev) { | ||
// Use the system-installed version of `node` in PATH for `vercel dev` | ||
return { ...latest, runtime: 'nodejs' }; | ||
const latest = (0, import_node_version.getLatestNodeVersion)(); | ||
if (meta.isDev) { | ||
return { ...latest, runtime: "nodejs" }; | ||
} | ||
const { packageJson } = await scanParentDirs(destPath, true); | ||
let nodeVersion = config.nodeVersion || nodeVersionFallback; | ||
let isAuto = true; | ||
if (packageJson?.engines?.node) { | ||
const { node } = packageJson.engines; | ||
if (nodeVersion && (0, import_semver.validRange)(node) && !(0, import_semver.intersects)(nodeVersion, node)) { | ||
console.warn( | ||
`Warning: Due to "engines": { "node": "${node}" } in your \`package.json\` file, the Node.js Version defined in your Project Settings ("${nodeVersion}") will not apply. Learn More: http://vercel.link/node-version` | ||
); | ||
} else if ((0, import_semver.coerce)(node)?.raw === node) { | ||
console.warn( | ||
`Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` with major.minor.patch, but only major Node.js Version can be selected. Learn More: http://vercel.link/node-version` | ||
); | ||
} else if ((0, import_semver.validRange)(node) && (0, import_semver.intersects)(`${latest.major + 1}.x`, node)) { | ||
console.warn( | ||
`Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` that will automatically upgrade when a new major Node.js Version is released. Learn More: http://vercel.link/node-version` | ||
); | ||
} | ||
const { packageJson } = await scanParentDirs(destPath, true); | ||
let nodeVersion = config.nodeVersion || nodeVersionFallback; | ||
let isAuto = true; | ||
if (packageJson?.engines?.node) { | ||
const { node } = packageJson.engines; | ||
if (nodeVersion && (0, semver_1.validRange)(node) && !(0, semver_1.intersects)(nodeVersion, node)) { | ||
console.warn(`Warning: Due to "engines": { "node": "${node}" } in your \`package.json\` file, the Node.js Version defined in your Project Settings ("${nodeVersion}") will not apply. Learn More: http://vercel.link/node-version`); | ||
} | ||
else if ((0, semver_1.coerce)(node)?.raw === node) { | ||
console.warn(`Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` with major.minor.patch, but only major Node.js Version can be selected. Learn More: http://vercel.link/node-version`); | ||
} | ||
else if ((0, semver_1.validRange)(node) && (0, semver_1.intersects)(`${latest.major + 1}.x`, node)) { | ||
console.warn(`Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` that will automatically upgrade when a new major Node.js Version is released. Learn More: http://vercel.link/node-version`); | ||
} | ||
nodeVersion = node; | ||
isAuto = false; | ||
} | ||
return (0, node_version_1.getSupportedNodeVersion)(nodeVersion, isAuto); | ||
nodeVersion = node; | ||
isAuto = false; | ||
} | ||
return (0, import_node_version.getSupportedNodeVersion)(nodeVersion, isAuto); | ||
} | ||
exports.getNodeVersion = getNodeVersion; | ||
async function scanParentDirs(destPath, readPackageJson = false) { | ||
(0, assert_1.default)(path_1.default.isAbsolute(destPath)); | ||
const pkgJsonPath = await walkParentDirs({ | ||
base: '/', | ||
start: destPath, | ||
filename: 'package.json', | ||
}); | ||
const packageJson = readPackageJson && pkgJsonPath | ||
? JSON.parse(await fs_extra_1.default.readFile(pkgJsonPath, 'utf8')) | ||
: undefined; | ||
const [yarnLockPath, npmLockPath, pnpmLockPath] = await walkParentDirsMulti({ | ||
base: '/', | ||
start: destPath, | ||
filenames: ['yarn.lock', 'package-lock.json', 'pnpm-lock.yaml'], | ||
}); | ||
let lockfilePath; | ||
let lockfileVersion; | ||
let cliType = 'yarn'; | ||
const [hasYarnLock, packageLockJson, pnpmLockYaml] = await Promise.all([ | ||
Boolean(yarnLockPath), | ||
npmLockPath | ||
? (0, read_config_file_1.readConfigFile)(npmLockPath) | ||
: null, | ||
pnpmLockPath | ||
? (0, read_config_file_1.readConfigFile)(pnpmLockPath) | ||
: null, | ||
]); | ||
// Priority order is Yarn > pnpm > npm | ||
if (hasYarnLock) { | ||
cliType = 'yarn'; | ||
lockfilePath = yarnLockPath; | ||
} | ||
else if (pnpmLockYaml) { | ||
cliType = 'pnpm'; | ||
lockfilePath = pnpmLockPath; | ||
lockfileVersion = Number(pnpmLockYaml.lockfileVersion); | ||
} | ||
else if (packageLockJson) { | ||
cliType = 'npm'; | ||
lockfilePath = npmLockPath; | ||
lockfileVersion = packageLockJson.lockfileVersion; | ||
} | ||
const packageJsonPath = pkgJsonPath || undefined; | ||
return { | ||
cliType, | ||
packageJson, | ||
lockfilePath, | ||
lockfileVersion, | ||
packageJsonPath, | ||
}; | ||
(0, import_assert.default)(import_path.default.isAbsolute(destPath)); | ||
const pkgJsonPath = await walkParentDirs({ | ||
base: "/", | ||
start: destPath, | ||
filename: "package.json" | ||
}); | ||
const packageJson = readPackageJson && pkgJsonPath ? JSON.parse(await import_fs_extra.default.readFile(pkgJsonPath, "utf8")) : void 0; | ||
const [yarnLockPath, npmLockPath, pnpmLockPath] = await walkParentDirsMulti({ | ||
base: "/", | ||
start: destPath, | ||
filenames: ["yarn.lock", "package-lock.json", "pnpm-lock.yaml"] | ||
}); | ||
let lockfilePath; | ||
let lockfileVersion; | ||
let cliType = "yarn"; | ||
const [hasYarnLock, packageLockJson, pnpmLockYaml] = await Promise.all([ | ||
Boolean(yarnLockPath), | ||
npmLockPath ? (0, import_read_config_file.readConfigFile)(npmLockPath) : null, | ||
pnpmLockPath ? (0, import_read_config_file.readConfigFile)(pnpmLockPath) : null | ||
]); | ||
if (hasYarnLock) { | ||
cliType = "yarn"; | ||
lockfilePath = yarnLockPath; | ||
} else if (pnpmLockYaml) { | ||
cliType = "pnpm"; | ||
lockfilePath = pnpmLockPath; | ||
lockfileVersion = Number(pnpmLockYaml.lockfileVersion); | ||
} else if (packageLockJson) { | ||
cliType = "npm"; | ||
lockfilePath = npmLockPath; | ||
lockfileVersion = packageLockJson.lockfileVersion; | ||
} | ||
const packageJsonPath = pkgJsonPath || void 0; | ||
return { | ||
cliType, | ||
packageJson, | ||
lockfilePath, | ||
lockfileVersion, | ||
packageJsonPath | ||
}; | ||
} | ||
exports.scanParentDirs = scanParentDirs; | ||
async function walkParentDirs({ base, start, filename, }) { | ||
(0, assert_1.default)(path_1.default.isAbsolute(base), 'Expected "base" to be absolute path'); | ||
(0, assert_1.default)(path_1.default.isAbsolute(start), 'Expected "start" to be absolute path'); | ||
for (const dir of traverseUpDirectories({ start, base })) { | ||
const fullPath = path_1.default.join(dir, filename); | ||
// eslint-disable-next-line no-await-in-loop | ||
if (await fs_extra_1.default.pathExists(fullPath)) { | ||
return fullPath; | ||
} | ||
async function walkParentDirs({ | ||
base, | ||
start, | ||
filename | ||
}) { | ||
(0, import_assert.default)(import_path.default.isAbsolute(base), 'Expected "base" to be absolute path'); | ||
(0, import_assert.default)(import_path.default.isAbsolute(start), 'Expected "start" to be absolute path'); | ||
for (const dir of traverseUpDirectories({ start, base })) { | ||
const fullPath = import_path.default.join(dir, filename); | ||
if (await import_fs_extra.default.pathExists(fullPath)) { | ||
return fullPath; | ||
} | ||
return null; | ||
} | ||
return null; | ||
} | ||
exports.walkParentDirs = walkParentDirs; | ||
async function walkParentDirsMulti({ base, start, filenames, }) { | ||
for (const dir of traverseUpDirectories({ start, base })) { | ||
const fullPaths = filenames.map(f => path_1.default.join(dir, f)); | ||
const existResults = await Promise.all(fullPaths.map(f => fs_extra_1.default.pathExists(f))); | ||
const foundOneOrMore = existResults.some(b => b); | ||
if (foundOneOrMore) { | ||
return fullPaths.map((f, i) => (existResults[i] ? f : undefined)); | ||
} | ||
async function walkParentDirsMulti({ | ||
base, | ||
start, | ||
filenames | ||
}) { | ||
for (const dir of traverseUpDirectories({ start, base })) { | ||
const fullPaths = filenames.map((f) => import_path.default.join(dir, f)); | ||
const existResults = await Promise.all( | ||
fullPaths.map((f) => import_fs_extra.default.pathExists(f)) | ||
); | ||
const foundOneOrMore = existResults.some((b) => b); | ||
if (foundOneOrMore) { | ||
return fullPaths.map((f, i) => existResults[i] ? f : void 0); | ||
} | ||
return []; | ||
} | ||
return []; | ||
} | ||
function isSet(v) { | ||
return v?.constructor?.name === 'Set'; | ||
return v?.constructor?.name === "Set"; | ||
} | ||
async function runNpmInstall(destPath, args = [], spawnOpts, meta, nodeVersion) { | ||
if (meta?.isDev) { | ||
(0, debug_1.default)('Skipping dependency installation because dev mode is enabled'); | ||
return false; | ||
if (meta?.isDev) { | ||
(0, import_debug.default)("Skipping dependency installation because dev mode is enabled"); | ||
return false; | ||
} | ||
(0, import_assert.default)(import_path.default.isAbsolute(destPath)); | ||
try { | ||
await runNpmInstallSema.acquire(); | ||
const { cliType, packageJsonPath, lockfileVersion } = await scanParentDirs( | ||
destPath | ||
); | ||
if (meta && packageJsonPath && args.length === 0) { | ||
if (!isSet(meta.runNpmInstallSet)) { | ||
meta.runNpmInstallSet = /* @__PURE__ */ new Set(); | ||
} | ||
if (isSet(meta.runNpmInstallSet)) { | ||
if (meta.runNpmInstallSet.has(packageJsonPath)) { | ||
return false; | ||
} else { | ||
meta.runNpmInstallSet.add(packageJsonPath); | ||
} | ||
} | ||
} | ||
(0, assert_1.default)(path_1.default.isAbsolute(destPath)); | ||
const installTime = Date.now(); | ||
console.log("Installing dependencies..."); | ||
(0, import_debug.default)(`Installing to ${destPath}`); | ||
const opts = { cwd: destPath, ...spawnOpts }; | ||
const env = (0, import_clone_env.cloneEnv)(opts.env || process.env); | ||
delete env.NODE_ENV; | ||
opts.env = getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env | ||
}); | ||
let commandArgs; | ||
const isPotentiallyBrokenNpm = cliType === "npm" && (nodeVersion?.major === 16 || opts.env.PATH?.includes("/node16/bin-npm7")) && !args.includes("--legacy-peer-deps") && spawnOpts?.env?.ENABLE_EXPERIMENTAL_COREPACK !== "1"; | ||
if (cliType === "npm") { | ||
opts.prettyCommand = "npm install"; | ||
commandArgs = args.filter((a) => a !== "--prefer-offline").concat(["install", "--no-audit", "--unsafe-perm"]); | ||
if (isPotentiallyBrokenNpm && spawnOpts?.env?.VERCEL_NPM_LEGACY_PEER_DEPS === "1") { | ||
commandArgs.push("--legacy-peer-deps"); | ||
} | ||
} else if (cliType === "pnpm") { | ||
opts.prettyCommand = "pnpm install"; | ||
commandArgs = args.filter((a) => a !== "--prefer-offline").concat(["install", "--unsafe-perm"]); | ||
} else { | ||
opts.prettyCommand = "yarn install"; | ||
commandArgs = ["install", ...args]; | ||
} | ||
if (process.env.NPM_ONLY_PRODUCTION) { | ||
commandArgs.push("--production"); | ||
} | ||
try { | ||
await runNpmInstallSema.acquire(); | ||
const { cliType, packageJsonPath, lockfileVersion } = await scanParentDirs(destPath); | ||
// Only allow `runNpmInstall()` to run once per `package.json` | ||
// when doing a default install (no additional args) | ||
if (meta && packageJsonPath && args.length === 0) { | ||
if (!isSet(meta.runNpmInstallSet)) { | ||
meta.runNpmInstallSet = new Set(); | ||
} | ||
if (isSet(meta.runNpmInstallSet)) { | ||
if (meta.runNpmInstallSet.has(packageJsonPath)) { | ||
return false; | ||
} | ||
else { | ||
meta.runNpmInstallSet.add(packageJsonPath); | ||
} | ||
} | ||
} | ||
const installTime = Date.now(); | ||
console.log('Installing dependencies...'); | ||
(0, debug_1.default)(`Installing to ${destPath}`); | ||
const opts = { cwd: destPath, ...spawnOpts }; | ||
const env = (0, clone_env_1.cloneEnv)(opts.env || process.env); | ||
delete env.NODE_ENV; | ||
opts.env = getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env, | ||
}); | ||
let commandArgs; | ||
const isPotentiallyBrokenNpm = cliType === 'npm' && | ||
(nodeVersion?.major === 16 || | ||
opts.env.PATH?.includes('/node16/bin-npm7')) && | ||
!args.includes('--legacy-peer-deps') && | ||
spawnOpts?.env?.ENABLE_EXPERIMENTAL_COREPACK !== '1'; | ||
if (cliType === 'npm') { | ||
opts.prettyCommand = 'npm install'; | ||
commandArgs = args | ||
.filter(a => a !== '--prefer-offline') | ||
.concat(['install', '--no-audit', '--unsafe-perm']); | ||
if (isPotentiallyBrokenNpm && | ||
spawnOpts?.env?.VERCEL_NPM_LEGACY_PEER_DEPS === '1') { | ||
// Starting in npm@8.6.0, if you ran `npm install --legacy-peer-deps`, | ||
// and then later ran `npm install`, it would fail. So the only way | ||
// to safely upgrade npm from npm@8.5.0 is to set this flag. The docs | ||
// say this flag is not recommended so its is behind a feature flag | ||
// so we can remove it in node@18, which can introduce breaking changes. | ||
// See https://docs.npmjs.com/cli/v8/using-npm/config#legacy-peer-deps | ||
commandArgs.push('--legacy-peer-deps'); | ||
} | ||
} | ||
else if (cliType === 'pnpm') { | ||
// PNPM's install command is similar to NPM's but without the audit nonsense | ||
// @see options https://pnpm.io/cli/install | ||
opts.prettyCommand = 'pnpm install'; | ||
commandArgs = args | ||
.filter(a => a !== '--prefer-offline') | ||
.concat(['install', '--unsafe-perm']); | ||
} | ||
else { | ||
opts.prettyCommand = 'yarn install'; | ||
commandArgs = ['install', ...args]; | ||
} | ||
if (process.env.NPM_ONLY_PRODUCTION) { | ||
commandArgs.push('--production'); | ||
} | ||
try { | ||
await spawnAsync(cliType, commandArgs, opts); | ||
} | ||
catch (err) { | ||
const potentialErrorPath = path_1.default.join(process.env.HOME || '/', '.npm', 'eresolve-report.txt'); | ||
if (isPotentiallyBrokenNpm && | ||
!commandArgs.includes('--legacy-peer-deps') && | ||
fs_extra_1.default.existsSync(potentialErrorPath)) { | ||
console.warn('Warning: Retrying "Install Command" with `--legacy-peer-deps` which may accept a potentially broken dependency and slow install time.'); | ||
commandArgs.push('--legacy-peer-deps'); | ||
await spawnAsync(cliType, commandArgs, opts); | ||
} | ||
else { | ||
throw err; | ||
} | ||
} | ||
(0, debug_1.default)(`Install complete [${Date.now() - installTime}ms]`); | ||
return true; | ||
await spawnAsync(cliType, commandArgs, opts); | ||
} catch (err) { | ||
const potentialErrorPath = import_path.default.join( | ||
process.env.HOME || "/", | ||
".npm", | ||
"eresolve-report.txt" | ||
); | ||
if (isPotentiallyBrokenNpm && !commandArgs.includes("--legacy-peer-deps") && import_fs_extra.default.existsSync(potentialErrorPath)) { | ||
console.warn( | ||
'Warning: Retrying "Install Command" with `--legacy-peer-deps` which may accept a potentially broken dependency and slow install time.' | ||
); | ||
commandArgs.push("--legacy-peer-deps"); | ||
await spawnAsync(cliType, commandArgs, opts); | ||
} else { | ||
throw err; | ||
} | ||
} | ||
finally { | ||
runNpmInstallSema.release(); | ||
} | ||
(0, import_debug.default)(`Install complete [${Date.now() - installTime}ms]`); | ||
return true; | ||
} finally { | ||
runNpmInstallSema.release(); | ||
} | ||
} | ||
exports.runNpmInstall = runNpmInstall; | ||
function getEnvForPackageManager({ cliType, lockfileVersion, nodeVersion, env, }) { | ||
const newEnv = { ...env }; | ||
const oldPath = env.PATH + ''; | ||
const npm7 = '/node16/bin-npm7'; | ||
const pnpm7 = '/pnpm7/node_modules/.bin'; | ||
const pnpm8 = '/pnpm8/node_modules/.bin'; | ||
const corepackEnabled = env.ENABLE_EXPERIMENTAL_COREPACK === '1'; | ||
if (cliType === 'npm') { | ||
if (typeof lockfileVersion === 'number' && | ||
lockfileVersion >= 2 && | ||
(nodeVersion?.major || 0) < 16 && | ||
!oldPath.includes(npm7) && | ||
!corepackEnabled) { | ||
// Ensure that npm 7 is at the beginning of the `$PATH` | ||
newEnv.PATH = `${npm7}${path_1.default.delimiter}${oldPath}`; | ||
console.log('Detected `package-lock.json` generated by npm 7+...'); | ||
} | ||
function getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env | ||
}) { | ||
const newEnv = { ...env }; | ||
const oldPath = env.PATH + ""; | ||
const npm7 = "/node16/bin-npm7"; | ||
const pnpm7 = "/pnpm7/node_modules/.bin"; | ||
const pnpm8 = "/pnpm8/node_modules/.bin"; | ||
const corepackEnabled = env.ENABLE_EXPERIMENTAL_COREPACK === "1"; | ||
if (cliType === "npm") { | ||
if (typeof lockfileVersion === "number" && lockfileVersion >= 2 && (nodeVersion?.major || 0) < 16 && !oldPath.includes(npm7) && !corepackEnabled) { | ||
newEnv.PATH = `${npm7}${import_path.default.delimiter}${oldPath}`; | ||
console.log("Detected `package-lock.json` generated by npm 7+..."); | ||
} | ||
else if (cliType === 'pnpm') { | ||
if (typeof lockfileVersion === 'number' && | ||
lockfileVersion === 5.4 && | ||
!oldPath.includes(pnpm7) && | ||
!corepackEnabled) { | ||
// Ensure that pnpm 7 is at the beginning of the `$PATH` | ||
newEnv.PATH = `${pnpm7}${path_1.default.delimiter}${oldPath}`; | ||
console.log(`Detected \`pnpm-lock.yaml\` version ${lockfileVersion} generated by pnpm 7...`); | ||
} | ||
else if (typeof lockfileVersion === 'number' && | ||
lockfileVersion >= 6 && | ||
!oldPath.includes(pnpm8) && | ||
!corepackEnabled) { | ||
// Ensure that pnpm 8 is at the beginning of the `$PATH` | ||
newEnv.PATH = `${pnpm8}${path_1.default.delimiter}${oldPath}`; | ||
console.log(`Detected \`pnpm-lock.yaml\` version ${lockfileVersion} generated by pnpm 8...`); | ||
} | ||
} else if (cliType === "pnpm") { | ||
if (typeof lockfileVersion === "number" && lockfileVersion === 5.4 && !oldPath.includes(pnpm7) && !corepackEnabled) { | ||
newEnv.PATH = `${pnpm7}${import_path.default.delimiter}${oldPath}`; | ||
console.log( | ||
`Detected \`pnpm-lock.yaml\` version ${lockfileVersion} generated by pnpm 7...` | ||
); | ||
} else if (typeof lockfileVersion === "number" && lockfileVersion >= 6 && !oldPath.includes(pnpm8) && !corepackEnabled) { | ||
newEnv.PATH = `${pnpm8}${import_path.default.delimiter}${oldPath}`; | ||
console.log( | ||
`Detected \`pnpm-lock.yaml\` version ${lockfileVersion} generated by pnpm 8...` | ||
); | ||
} | ||
else { | ||
// Yarn v2 PnP mode may be activated, so force "node-modules" linker style | ||
if (!env.YARN_NODE_LINKER) { | ||
newEnv.YARN_NODE_LINKER = 'node-modules'; | ||
} | ||
} else { | ||
if (!env.YARN_NODE_LINKER) { | ||
newEnv.YARN_NODE_LINKER = "node-modules"; | ||
} | ||
return newEnv; | ||
} | ||
return newEnv; | ||
} | ||
exports.getEnvForPackageManager = getEnvForPackageManager; | ||
async function runCustomInstallCommand({ destPath, installCommand, nodeVersion, spawnOpts, }) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
const { cliType, lockfileVersion } = await scanParentDirs(destPath); | ||
const env = getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts?.env || {}, | ||
}); | ||
(0, debug_1.default)(`Running with $PATH:`, env?.PATH || ''); | ||
await execCommand(installCommand, { | ||
...spawnOpts, | ||
env, | ||
cwd: destPath, | ||
}); | ||
async function runCustomInstallCommand({ | ||
destPath, | ||
installCommand, | ||
nodeVersion, | ||
spawnOpts | ||
}) { | ||
console.log(`Running "install" command: \`${installCommand}\`...`); | ||
const { cliType, lockfileVersion } = await scanParentDirs(destPath); | ||
const env = getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion, | ||
env: spawnOpts?.env || {} | ||
}); | ||
(0, import_debug.default)(`Running with $PATH:`, env?.PATH || ""); | ||
await execCommand(installCommand, { | ||
...spawnOpts, | ||
env, | ||
cwd: destPath | ||
}); | ||
} | ||
exports.runCustomInstallCommand = runCustomInstallCommand; | ||
async function runPackageJsonScript(destPath, scriptNames, spawnOpts) { | ||
(0, assert_1.default)(path_1.default.isAbsolute(destPath)); | ||
const { packageJson, cliType, lockfileVersion } = await scanParentDirs(destPath, true); | ||
const scriptName = getScriptName(packageJson, typeof scriptNames === 'string' ? [scriptNames] : scriptNames); | ||
if (!scriptName) | ||
return false; | ||
(0, debug_1.default)('Running user script...'); | ||
const runScriptTime = Date.now(); | ||
const opts = { | ||
cwd: destPath, | ||
...spawnOpts, | ||
env: getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion: undefined, | ||
env: (0, clone_env_1.cloneEnv)(process.env, spawnOpts?.env), | ||
}), | ||
}; | ||
if (cliType === 'npm') { | ||
opts.prettyCommand = `npm run ${scriptName}`; | ||
} | ||
else if (cliType === 'pnpm') { | ||
opts.prettyCommand = `pnpm run ${scriptName}`; | ||
} | ||
else { | ||
opts.prettyCommand = `yarn run ${scriptName}`; | ||
} | ||
console.log(`Running "${opts.prettyCommand}"`); | ||
await spawnAsync(cliType, ['run', scriptName], opts); | ||
(0, debug_1.default)(`Script complete [${Date.now() - runScriptTime}ms]`); | ||
return true; | ||
(0, import_assert.default)(import_path.default.isAbsolute(destPath)); | ||
const { packageJson, cliType, lockfileVersion } = await scanParentDirs( | ||
destPath, | ||
true | ||
); | ||
const scriptName = getScriptName( | ||
packageJson, | ||
typeof scriptNames === "string" ? [scriptNames] : scriptNames | ||
); | ||
if (!scriptName) | ||
return false; | ||
(0, import_debug.default)("Running user script..."); | ||
const runScriptTime = Date.now(); | ||
const opts = { | ||
cwd: destPath, | ||
...spawnOpts, | ||
env: getEnvForPackageManager({ | ||
cliType, | ||
lockfileVersion, | ||
nodeVersion: void 0, | ||
env: (0, import_clone_env.cloneEnv)(process.env, spawnOpts?.env) | ||
}) | ||
}; | ||
if (cliType === "npm") { | ||
opts.prettyCommand = `npm run ${scriptName}`; | ||
} else if (cliType === "pnpm") { | ||
opts.prettyCommand = `pnpm run ${scriptName}`; | ||
} else { | ||
opts.prettyCommand = `yarn run ${scriptName}`; | ||
} | ||
console.log(`Running "${opts.prettyCommand}"`); | ||
await spawnAsync(cliType, ["run", scriptName], opts); | ||
(0, import_debug.default)(`Script complete [${Date.now() - runScriptTime}ms]`); | ||
return true; | ||
} | ||
exports.runPackageJsonScript = runPackageJsonScript; | ||
async function runBundleInstall(destPath, args = [], spawnOpts, meta) { | ||
if (meta && meta.isDev) { | ||
(0, debug_1.default)('Skipping dependency installation because dev mode is enabled'); | ||
return; | ||
} | ||
(0, assert_1.default)(path_1.default.isAbsolute(destPath)); | ||
const opts = { ...spawnOpts, cwd: destPath, prettyCommand: 'bundle install' }; | ||
await spawnAsync('bundle', args.concat(['install']), opts); | ||
if (meta && meta.isDev) { | ||
(0, import_debug.default)("Skipping dependency installation because dev mode is enabled"); | ||
return; | ||
} | ||
(0, import_assert.default)(import_path.default.isAbsolute(destPath)); | ||
const opts = { ...spawnOpts, cwd: destPath, prettyCommand: "bundle install" }; | ||
await spawnAsync("bundle", args.concat(["install"]), opts); | ||
} | ||
exports.runBundleInstall = runBundleInstall; | ||
async function runPipInstall(destPath, args = [], spawnOpts, meta) { | ||
if (meta && meta.isDev) { | ||
(0, debug_1.default)('Skipping dependency installation because dev mode is enabled'); | ||
return; | ||
} | ||
(0, assert_1.default)(path_1.default.isAbsolute(destPath)); | ||
const opts = { ...spawnOpts, cwd: destPath, prettyCommand: 'pip3 install' }; | ||
await spawnAsync('pip3', ['install', '--disable-pip-version-check', ...args], opts); | ||
if (meta && meta.isDev) { | ||
(0, import_debug.default)("Skipping dependency installation because dev mode is enabled"); | ||
return; | ||
} | ||
(0, import_assert.default)(import_path.default.isAbsolute(destPath)); | ||
const opts = { ...spawnOpts, cwd: destPath, prettyCommand: "pip3 install" }; | ||
await spawnAsync( | ||
"pip3", | ||
["install", "--disable-pip-version-check", ...args], | ||
opts | ||
); | ||
} | ||
exports.runPipInstall = runPipInstall; | ||
function getScriptName(pkg, possibleNames) { | ||
if (pkg?.scripts) { | ||
for (const name of possibleNames) { | ||
if (name in pkg.scripts) { | ||
return name; | ||
} | ||
} | ||
if (pkg?.scripts) { | ||
for (const name of possibleNames) { | ||
if (name in pkg.scripts) { | ||
return name; | ||
} | ||
} | ||
return null; | ||
} | ||
return null; | ||
} | ||
exports.getScriptName = getScriptName; | ||
/** | ||
* @deprecate installDependencies() is deprecated. | ||
* Please use runNpmInstall() instead. | ||
*/ | ||
exports.installDependencies = (0, util_1.deprecate)(runNpmInstall, 'installDependencies() is deprecated. Please use runNpmInstall() instead.'); | ||
const installDependencies = (0, import_util.deprecate)( | ||
runNpmInstall, | ||
"installDependencies() is deprecated. Please use runNpmInstall() instead." | ||
); | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
execCommand, | ||
getEnvForPackageManager, | ||
getNodeBinPath, | ||
getNodeBinPaths, | ||
getNodeVersion, | ||
getScriptName, | ||
getSpawnOptions, | ||
installDependencies, | ||
runBundleInstall, | ||
runCustomInstallCommand, | ||
runNpmInstall, | ||
runPackageJsonScript, | ||
runPipInstall, | ||
runShellScript, | ||
scanParentDirs, | ||
spawnAsync, | ||
spawnCommand, | ||
traverseUpDirectories, | ||
walkParentDirs | ||
}); |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const end_of_stream_1 = __importDefault(require("end-of-stream")); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var stream_to_buffer_exports = {}; | ||
__export(stream_to_buffer_exports, { | ||
default: () => streamToBuffer | ||
}); | ||
module.exports = __toCommonJS(stream_to_buffer_exports); | ||
var import_end_of_stream = __toESM(require("end-of-stream")); | ||
function streamToBuffer(stream) { | ||
return new Promise((resolve, reject) => { | ||
const buffers = []; | ||
stream.on('data', buffers.push.bind(buffers)); | ||
(0, end_of_stream_1.default)(stream, err => { | ||
if (err) { | ||
reject(err); | ||
return; | ||
} | ||
switch (buffers.length) { | ||
case 0: | ||
resolve(Buffer.allocUnsafe(0)); | ||
break; | ||
case 1: | ||
resolve(buffers[0]); | ||
break; | ||
default: | ||
resolve(Buffer.concat(buffers)); | ||
} | ||
}); | ||
return new Promise((resolve, reject) => { | ||
const buffers = []; | ||
stream.on("data", buffers.push.bind(buffers)); | ||
(0, import_end_of_stream.default)(stream, (err) => { | ||
if (err) { | ||
reject(err); | ||
return; | ||
} | ||
switch (buffers.length) { | ||
case 0: | ||
resolve(Buffer.allocUnsafe(0)); | ||
break; | ||
case 1: | ||
resolve(buffers[0]); | ||
break; | ||
default: | ||
resolve(Buffer.concat(buffers)); | ||
} | ||
}); | ||
}); | ||
} | ||
exports.default = streamToBuffer; |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const path_1 = __importDefault(require("path")); | ||
const fs_extra_1 = __importDefault(require("fs-extra")); | ||
const ignore_1 = __importDefault(require("ignore")); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var get_ignore_filter_exports = {}; | ||
__export(get_ignore_filter_exports, { | ||
default: () => get_ignore_filter_default | ||
}); | ||
module.exports = __toCommonJS(get_ignore_filter_exports); | ||
var import_path = __toESM(require("path")); | ||
var import_fs_extra = __toESM(require("fs-extra")); | ||
var import_ignore = __toESM(require("ignore")); | ||
function isCodedError(error) { | ||
return (error !== null && | ||
error !== undefined && | ||
error.code !== undefined); | ||
return error !== null && error !== void 0 && error.code !== void 0; | ||
} | ||
function clearRelative(s) { | ||
return s.replace(/(\n|^)\.\//g, '$1'); | ||
return s.replace(/(\n|^)\.\//g, "$1"); | ||
} | ||
async function default_1(downloadPath, rootDirectory) { | ||
const readFile = async (p) => { | ||
try { | ||
return await fs_extra_1.default.readFile(p, 'utf8'); | ||
} | ||
catch (error) { | ||
if (error.code === 'ENOENT' || | ||
(error instanceof Error && error.message.includes('ENOENT'))) { | ||
return undefined; | ||
} | ||
throw error; | ||
} | ||
}; | ||
const vercelIgnorePath = path_1.default.join(downloadPath, rootDirectory || '', '.vercelignore'); | ||
const nowIgnorePath = path_1.default.join(downloadPath, rootDirectory || '', '.nowignore'); | ||
const ignoreContents = []; | ||
async function get_ignore_filter_default(downloadPath, rootDirectory) { | ||
const readFile = async (p) => { | ||
try { | ||
ignoreContents.push(...(await Promise.all([readFile(vercelIgnorePath), readFile(nowIgnorePath)])).filter(Boolean)); | ||
return await import_fs_extra.default.readFile(p, "utf8"); | ||
} catch (error) { | ||
if (error.code === "ENOENT" || error instanceof Error && error.message.includes("ENOENT")) { | ||
return void 0; | ||
} | ||
throw error; | ||
} | ||
catch (error) { | ||
if (isCodedError(error) && error.code === 'ENOTDIR') { | ||
console.log(`Warning: Cannot read ignore file from ${vercelIgnorePath}`); | ||
} | ||
else { | ||
throw error; | ||
} | ||
}; | ||
const vercelIgnorePath = import_path.default.join( | ||
downloadPath, | ||
rootDirectory || "", | ||
".vercelignore" | ||
); | ||
const nowIgnorePath = import_path.default.join( | ||
downloadPath, | ||
rootDirectory || "", | ||
".nowignore" | ||
); | ||
const ignoreContents = []; | ||
try { | ||
ignoreContents.push( | ||
...(await Promise.all([readFile(vercelIgnorePath), readFile(nowIgnorePath)])).filter(Boolean) | ||
); | ||
} catch (error) { | ||
if (isCodedError(error) && error.code === "ENOTDIR") { | ||
console.log(`Warning: Cannot read ignore file from ${vercelIgnorePath}`); | ||
} else { | ||
throw error; | ||
} | ||
if (ignoreContents.length === 2) { | ||
throw new Error('Cannot use both a `.vercelignore` and `.nowignore` file. Please delete the `.nowignore` file.'); | ||
} | ||
if (ignoreContents.length === 0) { | ||
return () => false; | ||
} | ||
const ignoreFilter = (0, ignore_1.default)().add(clearRelative(ignoreContents[0])); | ||
return function (p) { | ||
// we should not ignore now.json and vercel.json if it asked to. | ||
// we depend on these files for building the app with sourceless | ||
if (p === 'now.json' || p === 'vercel.json') | ||
return false; | ||
return ignoreFilter.test(p).ignored; | ||
}; | ||
} | ||
if (ignoreContents.length === 2) { | ||
throw new Error( | ||
"Cannot use both a `.vercelignore` and `.nowignore` file. Please delete the `.nowignore` file." | ||
); | ||
} | ||
if (ignoreContents.length === 0) { | ||
return () => false; | ||
} | ||
const ignoreFilter = (0, import_ignore.default)().add(clearRelative(ignoreContents[0])); | ||
return function(p) { | ||
if (p === "now.json" || p === "vercel.json") | ||
return false; | ||
return ignoreFilter.test(p).ignored; | ||
}; | ||
} | ||
exports.default = default_1; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getPlatformEnv = void 0; | ||
const errors_1 = require("./errors"); | ||
/** | ||
* Helper function to support both `VERCEL_` and legacy `NOW_` env vars. | ||
* Throws an error if *both* env vars are defined. | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var get_platform_env_exports = {}; | ||
__export(get_platform_env_exports, { | ||
getPlatformEnv: () => getPlatformEnv | ||
}); | ||
module.exports = __toCommonJS(get_platform_env_exports); | ||
var import_errors = require("./errors"); | ||
const getPlatformEnv = (name) => { | ||
const vName = `VERCEL_${name}`; | ||
const nName = `NOW_${name}`; | ||
const v = process.env[vName]; | ||
const n = process.env[nName]; | ||
if (typeof v === 'string') { | ||
if (typeof n === 'string') { | ||
throw new errors_1.NowBuildError({ | ||
code: 'CONFLICTING_ENV_VAR_NAMES', | ||
message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`, | ||
link: 'https://vercel.link/combining-old-and-new-config', | ||
}); | ||
} | ||
return v; | ||
const vName = `VERCEL_${name}`; | ||
const nName = `NOW_${name}`; | ||
const v = process.env[vName]; | ||
const n = process.env[nName]; | ||
if (typeof v === "string") { | ||
if (typeof n === "string") { | ||
throw new import_errors.NowBuildError({ | ||
code: "CONFLICTING_ENV_VAR_NAMES", | ||
message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`, | ||
link: "https://vercel.link/combining-old-and-new-config" | ||
}); | ||
} | ||
return n; | ||
return v; | ||
} | ||
return n; | ||
}; | ||
exports.getPlatformEnv = getPlatformEnv; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
getPlatformEnv | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getPrefixedEnvVars = void 0; | ||
/** | ||
* Get the framework-specific prefixed System Environment Variables. | ||
* See https://vercel.com/docs/concepts/projects/environment-variables#system-environment-variables | ||
* @param envPrefix - Prefix, typically from `@vercel/frameworks` | ||
* @param envs - Environment Variables, typically from `process.env` | ||
*/ | ||
function getPrefixedEnvVars({ envPrefix, envs, }) { | ||
const vercelSystemEnvPrefix = 'VERCEL_'; | ||
const allowed = [ | ||
'VERCEL_URL', | ||
'VERCEL_ENV', | ||
'VERCEL_REGION', | ||
'VERCEL_BRANCH_URL', | ||
]; | ||
const newEnvs = {}; | ||
if (envPrefix && envs.VERCEL_URL) { | ||
Object.keys(envs) | ||
.filter(key => allowed.includes(key) || key.startsWith('VERCEL_GIT_')) | ||
.forEach(key => { | ||
const newKey = `${envPrefix}${key}`; | ||
if (!(newKey in envs)) { | ||
newEnvs[newKey] = envs[key]; | ||
} | ||
}); | ||
// Tell turbo to exclude all Vercel System Env Vars | ||
// See https://github.com/vercel/turborepo/pull/1622 | ||
newEnvs.TURBO_CI_VENDOR_ENV_KEY = `${envPrefix}${vercelSystemEnvPrefix}`; | ||
} | ||
return newEnvs; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var get_prefixed_env_vars_exports = {}; | ||
__export(get_prefixed_env_vars_exports, { | ||
getPrefixedEnvVars: () => getPrefixedEnvVars | ||
}); | ||
module.exports = __toCommonJS(get_prefixed_env_vars_exports); | ||
function getPrefixedEnvVars({ | ||
envPrefix, | ||
envs | ||
}) { | ||
const vercelSystemEnvPrefix = "VERCEL_"; | ||
const allowed = [ | ||
"VERCEL_URL", | ||
"VERCEL_ENV", | ||
"VERCEL_REGION", | ||
"VERCEL_BRANCH_URL" | ||
]; | ||
const newEnvs = {}; | ||
if (envPrefix && envs.VERCEL_URL) { | ||
Object.keys(envs).filter((key) => allowed.includes(key) || key.startsWith("VERCEL_GIT_")).forEach((key) => { | ||
const newKey = `${envPrefix}${key}`; | ||
if (!(newKey in envs)) { | ||
newEnvs[newKey] = envs[key]; | ||
} | ||
}); | ||
newEnvs.TURBO_CI_VENDOR_ENV_KEY = `${envPrefix}${vercelSystemEnvPrefix}`; | ||
} | ||
return newEnvs; | ||
} | ||
exports.getPrefixedEnvVars = getPrefixedEnvVars; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
getPrefixedEnvVars | ||
}); |
"use strict"; | ||
// Source: https://github.com/pnpm/pnpm/blob/b38d711f3892a473e20e69dd32ca7e1b439efaec/fs/hard-link-dir/src/index.ts#L4 | ||
// LICENSE (MIT): https://github.com/pnpm/pnpm/blob/b38d711f3892a473e20e69dd32ca7e1b439efaec/LICENSE | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.hardLinkDir = void 0; | ||
const path_1 = __importDefault(require("path")); | ||
const fs_1 = require("fs"); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var hard_link_dir_exports = {}; | ||
__export(hard_link_dir_exports, { | ||
hardLinkDir: () => hardLinkDir | ||
}); | ||
module.exports = __toCommonJS(hard_link_dir_exports); | ||
var import_path = __toESM(require("path")); | ||
var import_fs = require("fs"); | ||
async function hardLinkDir(src, destDirs) { | ||
if (destDirs.length === 0) | ||
if (destDirs.length === 0) | ||
return; | ||
destDirs = destDirs.filter((destDir) => import_path.default.relative(destDir, src) !== ""); | ||
const files = await import_fs.promises.readdir(src); | ||
await Promise.all( | ||
files.map(async (file) => { | ||
if (file === "node_modules") | ||
return; | ||
// Don't try to hard link the source directory to itself | ||
destDirs = destDirs.filter(destDir => path_1.default.relative(destDir, src) !== ''); | ||
const files = await fs_1.promises.readdir(src); | ||
await Promise.all(files.map(async (file) => { | ||
if (file === 'node_modules') | ||
return; | ||
const srcFile = path_1.default.join(src, file); | ||
if ((await fs_1.promises.lstat(srcFile)).isDirectory()) { | ||
const destSubdirs = await Promise.all(destDirs.map(async (destDir) => { | ||
const destSubdir = path_1.default.join(destDir, file); | ||
try { | ||
await fs_1.promises.mkdir(destSubdir, { recursive: true }); | ||
} | ||
catch (err) { | ||
// eslint-disable-line | ||
if (err.code !== 'EEXIST') | ||
throw err; | ||
} | ||
return destSubdir; | ||
})); | ||
await hardLinkDir(srcFile, destSubdirs); | ||
return; | ||
} | ||
await Promise.all(destDirs.map(async (destDir) => { | ||
const destFile = path_1.default.join(destDir, file); | ||
const srcFile = import_path.default.join(src, file); | ||
if ((await import_fs.promises.lstat(srcFile)).isDirectory()) { | ||
const destSubdirs = await Promise.all( | ||
destDirs.map(async (destDir) => { | ||
const destSubdir = import_path.default.join(destDir, file); | ||
try { | ||
await linkOrCopyFile(srcFile, destFile); | ||
} | ||
catch (err) { | ||
// eslint-disable-line | ||
if (err.code === 'ENOENT') { | ||
// Ignore broken symlinks | ||
return; | ||
} | ||
await import_fs.promises.mkdir(destSubdir, { recursive: true }); | ||
} catch (err) { | ||
if (err.code !== "EEXIST") | ||
throw err; | ||
} | ||
})); | ||
})); | ||
return destSubdir; | ||
}) | ||
); | ||
await hardLinkDir(srcFile, destSubdirs); | ||
return; | ||
} | ||
await Promise.all( | ||
destDirs.map(async (destDir) => { | ||
const destFile = import_path.default.join(destDir, file); | ||
try { | ||
await linkOrCopyFile(srcFile, destFile); | ||
} catch (err) { | ||
if (err.code === "ENOENT") { | ||
return; | ||
} | ||
throw err; | ||
} | ||
}) | ||
); | ||
}) | ||
); | ||
} | ||
exports.hardLinkDir = hardLinkDir; | ||
async function linkOrCopyFile(srcFile, destFile) { | ||
try { | ||
await linkOrCopy(srcFile, destFile); | ||
try { | ||
await linkOrCopy(srcFile, destFile); | ||
} catch (err) { | ||
if (err.code === "ENOENT") { | ||
await import_fs.promises.mkdir(import_path.default.dirname(destFile), { recursive: true }); | ||
await linkOrCopy(srcFile, destFile); | ||
return; | ||
} | ||
catch (err) { | ||
// eslint-disable-line | ||
if (err.code === 'ENOENT') { | ||
await fs_1.promises.mkdir(path_1.default.dirname(destFile), { recursive: true }); | ||
await linkOrCopy(srcFile, destFile); | ||
return; | ||
} | ||
if (err.code !== 'EEXIST') { | ||
throw err; | ||
} | ||
if (err.code !== "EEXIST") { | ||
throw err; | ||
} | ||
} | ||
} | ||
/* | ||
* This function could be optimized because we don't really need to try linking again | ||
* if linking failed once. | ||
*/ | ||
async function linkOrCopy(srcFile, destFile) { | ||
try { | ||
await fs_1.promises.link(srcFile, destFile); | ||
} | ||
catch (err) { | ||
// eslint-disable-line | ||
if (err.code !== 'EXDEV') | ||
throw err; | ||
await fs_1.promises.copyFile(srcFile, destFile); | ||
} | ||
try { | ||
await import_fs.promises.link(srcFile, destFile); | ||
} catch (err) { | ||
if (err.code !== "EXDEV") | ||
throw err; | ||
await import_fs.promises.copyFile(srcFile, destFile); | ||
} | ||
} | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
hardLinkDir | ||
}); |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getLambdaOptionsFromFunction = exports.createZip = exports.createLambda = exports.Lambda = void 0; | ||
const assert_1 = __importDefault(require("assert")); | ||
const async_sema_1 = __importDefault(require("async-sema")); | ||
const yazl_1 = require("yazl"); | ||
const minimatch_1 = __importDefault(require("minimatch")); | ||
const fs_extra_1 = require("fs-extra"); | ||
const download_1 = require("./fs/download"); | ||
const stream_to_buffer_1 = __importDefault(require("./fs/stream-to-buffer")); | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var lambda_exports = {}; | ||
__export(lambda_exports, { | ||
Lambda: () => Lambda, | ||
createLambda: () => createLambda, | ||
createZip: () => createZip, | ||
getLambdaOptionsFromFunction: () => getLambdaOptionsFromFunction | ||
}); | ||
module.exports = __toCommonJS(lambda_exports); | ||
var import_assert = __toESM(require("assert")); | ||
var import_async_sema = __toESM(require("async-sema")); | ||
var import_yazl = require("yazl"); | ||
var import_minimatch = __toESM(require("minimatch")); | ||
var import_fs_extra = require("fs-extra"); | ||
var import_download = require("./fs/download"); | ||
var import_stream_to_buffer = __toESM(require("./fs/stream-to-buffer")); | ||
class Lambda { | ||
constructor(opts) { | ||
const { handler, runtime, maxDuration, architecture, memory, environment = {}, allowQuery, regions, supportsMultiPayloads, supportsWrapper, supportsResponseStreaming, experimentalResponseStreaming, operationType, framework, } = opts; | ||
if ('files' in opts) { | ||
(0, assert_1.default)(typeof opts.files === 'object', '"files" must be an object'); | ||
} | ||
if ('zipBuffer' in opts) { | ||
(0, assert_1.default)(Buffer.isBuffer(opts.zipBuffer), '"zipBuffer" must be a Buffer'); | ||
} | ||
(0, assert_1.default)(typeof handler === 'string', '"handler" is not a string'); | ||
(0, assert_1.default)(typeof runtime === 'string', '"runtime" is not a string'); | ||
(0, assert_1.default)(typeof environment === 'object', '"environment" is not an object'); | ||
if (architecture !== undefined) { | ||
(0, assert_1.default)(architecture === 'x86_64' || architecture === 'arm64', '"architecture" must be either "x86_64" or "arm64"'); | ||
} | ||
if (memory !== undefined) { | ||
(0, assert_1.default)(typeof memory === 'number', '"memory" is not a number'); | ||
} | ||
if (maxDuration !== undefined) { | ||
(0, assert_1.default)(typeof maxDuration === 'number', '"maxDuration" is not a number'); | ||
} | ||
if (allowQuery !== undefined) { | ||
(0, assert_1.default)(Array.isArray(allowQuery), '"allowQuery" is not an Array'); | ||
(0, assert_1.default)(allowQuery.every(q => typeof q === 'string'), '"allowQuery" is not a string Array'); | ||
} | ||
if (supportsMultiPayloads !== undefined) { | ||
(0, assert_1.default)(typeof supportsMultiPayloads === 'boolean', '"supportsMultiPayloads" is not a boolean'); | ||
} | ||
if (supportsWrapper !== undefined) { | ||
(0, assert_1.default)(typeof supportsWrapper === 'boolean', '"supportsWrapper" is not a boolean'); | ||
} | ||
if (regions !== undefined) { | ||
(0, assert_1.default)(Array.isArray(regions), '"regions" is not an Array'); | ||
(0, assert_1.default)(regions.every(r => typeof r === 'string'), '"regions" is not a string Array'); | ||
} | ||
if (framework !== undefined) { | ||
(0, assert_1.default)(typeof framework === 'object', '"framework" is not an object'); | ||
(0, assert_1.default)(typeof framework.slug === 'string', '"framework.slug" is not a string'); | ||
if (framework.version !== undefined) { | ||
(0, assert_1.default)(typeof framework.version === 'string', '"framework.version" is not a string'); | ||
} | ||
} | ||
this.type = 'Lambda'; | ||
this.operationType = operationType; | ||
this.files = 'files' in opts ? opts.files : undefined; | ||
this.handler = handler; | ||
this.runtime = runtime; | ||
this.architecture = architecture; | ||
this.memory = memory; | ||
this.maxDuration = maxDuration; | ||
this.environment = environment; | ||
this.allowQuery = allowQuery; | ||
this.regions = regions; | ||
this.zipBuffer = 'zipBuffer' in opts ? opts.zipBuffer : undefined; | ||
this.supportsMultiPayloads = supportsMultiPayloads; | ||
this.supportsWrapper = supportsWrapper; | ||
this.supportsResponseStreaming = | ||
supportsResponseStreaming ?? experimentalResponseStreaming; | ||
this.framework = framework; | ||
constructor(opts) { | ||
const { | ||
handler, | ||
runtime, | ||
maxDuration, | ||
architecture, | ||
memory, | ||
environment = {}, | ||
allowQuery, | ||
regions, | ||
supportsMultiPayloads, | ||
supportsWrapper, | ||
supportsResponseStreaming, | ||
experimentalResponseStreaming, | ||
operationType, | ||
framework | ||
} = opts; | ||
if ("files" in opts) { | ||
(0, import_assert.default)(typeof opts.files === "object", '"files" must be an object'); | ||
} | ||
async createZip() { | ||
let { zipBuffer } = this; | ||
if (!zipBuffer) { | ||
if (!this.files) { | ||
throw new Error('`files` is not defined'); | ||
} | ||
await sema.acquire(); | ||
try { | ||
zipBuffer = await createZip(this.files); | ||
} | ||
finally { | ||
sema.release(); | ||
} | ||
} | ||
return zipBuffer; | ||
if ("zipBuffer" in opts) { | ||
(0, import_assert.default)(Buffer.isBuffer(opts.zipBuffer), '"zipBuffer" must be a Buffer'); | ||
} | ||
/** | ||
* @deprecated Use the `supportsResponseStreaming` property instead. | ||
*/ | ||
get experimentalResponseStreaming() { | ||
return this.supportsResponseStreaming; | ||
(0, import_assert.default)(typeof handler === "string", '"handler" is not a string'); | ||
(0, import_assert.default)(typeof runtime === "string", '"runtime" is not a string'); | ||
(0, import_assert.default)(typeof environment === "object", '"environment" is not an object'); | ||
if (architecture !== void 0) { | ||
(0, import_assert.default)( | ||
architecture === "x86_64" || architecture === "arm64", | ||
'"architecture" must be either "x86_64" or "arm64"' | ||
); | ||
} | ||
set experimentalResponseStreaming(v) { | ||
this.supportsResponseStreaming = v; | ||
if (memory !== void 0) { | ||
(0, import_assert.default)(typeof memory === "number", '"memory" is not a number'); | ||
} | ||
if (maxDuration !== void 0) { | ||
(0, import_assert.default)(typeof maxDuration === "number", '"maxDuration" is not a number'); | ||
} | ||
if (allowQuery !== void 0) { | ||
(0, import_assert.default)(Array.isArray(allowQuery), '"allowQuery" is not an Array'); | ||
(0, import_assert.default)( | ||
allowQuery.every((q) => typeof q === "string"), | ||
'"allowQuery" is not a string Array' | ||
); | ||
} | ||
if (supportsMultiPayloads !== void 0) { | ||
(0, import_assert.default)( | ||
typeof supportsMultiPayloads === "boolean", | ||
'"supportsMultiPayloads" is not a boolean' | ||
); | ||
} | ||
if (supportsWrapper !== void 0) { | ||
(0, import_assert.default)( | ||
typeof supportsWrapper === "boolean", | ||
'"supportsWrapper" is not a boolean' | ||
); | ||
} | ||
if (regions !== void 0) { | ||
(0, import_assert.default)(Array.isArray(regions), '"regions" is not an Array'); | ||
(0, import_assert.default)( | ||
regions.every((r) => typeof r === "string"), | ||
'"regions" is not a string Array' | ||
); | ||
} | ||
if (framework !== void 0) { | ||
(0, import_assert.default)(typeof framework === "object", '"framework" is not an object'); | ||
(0, import_assert.default)( | ||
typeof framework.slug === "string", | ||
'"framework.slug" is not a string' | ||
); | ||
if (framework.version !== void 0) { | ||
(0, import_assert.default)( | ||
typeof framework.version === "string", | ||
'"framework.version" is not a string' | ||
); | ||
} | ||
} | ||
this.type = "Lambda"; | ||
this.operationType = operationType; | ||
this.files = "files" in opts ? opts.files : void 0; | ||
this.handler = handler; | ||
this.runtime = runtime; | ||
this.architecture = architecture; | ||
this.memory = memory; | ||
this.maxDuration = maxDuration; | ||
this.environment = environment; | ||
this.allowQuery = allowQuery; | ||
this.regions = regions; | ||
this.zipBuffer = "zipBuffer" in opts ? opts.zipBuffer : void 0; | ||
this.supportsMultiPayloads = supportsMultiPayloads; | ||
this.supportsWrapper = supportsWrapper; | ||
this.supportsResponseStreaming = supportsResponseStreaming ?? experimentalResponseStreaming; | ||
this.framework = framework; | ||
} | ||
async createZip() { | ||
let { zipBuffer } = this; | ||
if (!zipBuffer) { | ||
if (!this.files) { | ||
throw new Error("`files` is not defined"); | ||
} | ||
await sema.acquire(); | ||
try { | ||
zipBuffer = await createZip(this.files); | ||
} finally { | ||
sema.release(); | ||
} | ||
} | ||
return zipBuffer; | ||
} | ||
/** | ||
* @deprecated Use the `supportsResponseStreaming` property instead. | ||
*/ | ||
get experimentalResponseStreaming() { | ||
return this.supportsResponseStreaming; | ||
} | ||
set experimentalResponseStreaming(v) { | ||
this.supportsResponseStreaming = v; | ||
} | ||
} | ||
exports.Lambda = Lambda; | ||
const sema = new async_sema_1.default(10); | ||
const mtime = new Date(1540000000000); | ||
/** | ||
* @deprecated Use `new Lambda()` instead. | ||
*/ | ||
const sema = new import_async_sema.default(10); | ||
const mtime = /* @__PURE__ */ new Date(154e10); | ||
async function createLambda(opts) { | ||
const lambda = new Lambda(opts); | ||
// backwards compat | ||
lambda.zipBuffer = await lambda.createZip(); | ||
return lambda; | ||
const lambda = new Lambda(opts); | ||
lambda.zipBuffer = await lambda.createZip(); | ||
return lambda; | ||
} | ||
exports.createLambda = createLambda; | ||
async function createZip(files) { | ||
const names = Object.keys(files).sort(); | ||
const symlinkTargets = new Map(); | ||
const names = Object.keys(files).sort(); | ||
const symlinkTargets = /* @__PURE__ */ new Map(); | ||
for (const name of names) { | ||
const file = files[name]; | ||
if (file.mode && (0, import_download.isSymbolicLink)(file.mode) && file.type === "FileFsRef") { | ||
const symlinkTarget = await (0, import_fs_extra.readlink)(file.fsPath); | ||
symlinkTargets.set(name, symlinkTarget); | ||
} | ||
} | ||
const zipFile = new import_yazl.ZipFile(); | ||
const zipBuffer = await new Promise((resolve, reject) => { | ||
for (const name of names) { | ||
const file = files[name]; | ||
if (file.mode && (0, download_1.isSymbolicLink)(file.mode) && file.type === 'FileFsRef') { | ||
const symlinkTarget = await (0, fs_extra_1.readlink)(file.fsPath); | ||
symlinkTargets.set(name, symlinkTarget); | ||
} | ||
const file = files[name]; | ||
const opts = { mode: file.mode, mtime }; | ||
const symlinkTarget = symlinkTargets.get(name); | ||
if (typeof symlinkTarget === "string") { | ||
zipFile.addBuffer(Buffer.from(symlinkTarget, "utf8"), name, opts); | ||
} else if (file.mode && (0, import_download.isDirectory)(file.mode)) { | ||
zipFile.addEmptyDirectory(name, opts); | ||
} else { | ||
const stream = file.toStream(); | ||
stream.on("error", reject); | ||
zipFile.addReadStream(stream, name, opts); | ||
} | ||
} | ||
const zipFile = new yazl_1.ZipFile(); | ||
const zipBuffer = await new Promise((resolve, reject) => { | ||
for (const name of names) { | ||
const file = files[name]; | ||
const opts = { mode: file.mode, mtime }; | ||
const symlinkTarget = symlinkTargets.get(name); | ||
if (typeof symlinkTarget === 'string') { | ||
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts); | ||
} | ||
else if (file.mode && (0, download_1.isDirectory)(file.mode)) { | ||
zipFile.addEmptyDirectory(name, opts); | ||
} | ||
else { | ||
const stream = file.toStream(); | ||
stream.on('error', reject); | ||
zipFile.addReadStream(stream, name, opts); | ||
} | ||
} | ||
zipFile.end(); | ||
(0, stream_to_buffer_1.default)(zipFile.outputStream).then(resolve).catch(reject); | ||
}); | ||
return zipBuffer; | ||
zipFile.end(); | ||
(0, import_stream_to_buffer.default)(zipFile.outputStream).then(resolve).catch(reject); | ||
}); | ||
return zipBuffer; | ||
} | ||
exports.createZip = createZip; | ||
async function getLambdaOptionsFromFunction({ sourceFile, config, }) { | ||
if (config?.functions) { | ||
for (const [pattern, fn] of Object.entries(config.functions)) { | ||
if (sourceFile === pattern || (0, minimatch_1.default)(sourceFile, pattern)) { | ||
return { | ||
memory: fn.memory, | ||
maxDuration: fn.maxDuration, | ||
}; | ||
} | ||
} | ||
async function getLambdaOptionsFromFunction({ | ||
sourceFile, | ||
config | ||
}) { | ||
if (config?.functions) { | ||
for (const [pattern, fn] of Object.entries(config.functions)) { | ||
if (sourceFile === pattern || (0, import_minimatch.default)(sourceFile, pattern)) { | ||
return { | ||
memory: fn.memory, | ||
maxDuration: fn.maxDuration | ||
}; | ||
} | ||
} | ||
return {}; | ||
} | ||
return {}; | ||
} | ||
exports.getLambdaOptionsFromFunction = getLambdaOptionsFromFunction; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
Lambda, | ||
createLambda, | ||
createZip, | ||
getLambdaOptionsFromFunction | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.NodejsLambda = void 0; | ||
const lambda_1 = require("./lambda"); | ||
class NodejsLambda extends lambda_1.Lambda { | ||
constructor({ shouldAddHelpers, shouldAddSourcemapSupport, awsLambdaHandler, ...opts }) { | ||
super(opts); | ||
this.launcherType = 'Nodejs'; | ||
this.shouldAddHelpers = shouldAddHelpers; | ||
this.shouldAddSourcemapSupport = shouldAddSourcemapSupport; | ||
this.awsLambdaHandler = awsLambdaHandler; | ||
} | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var nodejs_lambda_exports = {}; | ||
__export(nodejs_lambda_exports, { | ||
NodejsLambda: () => NodejsLambda | ||
}); | ||
module.exports = __toCommonJS(nodejs_lambda_exports); | ||
var import_lambda = require("./lambda"); | ||
class NodejsLambda extends import_lambda.Lambda { | ||
constructor({ | ||
shouldAddHelpers, | ||
shouldAddSourcemapSupport, | ||
awsLambdaHandler, | ||
...opts | ||
}) { | ||
super(opts); | ||
this.launcherType = "Nodejs"; | ||
this.shouldAddHelpers = shouldAddHelpers; | ||
this.shouldAddSourcemapSupport = shouldAddSourcemapSupport; | ||
this.awsLambdaHandler = awsLambdaHandler; | ||
} | ||
} | ||
exports.NodejsLambda = NodejsLambda; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
NodejsLambda | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.Prerender = void 0; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var prerender_exports = {}; | ||
__export(prerender_exports, { | ||
Prerender: () => Prerender | ||
}); | ||
module.exports = __toCommonJS(prerender_exports); | ||
class Prerender { | ||
constructor({ expiration, lambda, fallback, group, bypassToken, allowQuery, initialHeaders, initialStatus, passQuery, sourcePath, }) { | ||
this.type = 'Prerender'; | ||
this.expiration = expiration; | ||
this.sourcePath = sourcePath; | ||
this.lambda = lambda; | ||
if (this.lambda) { | ||
// "ISR" is the platform default lambda label for prerender functions | ||
this.lambda.operationType = this.lambda.operationType || 'ISR'; | ||
} | ||
if (typeof group !== 'undefined' && | ||
(group <= 0 || !Number.isInteger(group))) { | ||
throw new Error('The `group` argument for `Prerender` needs to be a natural number.'); | ||
} | ||
this.group = group; | ||
if (passQuery === true) { | ||
this.passQuery = true; | ||
} | ||
else if (typeof passQuery !== 'boolean' && | ||
typeof passQuery !== 'undefined') { | ||
throw new Error(`The \`passQuery\` argument for \`Prerender\` must be a boolean.`); | ||
} | ||
if (bypassToken == null) { | ||
this.bypassToken = null; | ||
} | ||
else if (typeof bypassToken === 'string') { | ||
if (bypassToken.length < 32) { | ||
// Enforce 128 bits of entropy for safety reasons (UUIDv4 size) | ||
throw new Error('The `bypassToken` argument for `Prerender` must be 32 characters or more.'); | ||
} | ||
this.bypassToken = bypassToken; | ||
} | ||
else { | ||
throw new Error('The `bypassToken` argument for `Prerender` must be a `string`.'); | ||
} | ||
if (typeof fallback === 'undefined') { | ||
throw new Error('The `fallback` argument for `Prerender` needs to be a `FileBlob`, `FileFsRef`, `FileRef`, or null.'); | ||
} | ||
this.fallback = fallback; | ||
if (initialHeaders !== undefined) { | ||
if (!initialHeaders || | ||
typeof initialHeaders !== 'object' || | ||
Object.entries(initialHeaders).some(([key, value]) => typeof key !== 'string' || typeof value !== 'string')) { | ||
throw new Error(`The \`initialHeaders\` argument for \`Prerender\` must be an object with string key/values`); | ||
} | ||
this.initialHeaders = initialHeaders; | ||
} | ||
if (initialStatus !== undefined) { | ||
if (initialStatus <= 0 || !Number.isInteger(initialStatus)) { | ||
throw new Error(`The \`initialStatus\` argument for \`Prerender\` must be a natural number.`); | ||
} | ||
this.initialStatus = initialStatus; | ||
} | ||
if (allowQuery !== undefined) { | ||
if (!Array.isArray(allowQuery)) { | ||
throw new Error('The `allowQuery` argument for `Prerender` must be Array.'); | ||
} | ||
if (!allowQuery.every(q => typeof q === 'string')) { | ||
throw new Error('The `allowQuery` argument for `Prerender` must be Array of strings.'); | ||
} | ||
this.allowQuery = allowQuery; | ||
} | ||
constructor({ | ||
expiration, | ||
lambda, | ||
fallback, | ||
group, | ||
bypassToken, | ||
allowQuery, | ||
initialHeaders, | ||
initialStatus, | ||
passQuery, | ||
sourcePath | ||
}) { | ||
this.type = "Prerender"; | ||
this.expiration = expiration; | ||
this.sourcePath = sourcePath; | ||
this.lambda = lambda; | ||
if (this.lambda) { | ||
this.lambda.operationType = this.lambda.operationType || "ISR"; | ||
} | ||
if (typeof group !== "undefined" && (group <= 0 || !Number.isInteger(group))) { | ||
throw new Error( | ||
"The `group` argument for `Prerender` needs to be a natural number." | ||
); | ||
} | ||
this.group = group; | ||
if (passQuery === true) { | ||
this.passQuery = true; | ||
} else if (typeof passQuery !== "boolean" && typeof passQuery !== "undefined") { | ||
throw new Error( | ||
`The \`passQuery\` argument for \`Prerender\` must be a boolean.` | ||
); | ||
} | ||
if (bypassToken == null) { | ||
this.bypassToken = null; | ||
} else if (typeof bypassToken === "string") { | ||
if (bypassToken.length < 32) { | ||
throw new Error( | ||
"The `bypassToken` argument for `Prerender` must be 32 characters or more." | ||
); | ||
} | ||
this.bypassToken = bypassToken; | ||
} else { | ||
throw new Error( | ||
"The `bypassToken` argument for `Prerender` must be a `string`." | ||
); | ||
} | ||
if (typeof fallback === "undefined") { | ||
throw new Error( | ||
"The `fallback` argument for `Prerender` needs to be a `FileBlob`, `FileFsRef`, `FileRef`, or null." | ||
); | ||
} | ||
this.fallback = fallback; | ||
if (initialHeaders !== void 0) { | ||
if (!initialHeaders || typeof initialHeaders !== "object" || Object.entries(initialHeaders).some( | ||
([key, value]) => typeof key !== "string" || typeof value !== "string" | ||
)) { | ||
throw new Error( | ||
`The \`initialHeaders\` argument for \`Prerender\` must be an object with string key/values` | ||
); | ||
} | ||
this.initialHeaders = initialHeaders; | ||
} | ||
if (initialStatus !== void 0) { | ||
if (initialStatus <= 0 || !Number.isInteger(initialStatus)) { | ||
throw new Error( | ||
`The \`initialStatus\` argument for \`Prerender\` must be a natural number.` | ||
); | ||
} | ||
this.initialStatus = initialStatus; | ||
} | ||
if (allowQuery !== void 0) { | ||
if (!Array.isArray(allowQuery)) { | ||
throw new Error( | ||
"The `allowQuery` argument for `Prerender` must be Array." | ||
); | ||
} | ||
if (!allowQuery.every((q) => typeof q === "string")) { | ||
throw new Error( | ||
"The `allowQuery` argument for `Prerender` must be Array of strings." | ||
); | ||
} | ||
this.allowQuery = allowQuery; | ||
} | ||
} | ||
} | ||
exports.Prerender = Prerender; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
Prerender | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.buildsSchema = exports.functionsSchema = void 0; | ||
exports.functionsSchema = { | ||
type: 'object', | ||
minProperties: 1, | ||
maxProperties: 50, | ||
additionalProperties: false, | ||
patternProperties: { | ||
'^.{1,256}$': { | ||
type: 'object', | ||
additionalProperties: false, | ||
properties: { | ||
runtime: { | ||
type: 'string', | ||
maxLength: 256, | ||
}, | ||
memory: { | ||
minimum: 128, | ||
maximum: 3008, | ||
}, | ||
maxDuration: { | ||
type: 'number', | ||
minimum: 1, | ||
maximum: 900, | ||
}, | ||
includeFiles: { | ||
type: 'string', | ||
maxLength: 256, | ||
}, | ||
excludeFiles: { | ||
type: 'string', | ||
maxLength: 256, | ||
}, | ||
}, | ||
}, | ||
}, | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
exports.buildsSchema = { | ||
type: 'array', | ||
minItems: 0, | ||
maxItems: 128, | ||
items: { | ||
type: 'object', | ||
additionalProperties: false, | ||
required: ['use'], | ||
properties: { | ||
src: { | ||
type: 'string', | ||
minLength: 1, | ||
maxLength: 4096, | ||
}, | ||
use: { | ||
type: 'string', | ||
minLength: 3, | ||
maxLength: 256, | ||
}, | ||
config: { type: 'object' }, | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var schemas_exports = {}; | ||
__export(schemas_exports, { | ||
buildsSchema: () => buildsSchema, | ||
functionsSchema: () => functionsSchema | ||
}); | ||
module.exports = __toCommonJS(schemas_exports); | ||
const functionsSchema = { | ||
type: "object", | ||
minProperties: 1, | ||
maxProperties: 50, | ||
additionalProperties: false, | ||
patternProperties: { | ||
"^.{1,256}$": { | ||
type: "object", | ||
additionalProperties: false, | ||
properties: { | ||
runtime: { | ||
type: "string", | ||
maxLength: 256 | ||
}, | ||
}, | ||
memory: { | ||
minimum: 128, | ||
maximum: 3008 | ||
}, | ||
maxDuration: { | ||
type: "number", | ||
minimum: 1, | ||
maximum: 900 | ||
}, | ||
includeFiles: { | ||
type: "string", | ||
maxLength: 256 | ||
}, | ||
excludeFiles: { | ||
type: "string", | ||
maxLength: 256 | ||
} | ||
} | ||
} | ||
} | ||
}; | ||
const buildsSchema = { | ||
type: "array", | ||
minItems: 0, | ||
maxItems: 128, | ||
items: { | ||
type: "object", | ||
additionalProperties: false, | ||
required: ["use"], | ||
properties: { | ||
src: { | ||
type: "string", | ||
minLength: 1, | ||
maxLength: 4096 | ||
}, | ||
use: { | ||
type: "string", | ||
minLength: 3, | ||
maxLength: 256 | ||
}, | ||
config: { type: "object" } | ||
} | ||
} | ||
}; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
buildsSchema, | ||
functionsSchema | ||
}); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.shouldServe = void 0; | ||
const path_1 = require("path"); | ||
const shouldServe = ({ entrypoint, files, requestPath, }) => { | ||
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/' | ||
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility | ||
if (entrypoint === requestPath && hasProp(files, entrypoint)) { | ||
return true; | ||
} | ||
const { dir, name } = (0, path_1.parse)(entrypoint); | ||
if (name === 'index' && dir === requestPath && hasProp(files, entrypoint)) { | ||
return true; | ||
} | ||
return false; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
exports.shouldServe = shouldServe; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var should_serve_exports = {}; | ||
__export(should_serve_exports, { | ||
shouldServe: () => shouldServe | ||
}); | ||
module.exports = __toCommonJS(should_serve_exports); | ||
var import_path = require("path"); | ||
const shouldServe = ({ | ||
entrypoint, | ||
files, | ||
requestPath | ||
}) => { | ||
requestPath = requestPath.replace(/\/$/, ""); | ||
entrypoint = entrypoint.replace(/\\/, "/"); | ||
if (entrypoint === requestPath && hasProp(files, entrypoint)) { | ||
return true; | ||
} | ||
const { dir, name } = (0, import_path.parse)(entrypoint); | ||
if (name === "index" && dir === requestPath && hasProp(files, entrypoint)) { | ||
return true; | ||
} | ||
return false; | ||
}; | ||
function hasProp(obj, key) { | ||
return Object.hasOwnProperty.call(obj, key); | ||
return Object.hasOwnProperty.call(obj, key); | ||
} | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
shouldServe | ||
}); |
@@ -268,5 +268,9 @@ /// <reference types="node" /> | ||
export interface NodeVersion { | ||
/** major version number: 18 */ | ||
major: number; | ||
/** major version range: "18.x" */ | ||
range: string; | ||
/** runtime descriptor: "nodejs18.x" */ | ||
runtime: string; | ||
/** date beyond which this version is discontinued: 2023-08-17T19:05:45.951Z */ | ||
discontinueDate?: Date; | ||
@@ -273,0 +277,0 @@ } |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var types_exports = {}; | ||
module.exports = __toCommonJS(types_exports); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.validateNpmrc = void 0; | ||
const path_1 = require("path"); | ||
const promises_1 = require("fs/promises"); | ||
/** | ||
* Checks if there is a `.npmrc` in the cwd (project root) and makes sure it | ||
* doesn't contain a `use-node-version`. This config setting is not supported | ||
* since it causes the package manager to install the Node.js version which in | ||
* the case of newer Node.js versions is not compatible with AWS due to | ||
* outdated GLIBC binaries. | ||
* | ||
* @see https://pnpm.io/npmrc#use-node-version | ||
* | ||
* @param cwd The current working directory (e.g. project root); | ||
*/ | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __export = (target, all) => { | ||
for (var name in all) | ||
__defProp(target, name, { get: all[name], enumerable: true }); | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var validate_npmrc_exports = {}; | ||
__export(validate_npmrc_exports, { | ||
validateNpmrc: () => validateNpmrc | ||
}); | ||
module.exports = __toCommonJS(validate_npmrc_exports); | ||
var import_path = require("path"); | ||
var import_promises = require("fs/promises"); | ||
async function validateNpmrc(cwd) { | ||
const npmrc = await (0, promises_1.readFile)((0, path_1.join)(cwd, '.npmrc'), 'utf-8').catch(err => { | ||
if (err.code !== 'ENOENT') | ||
throw err; | ||
}); | ||
const nodeRegExp = /(?<!#.*)use-node-version/; | ||
if (npmrc?.match(nodeRegExp)) { | ||
throw new Error('Detected unsupported "use-node-version" in your ".npmrc". Please use "engines" in your "package.json" instead.'); | ||
} | ||
const npmrc = await (0, import_promises.readFile)((0, import_path.join)(cwd, ".npmrc"), "utf-8").catch((err) => { | ||
if (err.code !== "ENOENT") | ||
throw err; | ||
}); | ||
const nodeRegExp = /(?<!#.*)use-node-version/; | ||
if (npmrc?.match(nodeRegExp)) { | ||
throw new Error( | ||
'Detected unsupported "use-node-version" in your ".npmrc". Please use "engines" in your "package.json" instead.' | ||
); | ||
} | ||
} | ||
exports.validateNpmrc = validateNpmrc; | ||
// Annotate the CommonJS export names for ESM import in node: | ||
0 && (module.exports = { | ||
validateNpmrc | ||
}); |
{ | ||
"name": "@vercel/build-utils", | ||
"version": "7.1.0", | ||
"version": "7.1.1", | ||
"license": "Apache-2.0", | ||
@@ -46,3 +46,3 @@ "main": "./dist/index.js", | ||
"node-fetch": "2.6.7", | ||
"semver": "6.1.1", | ||
"semver": "6.3.1", | ||
"typescript": "4.9.5", | ||
@@ -52,3 +52,3 @@ "yazl": "2.5.1" | ||
"scripts": { | ||
"build": "node build", | ||
"build": "node build.mjs", | ||
"test": "jest --reporters=default --reporters=jest-junit --env node --verbose --runInBand --bail", | ||
@@ -55,0 +55,0 @@ "test-unit": "pnpm test test/unit.*test.*", |
Sorry, the diff of this file is too big to display
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
21
1214089
27526