boxednode
Comparing version 2.4.0 to 2.4.1
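Nearly every hunk below is mechanical compiler-output churn: calls through imported namespace objects, such as util_1.promisify(...), are rewritten to the indirect-call form (0, util_1.promisify)(...), which newer TypeScript versions emit for calls to imported bindings. The comma expression evaluates to the bare function, so the call carries no receiver. A minimal standalone sketch of the difference (illustrative only, not boxednode code):

'use strict';

// Method-style call: the namespace object becomes `this` inside the callee.
// Indirect call via the comma operator: `this` is undefined in strict mode.
const util_1 = {
  promisify (fn) {
    console.log('receiver is util_1:', this === util_1);
    return fn;
  }
};

util_1.promisify(Math.max);      // receiver is util_1: true
(0, util_1.promisify)(Math.max); // receiver is util_1: false

Apart from this emit change, the only behavioral difference visible in the files shown here is the Windows vcbuild.bat invocation (see the sketch after the second file's hunks).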
@@ -15,3 +15,3 @@ "use strict";
 const events_1 = require("events");
-exports.pipeline = util_1.promisify(stream_1.default.pipeline);
+exports.pipeline = (0, util_1.promisify)(stream_1.default.pipeline);
 async function spawnBuildCommand(command, options) {
@@ -33,3 +33,3 @@ var _a, _b;
 });
-const [code] = await events_1.once(proc, 'exit');
+const [code] = await (0, events_1.once)(proc, 'exit');
 if (code !== 0) {
@@ -43,3 +43,3 @@ throw new Error(`Command failed: ${command.join(' ')} (code ${code})`);
 await fs_1.promises.mkdir(targetDir, { recursive: true });
-await exports.pipeline(tar_1.default.c({
+await (0, exports.pipeline)(tar_1.default.c({
 cwd: sourceDir,
@@ -97,3 +97,3 @@ gzip: false
 async function createCompressedBlobDefinition(fnName, source) {
-const compressed = await util_1.promisify(zlib_1.default.brotliCompress)(source, {
+const compressed = await (0, util_1.promisify)(zlib_1.default.brotliCompress)(source, {
 params: {
@@ -100,0 +100,0 @@ [zlib_1.default.constants.BROTLI_PARAM_QUALITY]: zlib_1.default.constants.BROTLI_MAX_QUALITY,
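The createCompressedBlobDefinition hunk above compresses the embedded blob with Brotli at maximum quality via a promisified zlib.brotliCompress. A small self-contained sketch of that call using only Node.js built-ins; the compress wrapper name is illustrative, not boxednode API:

'use strict';

const util = require('util');
const zlib = require('zlib');

// Promisified Brotli compression at maximum quality, mirroring the call
// shown in the hunk above.
const brotliCompress = util.promisify(zlib.brotliCompress);

async function compress(source) {
  return await brotliCompress(source, {
    params: {
      [zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MAX_QUALITY
    }
  });
}

compress(Buffer.from('console.log("hello from boxednode");'))
  .then((blob) => console.log(`compressed to ${blob.length} bytes`));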
@@ -34,3 +34,3 @@ 'use strict';
 await fs_1.promises.mkdir(dir, { recursive: true });
-await helpers_1.pipeline(fs_1.createReadStream(url_1.fileURLToPath(range)), zlib_1.default.createGunzip(), tar_1.default.x({
+await (0, helpers_1.pipeline)((0, fs_1.createReadStream)((0, url_1.fileURLToPath)(range)), zlib_1.default.createGunzip(), tar_1.default.x({
 cwd: dir
@@ -53,3 +53,3 @@ }));
 else {
-const ver = (await nv_1.default(range)).pop();
+const ver = (await (0, nv_1.default)(range)).pop();
 if (!ver) {
@@ -74,3 +74,3 @@ throw new Error(`No node version found for ${range}`);
 try {
-const shaSums = await node_fetch_1.default(shaSumsUrl);
+const shaSums = await (0, node_fetch_1.default)(shaSumsUrl);
 if (!shaSums.ok)
@@ -89,3 +89,3 @@ return;
 const hash = crypto_1.default.createHash('sha256');
-await helpers_1.pipeline(fs_1.createReadStream(cachedTarballPath), hash);
+await (0, helpers_1.pipeline)((0, fs_1.createReadStream)(cachedTarballPath), hash);
 return hash.digest('hex');
@@ -105,3 +105,3 @@ })()
 if (hasCachedTarball) {
-tarballStream = fs_1.createReadStream(cachedTarballPath);
+tarballStream = (0, fs_1.createReadStream)(cachedTarballPath);
 }
@@ -111,3 +111,3 @@ else {
 logger.stepStarting(`Downloading from ${url}`);
-const tarball = await node_fetch_1.default(url);
+const tarball = await (0, node_fetch_1.default)(url);
 if (!tarball.ok) {
@@ -129,3 +129,3 @@ throw new Error(`Could not download Node.js source tarball: ${tarball.statusText}`);
 tarballWritePromise =
-helpers_1.pipeline(tarball.body, fs_1.createWriteStream(cachedTarballPath));
+(0, helpers_1.pipeline)(tarball.body, (0, fs_1.createWriteStream)(cachedTarballPath));
 }
@@ -135,3 +135,3 @@ try {
 Promise.all([
-helpers_1.pipeline(tarballStream, zlib_1.default.createGunzip(), tar_1.default.x({
+(0, helpers_1.pipeline)(tarballStream, zlib_1.default.createGunzip(), tar_1.default.x({
 cwd: dir
@@ -141,3 +141,3 @@ })),
 ]),
-events_1.once(process, 'beforeExit').then(() => {
+(0, events_1.once)(process, 'beforeExit').then(() => {
 throw new Error('premature exit from the event loop');
@@ -188,3 +188,3 @@ })
 }
-await helpers_1.spawnBuildCommand(configure, options);
+await (0, helpers_1.spawnBuildCommand)(configure, options);
 if (configure.includes('--fully-static') || configure.includes('--partly-static')) {
@@ -214,3 +214,3 @@ for (const file of [
 }
-await helpers_1.spawnBuildCommand(make, options);
+await (0, helpers_1.spawnBuildCommand)(make, options);
 return path_1.default.join(sourcePath, 'out', 'Release', 'node');
@@ -233,3 +233,3 @@ }
 }
-await helpers_1.spawnBuildCommand(['.\\vcbuild.bat', ...vcbuildArgs], options);
+await (0, helpers_1.spawnBuildCommand)(['cmd', '/c', '.\\vcbuild.bat', ...vcbuildArgs], options);
 return path_1.default.join(sourcePath, 'Release', 'node.exe');
@@ -258,3 +258,3 @@ }
 for (const addon of (options.addons || [])) {
-const addonResult = await native_addons_1.modifyAddonGyp(addon, nodeSourcePath, options.env || process.env, logger);
+const addonResult = await (0, native_addons_1.modifyAddonGyp)(addon, nodeSourcePath, options.env || process.env, logger);
 for (const { linkedModuleName, targetName, registerFunction } of addonResult) {
@@ -268,6 +268,6 @@ requireMappings.push([addon.requireRegexp, linkedModuleName]);
 const nodeGypPath = path_1.default.join(nodeSourcePath, 'node.gyp');
-const nodeGyp = await native_addons_1.loadGYPConfig(nodeGypPath);
+const nodeGyp = await (0, native_addons_1.loadGYPConfig)(nodeGypPath);
 const mainTarget = nodeGyp.targets.find((target) => ['<(node_core_target_name)', 'node'].includes(target.target_name));
 mainTarget.dependencies = [...(mainTarget.dependencies || []), ...extraGypDependencies];
-await native_addons_1.storeGYPConfig(nodeGypPath, nodeGyp);
+await (0, native_addons_1.storeGYPConfig)(nodeGypPath, nodeGyp);
 for (const header of ['node.h', 'node_api.h']) {
@@ -302,3 +302,3 @@ const source = (await fs_1.promises.readFile(path_1.default.join(nodeSourcePath, 'src', header), 'utf8') +
 const resPath = path_1.default.join(nodeSourcePath, 'src', 'res');
-await fs_1.promises.writeFile(path_1.default.join(resPath, 'node.rc'), await executable_metadata_1.generateRCFile(resPath, options.targetFile, options.executableMetadata));
+await fs_1.promises.writeFile(path_1.default.join(resPath, 'node.rc'), await (0, executable_metadata_1.generateRCFile)(resPath, options.targetFile, options.executableMetadata));
 logger.stepCompleted();
@@ -316,5 +316,5 @@ if (options.preCompileHook) {
 mainSource = mainSource.replace(/\bREPLACE_DEFINE_LINKED_MODULES\b/g, registerFunctions.map((fn) => `${fn},`).join(''));
-mainSource = mainSource.replace(/\bREPLACE_WITH_MAIN_SCRIPT_SOURCE_GETTER\b/g, helpers_1.createCppJsStringDefinition('GetBoxednodeMainScriptSource', snapshotMode !== 'consume' ? jsMainSource : '') + '\n' +
-await helpers_1.createCompressedBlobDefinition('GetBoxednodeCodeCache', codeCacheBlob) + '\n' +
-await helpers_1.createCompressedBlobDefinition('GetBoxednodeSnapshotBlob', snapshotBlob));
+mainSource = mainSource.replace(/\bREPLACE_WITH_MAIN_SCRIPT_SOURCE_GETTER\b/g, (0, helpers_1.createCppJsStringDefinition)('GetBoxednodeMainScriptSource', snapshotMode !== 'consume' ? jsMainSource : '') + '\n' +
+await (0, helpers_1.createCompressedBlobDefinition)('GetBoxednodeCodeCache', codeCacheBlob) + '\n' +
+await (0, helpers_1.createCompressedBlobDefinition)('GetBoxednodeSnapshotBlob', snapshotBlob));
 mainSource = mainSource.replace(/\bBOXEDNODE_CODE_CACHE_MODE\b/g, JSON.stringify(codeCacheMode));
@@ -353,3 +353,3 @@ if (options.useLegacyDefaultUvLoop) {
 await fs_1.promises.rm(intermediateFile, { force: true });
-await util_1.promisify(child_process_1.execFile)(binaryPath, { cwd: nodeSourcePath });
+await (0, util_1.promisify)(child_process_1.execFile)(binaryPath, { cwd: nodeSourcePath });
 const result = await fs_1.promises.readFile(intermediateFile);
@@ -374,3 +374,3 @@ if (result.length === 0) {
 logger.stepStarting('Cleaning temporary directory');
-await util_1.promisify(rimraf_1.default)(options.tmpdir, { glob: false });
+await (0, util_1.promisify)(rimraf_1.default)(options.tmpdir, { glob: false });
 logger.stepCompleted();
@@ -377,0 +377,0 @@ }
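The one substantive change in this file is the Windows build step: vcbuild.bat is now spawned through cmd /c instead of being executed directly. This is presumably because spawning .bat/.cmd files without a shell is unreliable on Windows and is rejected outright by recent Node.js releases (the CVE-2024-27980 hardening makes child_process.spawn() throw EINVAL for such files unless a shell is used). A hedged sketch of the pattern; runBuildStep is an illustrative stand-in for boxednode's spawnBuildCommand:

'use strict';

const { spawn } = require('child_process');
const { once } = require('events');

// Illustrative stand-in for spawnBuildCommand: run a command, wait for exit,
// and fail loudly on a non-zero exit code (matching the error text above).
async function runBuildStep(command, options = {}) {
  const [cmd, ...args] = command;
  const proc = spawn(cmd, args, { stdio: 'inherit', ...options });
  const [code] = await once(proc, 'exit');
  if (code !== 0) {
    throw new Error(`Command failed: ${command.join(' ')} (code ${code})`);
  }
}

// Old form (rejected by spawn() on patched Node.js versions on Windows):
//   await runBuildStep(['.\\vcbuild.bat', ...vcbuildArgs], options);
// New form, as in the hunk above:
//   await runBuildStep(['cmd', '/c', '.\\vcbuild.bat', ...vcbuildArgs], options);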
@@ -15,3 +15,3 @@ "use strict";
 try {
-return gyp_parser_1.parse(await fs_1.promises.readFile(filename, 'utf8'));
+return (0, gyp_parser_1.parse)(await fs_1.promises.readFile(filename, 'utf8'));
 }
@@ -50,3 +50,3 @@ catch (err) {
 `BOXEDNODE_MODULE_NAME=${linkedModuleName}`,
-`NAPI_CPP_CUSTOM_NAMESPACE=i${crypto_1.randomBytes(12).toString('hex')}`
+`NAPI_CPP_CUSTOM_NAMESPACE=i${(0, crypto_1.randomBytes)(12).toString('hex')}`
 ]) {
@@ -69,3 +69,3 @@ negDefines.delete(want);
 async function prepForUsageWithNode(config, nodeSourcePath) {
-const nodeGypDir = path_1.default.dirname(await pkg_up_1.default({ cwd: require.resolve('node-gyp') }));
+const nodeGypDir = path_1.default.dirname(await (0, pkg_up_1.default)({ cwd: require.resolve('node-gyp') }));
 (config.includes = config.includes || []).push(path_1.default.join(nodeGypDir, 'addon.gypi'));
@@ -87,7 +87,7 @@ config.variables = {
 logger.stepStarting(`Copying addon at ${addon.path}`);
-const addonId = helpers_1.objhash(addon);
+const addonId = (0, helpers_1.objhash)(addon);
 const addonPath = path_1.default.resolve(nodeSourcePath, 'deps', addonId);
-await helpers_1.copyRecursive(addon.path, addonPath);
+await (0, helpers_1.copyRecursive)(addon.path, addonPath);
 logger.stepCompleted();
-await helpers_1.spawnBuildCommand([...helpers_1.npm(), 'install', '--ignore-scripts', '--production'], {
+await (0, helpers_1.spawnBuildCommand)([...(0, helpers_1.npm)(), 'install', '--ignore-scripts', '--production'], {
 cwd: addonPath,
@@ -94,0 +94,0 @@ logger,
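The prepForUsageWithNode hunk above locates node-gyp's package directory via pkg-up so that the addon.gypi bundled with it can be added to the addon's includes. A hedged sketch of that lookup, assuming the CommonJS pkg-up 3.x API that the compiled output suggests; findAddonGypi is an illustrative helper, not boxednode API:

'use strict';

const path = require('path');
const pkgUp = require('pkg-up');

// Walk upward from node-gyp's resolved entry point to its package.json,
// then point at the addon.gypi that ships alongside it.
async function findAddonGypi() {
  const nodeGypDir = path.dirname(await pkgUp({ cwd: require.resolve('node-gyp') }));
  return path.join(nodeGypDir, 'addon.gypi');
}

findAddonGypi().then((gypi) => console.log(gypi));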
 {
 "name": "boxednode",
-"version": "2.4.0",
+"version": "2.4.1",
 "description": "Create a shippable binary from a JS file",
@@ -5,0 +5,0 @@ "main": "lib/index.js",
Sorry, the diffs of 5 further files are not supported yet.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package