New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

boxednode

Package Overview
Dependencies
Maintainers
30
Versions
42
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

boxednode - npm Package Compare versions

Comparing version

to
2.0.0

12

bin/boxednode.js

@@ -16,3 +16,3 @@ #!/usr/bin/env node

.option('node-version', {
alias: 'n', type: 'string', desc: 'Node.js version or semver version range', default: '*'
alias: 'n', type: 'string', desc: 'Node.js version or semver version range or .tar.gz file url', default: '*'
})

@@ -31,2 +31,8 @@ .option('configure-args', {

})
.option('use-legacy-default-uv-loop', {
type: 'boolean', desc: 'Use the global singleton libuv event loop rather than a separate local one'
})
.option('use-code-cache', {
alias: 'H', type: 'boolean', desc: 'Use Node.js code cache support to speed up startup'
})
.example('$0 -s myProject.js -t myProject.exe -n ^14.0.0',

@@ -47,3 +53,5 @@ 'Create myProject.exe from myProject.js using Node.js v14')

makeArgs: (argv.M || '').split(',').filter(Boolean),
namespace: argv.N
namespace: argv.N,
useLegacyDefaultUvLoop: argv.useLegacyDefaultUvLoop,
useCodeCache: argv.H
});

@@ -50,0 +58,0 @@ } catch (err) {

@@ -17,1 +17,3 @@ /// <reference types="node" />

export declare function npm(): string[];
export declare function createCppJsStringDefinition(fnName: string, source: string): string;
export declare function createCompressedBlobDefinition(fnName: string, source: Uint8Array): Promise<string>;

@@ -6,3 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.npm = exports.objhash = exports.copyRecursive = exports.spawnBuildCommand = exports.pipeline = void 0;
exports.createCompressedBlobDefinition = exports.createCppJsStringDefinition = exports.npm = exports.objhash = exports.copyRecursive = exports.spawnBuildCommand = exports.pipeline = void 0;
const fs_1 = require("fs");

@@ -14,2 +14,3 @@ const crypto_1 = __importDefault(require("crypto"));

const stream_1 = __importDefault(require("stream"));
const zlib_1 = __importDefault(require("zlib"));
const events_1 = require("events");

@@ -66,2 +67,75 @@ exports.pipeline = (0, util_1.promisify)(stream_1.default.pipeline);

exports.npm = npm;
// Render C++ source text defining a function `fnName` that rebuilds `source`
// as a v8::Local<String>. When every UTF-16 code unit of `source` fits in one
// byte, a Latin-1 (uint8_t) static array and NewFromOneByte are emitted;
// otherwise a uint16_t array and NewFromTwoByte. A static_assert guards the
// length against v8::String::kMaxLength at C++ compile time.
function createCppJsStringDefinition(fnName, source) {
    const codeUnits = new Uint16Array(source.length);
    let latin1Only = true;
    for (let idx = 0; idx < source.length; idx++) {
        const unit = source.charCodeAt(idx);
        codeUnits[idx] = unit;
        if (unit > 0xFF) {
            latin1Only = false;
        }
    }
    return `
static const ${latin1Only ? 'uint8_t' : 'uint16_t'} ${fnName}_source_[] = {
${codeUnits}
};
static_assert(
${codeUnits.length} <= v8::String::kMaxLength,
"main script source exceeds max string length");
Local<String> ${fnName}(Isolate* isolate) {
return v8::String::NewFrom${latin1Only ? 'One' : 'Two'}Byte(
isolate,
${fnName}_source_,
v8::NewStringType::kNormal,
${codeUnits.length}).ToLocalChecked();
}
`;
}
exports.createCppJsStringDefinition = createCppJsStringDefinition;
// Brotli-compresses `source` (a Uint8Array) at maximum quality and returns
// C++ source text defining three helpers named after `fnName`:
//   - `${fnName}()`             -> std::string with the decompressed bytes
//   - `${fnName}BackingStore()` -> v8 SharedArrayBuffer backing store that
//                                  owns a heap-allocated copy of that string
//   - `${fnName}Buffer(isolate)`-> v8::Uint8Array view over the data
// The zero-length case is special-cased so the emitted C++ skips the Brotli
// decode entirely and allocates an empty SharedArrayBuffer.
async function createCompressedBlobDefinition(fnName, source) {
// SIZE_HINT lets Brotli pre-size internal buffers for the known input length.
const compressed = await (0, util_1.promisify)(zlib_1.default.brotliCompress)(source, {
params: {
[zlib_1.default.constants.BROTLI_PARAM_QUALITY]: zlib_1.default.constants.BROTLI_MAX_QUALITY,
[zlib_1.default.constants.BROTLI_PARAM_SIZE_HINT]: source.length
}
});
// Uint8Array.prototype.toString renders the compressed bytes as a
// comma-separated decimal list, valid as a C++ array initializer.
// The decompressed size is baked in as `decoded_size` and asserted after
// BrotliDecoderDecompress; the backing-store deleter frees the std::string.
return `
static const uint8_t ${fnName}_source_[] = {
${Uint8Array.prototype.toString.call(compressed)}
};
std::string ${fnName}() {
${source.length === 0 ? 'return {};' : `
size_t decoded_size = ${source.length};
std::string dst(decoded_size, 0);
const auto result = BrotliDecoderDecompress(
${compressed.length},
${fnName}_source_,
&decoded_size,
reinterpret_cast<uint8_t*>(&dst[0]));
assert(result == BROTLI_DECODER_RESULT_SUCCESS);
assert(decoded_size == ${source.length});
return dst;`}
}
std::shared_ptr<v8::BackingStore> ${fnName}BackingStore() {
std::string* str = new std::string(std::move(${fnName}()));
return v8::SharedArrayBuffer::NewBackingStore(
&str->front(),
str->size(),
[](void*, size_t, void* deleter_data) {
delete static_cast<std::string*>(deleter_data);
},
static_cast<void*>(str));
}
v8::Local<v8::Uint8Array> ${fnName}Buffer(v8::Isolate* isolate) {
${source.length === 0 ? `
auto array_buffer = v8::SharedArrayBuffer::New(isolate, 0);
` : `
auto array_buffer = v8::SharedArrayBuffer::New(isolate, ${fnName}BackingStore());
`}
return v8::Uint8Array::New(array_buffer, 0, array_buffer->ByteLength());
}
`;
}
exports.createCompressedBlobDefinition = createCompressedBlobDefinition;
//# sourceMappingURL=helpers.js.map

@@ -18,2 +18,4 @@ import { Logger } from './logger';

enableBindingsPatch?: boolean;
useLegacyDefaultUvLoop?: boolean;
useCodeCache?: boolean;
executableMetadata?: ExecutableMetadata;

@@ -20,0 +22,0 @@ preCompileHook?: (nodeSourceTree: string, options: CompilationOptions) => void | Promise<void>;

119

lib/index.js

@@ -9,3 +9,2 @@ 'use strict';

const node_fetch_1 = __importDefault(require("node-fetch"));
const semver_1 = __importDefault(require("semver"));
const tar_1 = __importDefault(require("tar"));

@@ -23,4 +22,25 @@ const path_1 = __importDefault(require("path"));

const nv_1 = __importDefault(require("@pkgjs/nv"));
const url_1 = require("url");
const child_process_1 = require("child_process");
async function getNodeSourceForVersion(range, dir, logger, retries = 2) {
logger.stepStarting(`Looking for Node.js version matching ${JSON.stringify(range)}`);
let inputIsFileUrl = false;
try {
inputIsFileUrl = new url_1.URL(range).protocol === 'file:';
}
catch (_a) { }
if (inputIsFileUrl) {
logger.stepStarting(`Extracting tarball from ${range} to ${dir}`);
await fs_1.promises.mkdir(dir, { recursive: true });
await (0, helpers_1.pipeline)((0, fs_1.createReadStream)((0, url_1.fileURLToPath)(range)), zlib_1.default.createGunzip(), tar_1.default.x({
cwd: dir
}));
logger.stepCompleted();
const filesInDir = await fs_1.promises.readdir(dir, { withFileTypes: true });
const dirsInDir = filesInDir.filter(f => f.isDirectory());
if (dirsInDir.length !== 1) {
throw new Error('Node.js tarballs should contain exactly one directory');
}
return path_1.default.join(dir, dirsInDir[0].name);
}
const ver = (await (0, nv_1.default)(range)).pop();

@@ -38,3 +58,3 @@ if (!ver) {

}
catch (_a) { }
catch (_b) { }
if (hasCachedTarball) {

@@ -114,4 +134,12 @@ const shaSumsUrl = `${releaseBaseUrl}/SHASUMS256.txt`;

logger.stepCompleted();
return [version, path_1.default.join(dir, `node-${version}`)];
return path_1.default.join(dir, `node-${version}`);
}
// Read src/node_version.h inside a Node.js source tree and return the
// [major, minor, patch] version triple. A component that cannot be matched
// in the header parses to NaN (same as the previous unary-plus behavior).
async function getNodeVersionFromSourceDirectory(dir) {
    const headerPath = path_1.default.join(dir, 'src', 'node_version.h');
    const contents = await fs_1.promises.readFile(headerPath, 'utf8');
    // Extract one numeric #define (e.g. NODE_MAJOR_VERSION) from the header.
    const component = (name) => {
        const re = new RegExp(`^#define\\s+NODE_${name}_VERSION\\s+(?<version>\\d+)\\s*$`, 'm');
        const match = contents.match(re);
        return match && match.groups ? +match.groups.version : NaN;
    };
    return [component('MAJOR'), component('MINOR'), component('PATCH')];
}
async function compileNode(sourcePath, linkedJSModules, buildArgs, makeArgs, env, logger) {

@@ -125,2 +153,11 @@ logger.stepStarting('Compiling Node.js from source');

};
const nodeVersion = await getNodeVersionFromSourceDirectory(sourcePath);
if (nodeVersion[0] > 19 || (nodeVersion[0] === 19 && nodeVersion[1] >= 4)) {
if (process.platform !== 'win32') {
buildArgs = ['--disable-shared-readonly-heap', ...buildArgs];
}
else {
buildArgs = ['no-shared-roheap', ...buildArgs];
}
}
if (process.platform !== 'win32') {

@@ -143,2 +180,6 @@ const configure = ['./configure', ...buildArgs];

else {
await fs_1.promises.rm(path_1.default.join(sourcePath, 'out', 'Release'), {
recursive: true,
force: true
});
const vcbuildArgs = [...buildArgs, ...makeArgs, 'projgen'];

@@ -168,9 +209,10 @@ if (!vcbuildArgs.includes('debug') && !vcbuildArgs.includes('release')) {

}
const [nodeVersion, nodeSourcePath] = await getNodeSourceForVersion(options.nodeVersionRange, options.tmpdir, logger);
const nodeSourcePath = await getNodeSourceForVersion(options.nodeVersionRange, options.tmpdir, logger);
const requireMappings = [];
const extraJSSourceFiles = [];
const enableBindingsPatch = (_a = options.enableBindingsPatch) !== null && _a !== void 0 ? _a : ((_b = options.addons) === null || _b === void 0 ? void 0 : _b.length) > 0;
if (semver_1.default.gte(nodeVersion, '12.19.0')) {
const jsMainSource = await fs_1.promises.readFile(options.sourceFile, 'utf8');
const registerFunctions = [];
{
const extraGypDependencies = [];
const registerFunctions = [];
for (const addon of (options.addons || [])) {

@@ -196,30 +238,7 @@ const addonResult = await (0, native_addons_1.modifyAddonGyp)(addon, nodeSourcePath, options.env || process.env, logger);

logger.stepCompleted();
logger.stepStarting('Handling main file source');
let mainSource = await fs_1.promises.readFile(path_1.default.join(__dirname, '..', 'resources', 'main-template.cc'), 'utf8');
mainSource = mainSource.replace(/\bREPLACE_WITH_ENTRY_POINT\b/g, JSON.stringify(JSON.stringify(`${namespace}/${namespace}`)));
mainSource = mainSource.replace(/\bREPLACE_DECLARE_LINKED_MODULES\b/g, registerFunctions.map((fn) => `void ${fn}(const void**,const void**);\n`).join(''));
mainSource = mainSource.replace(/\bREPLACE_DEFINE_LINKED_MODULES\b/g, registerFunctions.map((fn) => `${fn},`).join(''));
await fs_1.promises.writeFile(path_1.default.join(nodeSourcePath, 'src', 'node_main.cc'), mainSource);
logger.stepCompleted();
}
else {
let tpmSource = await fs_1.promises.readFile(path_1.default.join(__dirname, '..', 'resources', 'third_party_main.js'), 'utf8');
tpmSource = tpmSource.replace(/\bREPLACE_WITH_ENTRY_POINT\b/g, JSON.stringify(`${namespace}/${namespace}`));
await fs_1.promises.writeFile(path_1.default.join(nodeSourcePath, 'lib', '_third_party_main.js'), tpmSource);
extraJSSourceFiles.push('./lib/_third_party_main.js');
let nodeCCSource = await fs_1.promises.readFile(path_1.default.join(nodeSourcePath, 'src', 'node.cc'), 'utf8');
nodeCCSource = nodeCCSource.replace(/ProcessGlobalArgs\((?:[^{};]|[\r\n])*?kDisallowedInEnvironment(?:[^{}]|[\r\n])*?\)/, '0');
await fs_1.promises.writeFile(path_1.default.join(nodeSourcePath, 'src', 'node.cc'), nodeCCSource);
if (options.addons && options.addons.length > 0) {
logger.stepStarting('Handling linked addons');
logger.stepFailed(new Error('Addons are not supported on Node v12.x, ignoring...'));
}
}
logger.stepStarting('Inserting custom code into Node.js source');
await fs_1.promises.mkdir(path_1.default.join(nodeSourcePath, 'lib', namespace), { recursive: true });
const source = await fs_1.promises.readFile(options.sourceFile, 'utf8');
await fs_1.promises.writeFile(path_1.default.join(nodeSourcePath, 'lib', namespace, `${namespace}_src.js`), `module.exports = ${JSON.stringify(source)}`);
let entryPointTrampolineSource = await fs_1.promises.readFile(path_1.default.join(__dirname, '..', 'resources', 'entry-point-trampoline.js'), 'utf8');
entryPointTrampolineSource = entryPointTrampolineSource.replace(/\bREPLACE_WITH_BOXEDNODE_CONFIG\b/g, JSON.stringify({
srcMod: `${namespace}/${namespace}_src`,
requireMappings: requireMappings.map(([re, linked]) => [re.source, re.flags, linked]),

@@ -229,3 +248,3 @@ enableBindingsPatch

await fs_1.promises.writeFile(path_1.default.join(nodeSourcePath, 'lib', namespace, `${namespace}.js`), entryPointTrampolineSource);
extraJSSourceFiles.push(`./lib/${namespace}/${namespace}.js`, `./lib/${namespace}/${namespace}_src.js`);
extraJSSourceFiles.push(`./lib/${namespace}/${namespace}.js`);
logger.stepCompleted();

@@ -241,3 +260,41 @@ logger.stepStarting('Storing executable metadata');

}
const binaryPath = await compileNode(nodeSourcePath, extraJSSourceFiles, options.configureArgs, options.makeArgs, options.env || process.env, logger);
// Instantiate resources/main-template.cc with this build's entry point,
// linked-addon registration functions, embedded main-script source, and the
// given code cache blob, write it as src/node_main.cc in the Node.js source
// tree, then compile. Returns the compiled binary's path (via compileNode).
// NOTE(review): this is a closure — `logger`, `namespace`, `registerFunctions`,
// `jsMainSource`, `options`, `nodeSourcePath` and `extraJSSourceFiles` come
// from the enclosing function, which is not visible here; confirm their
// contracts against the full lib/index.js.
async function writeMainFileAndCompile({ codeCacheBlob, codeCacheMode }) {
logger.stepStarting('Handling main file source');
let mainSource = await fs_1.promises.readFile(path_1.default.join(__dirname, '..', 'resources', 'main-template.cc'), 'utf8');
// Double JSON.stringify: the template needs a C++ string literal whose
// contents are themselves a JSON string.
mainSource = mainSource.replace(/\bREPLACE_WITH_ENTRY_POINT\b/g, JSON.stringify(JSON.stringify(`${namespace}/${namespace}`)));
mainSource = mainSource.replace(/\bREPLACE_DECLARE_LINKED_MODULES\b/g, registerFunctions.map((fn) => `void ${fn}(const void**,const void**);\n`).join(''));
mainSource = mainSource.replace(/\bREPLACE_DEFINE_LINKED_MODULES\b/g, registerFunctions.map((fn) => `${fn},`).join(''));
// Embed the JS main source as a C++ string getter plus the (possibly empty)
// Brotli-compressed code cache blob.
mainSource = mainSource.replace(/\bREPLACE_WITH_MAIN_SCRIPT_SOURCE_GETTER\b/g, (0, helpers_1.createCppJsStringDefinition)('GetBoxednodeMainScriptSource', jsMainSource) + '\n' +
await (0, helpers_1.createCompressedBlobDefinition)('GetBoxednodeCodeCache', codeCacheBlob));
mainSource = mainSource.replace(/\bBOXEDNODE_CODE_CACHE_MODE\b/g, JSON.stringify(codeCacheMode));
if (options.useLegacyDefaultUvLoop) {
// Prepend the #define so the template opts into the global libuv loop.
mainSource = `#define BOXEDNODE_USE_DEFAULT_UV_LOOP 1\n${mainSource}`;
}
await fs_1.promises.writeFile(path_1.default.join(nodeSourcePath, 'src', 'node_main.cc'), mainSource);
logger.stepCompleted();
return await compileNode(nodeSourcePath, extraJSSourceFiles, options.configureArgs, options.makeArgs, options.env || process.env, logger);
}
let binaryPath;
if (!options.useCodeCache) {
binaryPath = await writeMainFileAndCompile({
codeCacheBlob: new Uint8Array(0),
codeCacheMode: 'ignore'
});
}
else {
binaryPath = await writeMainFileAndCompile({
codeCacheBlob: new Uint8Array(0),
codeCacheMode: 'generate'
});
logger.stepStarting('Running code cache generation');
const codeCacheResult = await (0, util_1.promisify)(child_process_1.execFile)(binaryPath, { encoding: 'buffer' });
if (codeCacheResult.stdout.length === 0) {
throw new Error('Empty code cache result');
}
logger.stepCompleted();
binaryPath = await writeMainFileAndCompile({
codeCacheBlob: codeCacheResult.stdout,
codeCacheMode: 'consume'
});
}
logger.stepStarting(`Moving resulting binary to ${options.targetFile}`);

@@ -244,0 +301,0 @@ await fs_1.promises.mkdir(path_1.default.dirname(options.targetFile), { recursive: true });

{
"name": "boxednode",
"version": "1.12.0",
"version": "2.0.0",
"description": "Create a shippable binary from a JS file",

@@ -5,0 +5,0 @@ "main": "lib/index.js",

@@ -5,8 +5,7 @@ 'use strict';

const path = require('path');
const assert = require('assert');
const {
srcMod,
requireMappings,
enableBindingsPatch
} = REPLACE_WITH_BOXEDNODE_CONFIG;
const src = require(srcMod);
const hydatedRequireMappings =

@@ -55,3 +54,3 @@ requireMappings.map(([re, reFlags, linked]) => [new RegExp(re, reFlags), linked]);

module.exports = (() => {
module.exports = (src, codeCacheMode, codeCache) => {
const __filename = process.execPath;

@@ -75,2 +74,3 @@ const __dirname = path.dirname(process.execPath);

process.argv.unshift(__filename);
process.boxednode = {};

@@ -85,8 +85,21 @@ const module = {

};
vm.compileFunction(src, [
const mainFunction = vm.compileFunction(src, [
'__filename', '__dirname', 'require', 'exports', 'module'
], {
filename: __filename
})(__filename, __dirname, require, exports, module);
filename: __filename,
cachedData: codeCache.length > 0 ? codeCache : undefined,
produceCachedData: codeCacheMode === 'generate'
});
if (codeCacheMode === 'generate') {
assert.strictEqual(mainFunction.cachedDataProduced, true);
process.stdout.write(mainFunction.cachedData);
return;
}
process.boxednode.hasCodeCache = codeCache.length > 0;
// https://github.com/nodejs/node/pull/46320
process.boxednode.rejectedCodeCache = mainFunction.cachedDataRejected;
mainFunction(__filename, __dirname, require, exports, module);
return module.exports;
})();
};

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet