node-pre-gyp - npm Package Compare versions

Comparing version 0.4.2 to 0.5.0

lib/pre-binding.js


CHANGELOG.md
# node-pre-gyp changelog
## 0.5.0
- Changed the property names in the `binary` object: the required properties are now `module_name`, `module_path`, and `host`.
- `module_path` now supports versioning, which allows developers to opt in to using a versioned install path (#18).
- Added `remote_path` which also supports versioning.
- Changed `remote_uri` to `host`.
## 0.4.2

@@ -4,0 +11,0 @@
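To make the rename concrete, here is how a 0.4.x `binary` block maps onto the 0.5.0 properties. The values are placeholders modeled on the test app configs further down in this diff, not a real package:

```js
// 0.4.x (removed): one remote_uri plus a tarball-name template
"binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/",
    "remote_uri": "https://your_module.s3-us-west-1.amazonaws.com",
    "template": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz"
}

// 0.5.0 (added): remote_uri becomes host, template splits into
// remote_path + package_name, and module_path may now be versioned
"binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/{node_abi}-{platform}-{arch}",
    "remote_path": "./{module_name}/v{version}",
    "package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz",
    "host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```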


lib/build.js

@@ -8,2 +8,6 @@

, compile = require('./util/compile.js')
, versioning = require('./util/versioning.js')
, path = require('path')
, fs = require('fs')
, mkdirp = require('mkdirp')

@@ -19,9 +23,32 @@ function build(gyp, argv, callback) {

var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
// options look different depending on whether node-pre-gyp is called directly
// or whether it is called from npm install, hence the following two lines.
var command_line_opts = (typeof(gyp.opts.argv.original) === 'string') ? JSON.parse(gyp.opts.argv).original : gyp.opts.argv.original || [];
command_line_opts = command_line_opts.filter(function(opt) { return opt.length > 2 && opt.slice(0,2) == '--'});
compile.run_gyp(gyp_args.concat(command_line_opts),gyp.opts,function(err,opts) {
// or whether it is called from npm install, hence the following line.
// TODO: check if this is really necessary with latest npm/nopt versions
var original_args = (typeof(gyp.opts.argv.original) === 'string') ? JSON.parse(gyp.opts.argv).original : gyp.opts.argv.original || [];
// add command line options to existing opts
original_args.forEach(function(opt) {
// we ignore any args like 'install' since we know
// we are either running 'build' or 'rebuild' but we
// do want to pass along to node-gyp/nw-gyp any command
// line options like --option or --option=value passed in
if (opt.length > 2 && opt.slice(0,2) == '--') {
var parts = opt.split('=');
if (parts.length > 1) {
var key = parts[0]
opts[key] = parts[1];
}
}
})
var command_line_args = [];
// turn back into command line options
Object.keys(opts).forEach(function(o) {
var val = opts[o];
if (val) {
command_line_args.push('--' + o + '=' + val);
}
})
compile.run_gyp(gyp_args.concat(command_line_args),opts,function(err,gopts) {
return callback(err);
});
}
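The new argument handling above folds any `--key=value` flags into the versioning `opts` object and then serializes `opts` back into flags for node-gyp/nw-gyp. A standalone sketch of that round-trip, with the leading `--` stripped before the key is stored so it is not doubled when re-serialized (names here are illustrative, not taken from the module):

```js
// Fold `--key=value` flags into opts, then turn opts back into flags.
function forward_flags(original_args, opts) {
    original_args.forEach(function(arg) {
        // skip bare commands like 'build'; keep only --key=value flags
        if (arg.length > 2 && arg.slice(0, 2) === '--') {
            var parts = arg.split('=');
            if (parts.length > 1) {
                opts[parts[0].slice(2)] = parts[1];
            }
        }
    });
    var flags = [];
    Object.keys(opts).forEach(function(key) {
        if (opts[key]) {
            flags.push('--' + key + '=' + opts[key]);
        }
    });
    return flags;
}

// forward_flags(['build', '--target_arch=ia32'], { configuration: 'Release' })
// => ['--configuration=Release', '--target_arch=ia32']
```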


lib/clean.js
module.exports = exports = clean
exports.usage = 'Removes the generated .node module'
exports.usage = 'Removes the entire folder containing the compiled .node module'

@@ -15,14 +15,11 @@ var fs = require('fs')

var package_json = JSON.parse(fs.readFileSync('./package.json'));
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
var to = package_json.binary.module_path;
var binary_module = path.join(to,opts.module_name + '.node');
exists(binary_module,function(found) {
if (found) {
console.log('Removing "%s"', binary_module)
return rm(binary_module, callback);
}
return callback();
})
});
var opts = versioning.evaluate(package_json, gyp.opts);
var to_delete = opts.module_path
exists(to_delete,function(found) {
if (found) {
console.log('['+package_json.name+'] Removing "%s"', to_delete)
return rm(to_delete, callback);
}
return callback();
})
}
module.exports = exports = unpublish
exports.usage = 'Fetches info on published binaries'
exports.usage = 'Lists all published binaries'

@@ -17,33 +17,31 @@ var fs = require('fs')

function unpublish(gyp, argv, callback) {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
if(!config.accessKeyId || !config.secretAccessKey) {
return callback(new Error("Unknown S3 `accessKeyId` and `secretAccessKey`"));
} else {
s3_setup.detect(package_json.binary.remote_uri,config);
AWS.config.update(config);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Prefix: config.prefix
};
s3.listObjects(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
return callback(new Error('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix));
} else if(err) {
return callback(err);
if(!config.accessKeyId || !config.secretAccessKey) {
return callback(new Error("Unknown S3 `accessKeyId` and `secretAccessKey`"));
} else {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
s3_setup.detect(opts.hosted_path,config);
AWS.config.update(config);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Prefix: config.prefix
};
s3.listObjects(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
return callback(new Error('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix));
} else if(err) {
return callback(err);
} else {
log.verbose(JSON.stringify(meta,null,1));
if (meta && meta.Contents) {
meta.Contents.forEach(function(obj) {
console.log(obj.Key);
});
} else {
log.verbose(JSON.stringify(meta,null,1));
if (meta && meta.Contents) {
meta.Contents.forEach(function(obj) {
console.log(obj.Key);
});
} else {
console.error('No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix )
}
return callback();
console.error('['+package_json.name+'] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix )
}
});
}
});
return callback();
}
});
}
}

@@ -11,9 +11,4 @@

, log = require('npmlog')
, semver = require('semver')
, request = require('request')
, win = process.platform == 'win32'
, os = require('os')
, existsAsync = fs.exists || path.exists
, cp = require('child_process')
, url = require('url')
, versioning = require('./util/versioning.js')

@@ -24,8 +19,8 @@ , compile = require('./util/compile.js')

function download(url,opts,callback) {
log.http('GET', url)
function download(uri,opts,callback) {
log.http('GET', uri)
var req = null
var requestOpts = {
uri: url
uri: uri
, headers: {

@@ -51,10 +46,10 @@ 'User-Agent': 'node-pre-gyp (node ' + process.version + ')'

} catch (e) {
callback(e)
return callback(e)
}
if (req) {
req.on('response', function (res) {
log.http(res.statusCode, url)
log.http(res.statusCode, uri)
})
}
callback(null,req);
return callback(null,req);
}

@@ -133,64 +128,59 @@

}
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
var base = package_json.binary.remote_uri;
// url.resolve needs single trailing slash
// to behave correctly, otherwise a double slash
// may end up in the url which breaks request
if (base.slice(-1) != '/') {
base = base + '/';
try {
var opts = versioning.evaluate(package_json, gyp.opts);
} catch (err) {
return callback(err);
}
var from = opts.hosted_tarball;
var to = opts.module_path;
var binary_module = path.join(to,opts.module_name + '.node');
if (existsAsync(binary_module,function(found) {
if (found) {
test_binary.validate(opts,function(err) {
if (err) {
console.error('['+package_json.name+'] ' + err.message);
log.error("Testing local pre-built binary failed, attempting to re-download");
place_binary(from,to,opts,function(err) {
if (err && should_do_fallback_build) {
log.info('build','source compile required');
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed');
return callback();
}
});
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" already installed');
console.log('Run pass --build-from-source to compile');
return callback();
}
});
} else {
log.info('check','checked for "' + binary_module + '" (not found)')
place_binary(from,to,opts,function(err) {
if (err && should_do_fallback_build) {
log.error('Source compile required: ' + err.message);
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
test_binary.validate(opts,function(err) {
if (err && should_do_fallback_build) {
console.error('['+package_json.name+'] ' + err.message);
log.error("Testing pre-built binary failed, attempting to source compile");
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed');
return callback();
}
});
};
});
}
var from = url.resolve(base,opts.versioned);
var to = package_json.binary.module_path;
var binary_module = path.join(to,opts.module_name + '.node');
if (existsAsync(binary_module,function(found) {
if (found) {
test_binary.validate(opts,function(err) {
if (err) {
console.error(err.message);
log.error("Testing local pre-built binary failed, attempting to re-download");
place_binary(from,to,opts,function(err) {
if (err && should_do_fallback_build) {
log.info('build','source compile required');
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed');
return callback();
}
});
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" already installed');
console.log('Run pass --build-from-source to compile');
return callback();
}
});
} else {
log.info('check','checked for "' + path.join(process.cwd(),binary_module) + '" (not found)')
place_binary(from,to,opts,function(err) {
if (err && should_do_fallback_build) {
log.error('Source compile required: ' + err.message);
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
test_binary.validate(opts,function(err) {
if (err && should_do_fallback_build) {
console.error(err.message);
log.error("Testing pre-built binary failed, attempting to source compile");
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed');
return callback();
}
});
};
});
}
}));
});
}));
}
}
};
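Condensing the interleaved old and new code above, the 0.5.0 install path follows one decision flow: prefer a working local binary, otherwise download the hosted tarball, and only fall back to a source compile when `--fallback-to-build` allows it. A minimal sketch of that flow with placeholder helpers (not the actual source):

```js
// Placeholder helpers stand in for exists / test_binary.validate /
// place_binary / do_build from the code above.
function install_flow(opts, should_do_fallback_build, helpers, callback) {
    function fallback(err) {
        if (err && should_do_fallback_build) return helpers.build(callback);
        return callback(err);
    }
    helpers.exists(opts.binary_module, function(found) {
        if (found) {
            // A local binary exists: keep it unless validation fails.
            return helpers.validate(opts, function(err) {
                if (!err) return callback();
                helpers.download_and_place(opts, fallback);
            });
        }
        // No local binary: fetch the hosted tarball, then validate it.
        helpers.download_and_place(opts, function(err) {
            if (err) return fallback(err);
            helpers.validate(opts, fallback);
        });
    });
}
```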

@@ -6,3 +6,3 @@

module.exports = exports = binary
module.exports = exports;

@@ -38,11 +38,5 @@ /**

/**
* The `binary` function.
*/
exports.find = require('./pre-binding').find;
function binary () {
return new Binary()
}
function Binary () {
function Run() {
var self = this

@@ -59,5 +53,5 @@

}
inherits(Binary, EE)
exports.Binary = Binary
var proto = Binary.prototype
inherits(Run, EE)
exports.Run = Run
var proto = Run.prototype

@@ -64,0 +58,0 @@ /**
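The `find` function re-exported here lives in `lib/pre-binding.js`, whose diff is collapsed above. Judging from the `module_root` handling added to `versioning.evaluate` and from how the test apps call `binary.find(path.resolve(path.join(__dirname,'./package.json')))`, it plausibly resolves the versioned binary path like the hypothetical sketch below; this is an assumption, not the file's actual contents:

```js
// Hypothetical sketch: resolve the installed .node path for an addon,
// relative to that addon's own package.json.
var path = require('path');
var versioning = require('./util/versioning.js');

module.exports.find = function(package_json_path, opts) {
    var package_json = require(package_json_path);
    opts = opts || {};
    // resolve module_path relative to the addon, not process.cwd()
    opts.module_root = path.dirname(package_json_path);
    var meta = versioning.evaluate(package_json, opts);
    return path.join(meta.module_path, meta.module_name + '.node');
};
```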

module.exports = exports = package
exports.usage = 'Packs binary into tarball'
exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball'

@@ -18,29 +18,27 @@ var fs = require('fs')

var package_json = JSON.parse(fs.readFileSync('./package.json'));
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
var from = package_json.binary.module_path;
var binary_module = path.join(from,opts.module_name + '.node');
existsAsync(binary_module,function(found) {
if (!found) {
return callback(new Error("Cannot package because " + binary_module + " missing: run `node-pre-gyp rebuild` first"))
}
var tarball = path.join('build/stage',opts.versioned);
var basedir = path.basename(from);
var filter_func = function (entry) {
console.log('packing ' + entry.path);
return true;
}
mkdirp(path.dirname(tarball),function(err) {
pack(from, { filter: filter_func })
.pipe(write(tarball))
.on('error', function (err) {
return callback(err);
})
.on('close', function () {
log.info('install','Binary staged at "' + tarball + '"');
return callback();
})
});
var opts = versioning.evaluate(package_json, gyp.opts);
var from = opts.module_path;
var binary_module = path.join(from,opts.module_name + '.node');
existsAsync(binary_module,function(found) {
if (!found) {
return callback(new Error("Cannot package because " + binary_module + " missing: run `node-pre-gyp rebuild` first"))
}
var tarball = opts.staged_tarball;
var basedir = path.basename(from);
var filter_func = function (entry) {
console.log('['+package_json.name+'] packing ' + entry.path);
return true;
}
mkdirp(path.dirname(tarball),function(err) {
pack(from, { filter: filter_func })
.pipe(write(tarball))
.on('error', function (err) {
return callback(err);
})
.on('close', function () {
log.info('install','Binary staged at "' + tarball + '"');
return callback();
})
});
});
}

@@ -18,45 +18,42 @@

function publish(gyp, argv, callback) {
if(!config.accessKeyId || !config.secretAccessKey) {
return callback(new Error("Unknown S3 `accessKeyId` and `secretAccessKey`"));
}
var package_json = JSON.parse(fs.readFileSync('./package.json'));
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
var tarball = path.join('build/stage',opts.versioned);
existsAsync(tarball,function(found) {
if (!found) {
return callback(new Error("Cannot publish because " + tarball + " missing: run `node-pre-gyp rebuild` first"))
}
if(!config.accessKeyId || !config.secretAccessKey) {
return callback(new Error("Unknown S3 `accessKeyId` and `secretAccessKey`"));
} else {
s3_setup.detect(package_json.binary.remote_uri,config);
AWS.config.update(config);
var opts = versioning.evaluate(package_json, gyp.opts);
var tarball = opts.staged_tarball;
existsAsync(tarball,function(found) {
if (!found) {
return callback(new Error("Cannot publish because " + tarball + " missing: run `node-pre-gyp package` first"))
}
s3_setup.detect(opts.hosted_path,config);
var key_name = path.join(config.prefix,opts.package_name)
AWS.config.update(config);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Key: key_name
};
s3.headObject(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
// we are safe to publish because
// the object does not already exist
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Key: path.join(config.prefix,opts.versioned)
};
s3.headObject(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
// we are safe to publish because
// the object does not already exist
var s3 = new AWS.S3();
var s3_obj_opts = { ACL: config.acl,
Body: fs.readFileSync(tarball),
Bucket: config.bucket,
Key: path.join(config.prefix,opts.versioned)
};
s3.putObject(s3_obj_opts, function(err, resp){
if(err) return callback(err);
console.log('['+package_json.name+'] Success: published to https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
});
} else if(err) {
return callback(err);
} else {
log.error('publish','Cannot publish over existing version');
log.error('publish',"Update the 'version' field in package.json and try again");
log.error('publish','If the previous version was published in error see:');
log.error('publish','\t node-pre-gyp unpublish');
return callback(new Error('Failed to publish "'+s3_opts.Key + '"'));
}
var s3_obj_opts = { ACL: config.acl,
Body: fs.readFileSync(tarball),
Bucket: config.bucket,
Key: key_name
};
s3.putObject(s3_obj_opts, function(err, resp){
if(err) return callback(err);
console.log('['+package_json.name+'] Success: published to https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
});
} else if(err) {
return callback(err);
} else {
log.error('publish','Cannot publish over existing version');
log.error('publish',"Update the 'version' field in package.json and try again");
log.error('publish','If the previous version was published in error see:');
log.error('publish','\t node-pre-gyp unpublish');
return callback(new Error('Failed to publish "' + s3_opts.Key + '"'));
}

@@ -63,0 +60,0 @@ });

module.exports = exports = testpackage
exports.usage = 'Tests that staged package is valid'
exports.usage = 'Tests that the staged package is valid'
var fs = require('fs')
, tar = require('tar')
, path = require('path')
, zlib = require('zlib')
, log = require('npmlog')
, semver = require('semver')
, request = require('request')
, win = process.platform == 'win32'
, os = require('os')
, existsAsync = fs.exists || path.exists
, existsSync = fs.existsSync || path.existsSync
, cp = require('child_process')
, versioning = require('./util/versioning.js')
, compile = require('./util/compile.js')
, test_binary = require('./util/test_binary.js')

@@ -26,27 +17,23 @@ , read = require('fs').createReadStream

var package_json = JSON.parse(fs.readFileSync('./package.json'));
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
var tarball = path.join('build/stage',opts.versioned);
var to = package_json.binary.module_path;
if (existsSync(to) && !gyp.opts['overwrite']) {
return callback(new Error('WARNING: ' + to + ' already exists and will be overwritten: pass --overwrite to confirm'));
var opts = versioning.evaluate(package_json, gyp.opts);
var tarball = opts.staged_tarball;
existsAsync(tarball, function(found) {
if (!found) {
return callback(new Error("Cannot test package because " + tarball + " missing: run `node-pre-gyp package` first"))
}
existsAsync(tarball,function(found) {
if (!found) {
return callback(new Error("Cannot test package because " + tarball + " missing: run `node-pre-gyp package` first"))
}
read(tarball)
.pipe(unpack(to, function (err) {
if (err) return callback(err);
test_binary.validate(opts,function(err) {
if (err) {
return callback(err);
} else {
console.log("Package appears valid");
return callback();
}
});
}))
});
var to = opts.module_path;
existsAsync(to, function(found) {
read(tarball).pipe(unpack(to, function (err) {
if (err) return callback(err);
test_binary.validate(opts,function(err) {
if (err) {
return callback(err);
} else {
console.log('['+package_json.name+'] Package appears valid');
return callback();
}
});
}))
});
});
}

@@ -11,4 +11,2 @@

, s3_setup = require('./util/s3_setup.js')
, mkdirp = require('mkdirp')
, existsAsync = fs.exists || path.exists
, AWS = require("aws-sdk")

@@ -18,32 +16,31 @@ , config = require('rc')("node_pre_gyp",{acl:"public-read"});

function unpublish(gyp, argv, callback) {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
versioning.evaluate(package_json, gyp.opts, function(err,opts) {
if (err) return callback(err);
if(!config.accessKeyId || !config.secretAccessKey) {
return callback(new Error("Unknown S3 `accessKeyId` and `secretAccessKey`"));
} else {
s3_setup.detect(package_json.binary.remote_uri,config);
AWS.config.update(config);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Key: path.join(config.prefix,opts.versioned)
};
s3.headObject(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
if(!config.accessKeyId || !config.secretAccessKey) {
return callback(new Error("Unknown S3 `accessKeyId` and `secretAccessKey`"));
} else {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
s3_setup.detect(opts.hosted_path,config);
AWS.config.update(config);
var key_name = path.join(config.prefix,opts.package_name)
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Key: key_name
};
s3.headObject(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
} else if(err) {
return callback(err);
} else {
log.info(JSON.stringify(meta));
s3.deleteObject(s3_opts, function(err, resp){
if (err) return callback(err);
log.info(JSON.stringify(resp));
console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
} else if(err) {
return callback(err);
} else {
log.info(JSON.stringify(meta));
s3.deleteObject(s3_opts, function(err, resp){
if (err) return callback(err);
log.info(JSON.stringify(resp));
console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
})
}
});
}
});
})
}
});
}
}
{
"0.9.1": "0.11.9",
"0.9.0": "0.11.9",
"0.8.4": "0.10.22",
"0.8.3": "0.10.22",

@@ -4,0 +6,0 @@ "0.8.2": "0.10.22",

module.exports = exports;
var fs = require('fs')
, tar = require('tar')
, path = require('path')
, zlib = require('zlib')
, log = require('npmlog')
, semver = require('semver')
, request = require('request')
, win = process.platform == 'win32'
, os = require('os')
, existsAsync = fs.exists || path.exists
, url = require('url')
, cp = require('child_process')
var url = require('url')
module.exports.detect = function(to,config) {

@@ -19,0 +7,0 @@ var uri = url.parse(to);

module.exports = exports;
var fs = require('fs')
, tar = require('tar')
, path = require('path')
, zlib = require('zlib')
, log = require('npmlog')
, semver = require('semver')
, request = require('request')
, win = process.platform == 'win32'
, os = require('os')
, existsAsync = fs.exists || path.exists
var log = require('npmlog')
, cp = require('child_process')

@@ -25,4 +16,10 @@

if (nw) {
shell_cmd = 'node-webkit';
options.timeout = 5000;
if (process.platform === 'darwin') {
shell_cmd = 'node-webkit';
} else if (process.platform === 'win32') {
shell_cmd = 'nw.exe';
} else {
shell_cmd = 'nw';
}
} else if (process.platform === 'darwin' && arch_names[opts.target_arch]) {

@@ -39,3 +36,3 @@ shell_cmd = 'arch';

args.push(opts.module_main);
log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' '));
log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) {

@@ -42,0 +39,0 @@ // check for normal timeout for node-webkit

module.exports = exports;
var fs = require('fs')
, tar = require('tar')
, path = require('path')
, zlib = require('zlib')
, log = require('npmlog')
var path = require('path')
, semver = require('semver')
, request = require('request')
, win = process.platform == 'win32'
, os = require('os')
, url = require('url')
, existsAsync = fs.exists || path.exists
, cp = require('child_process')
, abi_crosswalk = require('./abi_crosswalk.json')
, nw_crosswalk = require('./nw_crosswalk.json')
function eval_template(template,opts) {
Object.keys(opts).forEach(function(key) {
var pattern = '{'+key+'}';
while (template.indexOf(pattern) > -1) {
template = template.replace(pattern,opts[key]);
}
});
return template;
}
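`eval_template` is plain `{key}` substitution over the computed opts, and it is what makes the new versioned `module_path` work. For example (values invented for illustration):

```js
var opts = { node_abi: 'node-v11', platform: 'linux', arch: 'x64' };
eval_template('./lib/binding/{node_abi}-{platform}-{arch}', opts);
// => './lib/binding/node-v11-linux-x64'
```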

@@ -63,29 +45,36 @@ function get_node_abi(runtime, target) {

var required_parameters = [
'module_name',
'module_path',
'host'
];
function validate_config(package_json,callback) {
var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n';
var missing = [];
if (!package_json.main) {
return callback(new Error(msg+"package.json must declare 'main'"));
missing.push('main');
}
if (!package_json.binary) {
return callback(new Error(msg+"package.json must declare 'binary' property.\nSee https://github.com/springmeyer/node-pre-gyp#design for details\n"));
missing.push('binary');
}
var o = package_json.binary;
if (!o.module_path) {
return callback(new Error(msg+"package.json must declare 'binary.module_path'"));
required_parameters.forEach(function(p) {
if (missing.indexOf('binary') > -1) {
missing.pop('binary');
}
if (!o || o[p] == undefined) {
missing.push('binary.' + p);
}
});
if (missing.length >= 1) {
throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n'));
}
if (!o.module_name) {
return callback(new Error(msg+"package.json must declare 'binary.module_name'"));
if (o) {
// enforce https over http
var protocol = url.parse(o.host).protocol;
if (protocol === 'http:') {
throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted");
}
}
if (!o.template) {
return callback(new Error(msg+"package.json must declare 'binary.template'"));
}
if (!o.remote_uri) {
return callback(new Error(msg+"package.json must declare 'binary.remote_uri'"));
}
// enforce `remote_uri` as https
var protocol = url.parse(o.remote_uri).protocol;
if (protocol !== 'https:') {
return callback(new Error("'remote_uri' protocol ("+protocol+") is invalid - only 'https:' is accepted"));
}
return callback();
};

@@ -95,27 +84,65 @@

module.exports.evaluate = function(package_json,options,callback) {
validate_config(package_json,function(err) {
if (err) return callback(err);
var v = package_json.version;
var module_version = semver.parse(v);
var runtime = options.runtime || 'node';
var opts = {
configuration: (options.debug === true) ? 'Debug' : 'Release'
, module_name: package_json.binary.module_name
, version: module_version.version
, prerelease: module_version.prerelease.length ? v.slice(v.indexOf(module_version.prerelease[0])) : ''
, major: module_version.major
, minor: module_version.minor
, patch: module_version.patch
, runtime: runtime
, node_abi: get_node_abi(runtime,options.target)
, target: options.target || ''
, platform: options.target_platform || process.platform
, arch: options.target_arch || process.arch
, target_arch: options.target_arch || process.arch
, module_main: package_json.main
function eval_template(template,opts) {
Object.keys(opts).forEach(function(key) {
var pattern = '{'+key+'}';
while (template.indexOf(pattern) > -1) {
template = template.replace(pattern,opts[key]);
}
opts.versioned = eval_template(package_json.binary.template,opts);
return callback(null,opts);
});
return template;
}
// url.resolve needs single trailing slash
// to behave correctly, otherwise a double slash
// may end up in the url which breaks requests
// and a lacking slash may not lead to proper joining
function add_trailing_slash(pathname) {
if (pathname.slice(-1) != '/') {
return pathname + '/';
}
return pathname;
}
var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
var default_remote_path = '';
module.exports.evaluate = function(package_json,options) {
options = options || {};
validate_config(package_json);
var v = package_json.version;
var module_version = semver.parse(v);
var runtime = options.runtime || (process.versions['node-webkit'] ? 'node-webkit' : 'node');
var opts = {
configuration: (options.debug === true) ? 'Debug' : 'Release'
, module_name: package_json.binary.module_name
, version: module_version.version
, prerelease: module_version.prerelease.length ? v.slice(v.indexOf(module_version.prerelease[0])) : ''
, major: module_version.major
, minor: module_version.minor
, patch: module_version.patch
, runtime: runtime
, node_abi: get_node_abi(runtime,options.target)
, target: options.target || ''
, platform: options.target_platform || process.platform
, arch: options.target_arch || process.arch
, target_arch: options.target_arch || process.arch
, module_main: package_json.main
}
opts.host = add_trailing_slash(eval_template(package_json.binary.host,opts));
opts.module_path = eval_template(package_json.binary.module_path,opts);
// now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably
if (options.module_root) {
// resolve relative to known module root: works for pre-binding require
opts.module_path = path.join(options.module_root,opts.module_path);
} else {
// resolve relative to current working directory: works for node-pre-gyp commands
opts.module_path = path.resolve(opts.module_path);
}
opts.remote_path = package_json.binary.remote_path ? add_trailing_slash(eval_template(package_json.binary.remote_path,opts)) : default_remote_path;
var package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name;
opts.package_name = eval_template(package_name,opts);
opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name);
opts.hosted_path = url.resolve(opts.host,opts.remote_path);
opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name);
return opts;
}
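Taken together, `evaluate` now returns one synchronous opts object whose derived fields drive every command above (clean, build, package, publish, install). For a package configured like the `app2` test fixture later in this diff, the result would look roughly like the following; the values are illustrative, not captured output:

```js
// Rough shape of versioning.evaluate(package_json, {}) for an app2-style config
var example_opts = {
    module_name: 'app2',
    module_path: '/absolute/path/to/lib/binding/node-v11-linux-x64',
    package_name: 'app2-v0.1.0-node-v11-linux-x64.tar.gz',
    remote_path: './app2/v0.1.0/',
    staged_tarball: 'build/stage/app2/v0.1.0/app2-v0.1.0-node-v11-linux-x64.tar.gz',
    host: 'https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/',
    hosted_path: 'https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/app2/v0.1.0/',
    hosted_tarball: 'https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/app2/v0.1.0/app2-v0.1.0-node-v11-linux-x64.tar.gz'
};
```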
{
"name": "node-pre-gyp",
"description": "Node.js native addon binary install tool",
"version": "0.4.2",
"version": "0.5.0",
"keywords": [

@@ -17,19 +17,18 @@ "native",

"type": "git",
"url": "git://github.com/springmeyer/node-pre-gyp.git"
"url": "git://github.com/mapbox/node-pre-gyp.git"
},
"preferGlobal": true,
"bin": "./bin/node-pre-gyp",
"main": "./lib/node-pre-gyp.js",
"dependencies": {
"nopt": "~2.1.2",
"nopt": "~2.2.0",
"npmlog": "~0.0.6",
"request": "2",
"semver": "~2.1.0",
"semver": "~2.2.1",
"tar": "~0.1.19",
"tar-pack":"~2.0.0",
"mkdirp":"~0.3.5",
"aws-sdk": "~2.0.0-rc9 ",
"rc":"~0.3.2",
"rc":"~0.3.4",
"rimraf":"~2.2.5"
},
"engineStrict": true,
"engines": {

@@ -36,0 +35,0 @@ "node": ">= 0.8.0"

# node-pre-gyp
[![Build Status](https://secure.travis-ci.org/springmeyer/node-pre-gyp.png)](https://travis-ci.org/springmeyer/node-pre-gyp)
#### node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries
`node-pre-gyp` is a Node.js native add-on install tool.
[![NPM](https://nodei.co/npm/node-pre-gyp.png)](https://nodei.co/npm/node-pre-gyp/)
## Does this replace npm or node-gyp?
[![Build Status](https://secure.travis-ci.org/mapbox/node-pre-gyp.png)](https://travis-ci.org/mapbox/node-pre-gyp)
[![Dependencies](https://david-dm.org/mapbox/node-pre-gyp.png)](https://david-dm.org/mapbox/node-pre-gyp)
No: it plays nicely with them.
`node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment.
- You still publish your package to the npm repository
- You still create a `binding.gyp` to compile your module with `node-gyp`
### Features
What `node-pre-gyp` does is stand between `npm` and `node-gyp`.
- A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary.
- A variety of developer targeted commands for packaging, testing, and publishing binaries.
- A Javascript module that can dynamically require your installed binary: `require('node-pre-gyp').find`
## Who uses node-pre-gyp?
For a hello world example of a module packaged with `node-pre-gyp` see <https://github.com/springmeyer/node-addon-example> and [the wiki](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples.
**Developers** of C++ modules can use `node-pre-gyp` to package and publish the binary `.node` before running `npm publish`.
## Credits
**Users** can then `npm install` your module from a binary and `node-pre-gyp` does the work to make this seamless.
- The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate)
- Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost).
- Development is sponsored by [MapBox](https://www.mapbox.com/)
## Modules using `node-pre-gyp`:
## Depends
- [node-sqlite3](https://github.com/mapbox/node-sqlite3)
- [node-mapnik](https://github.com/mapnik/node-mapnik)
- [node-osmium](https://github.com/osmcode/node-osmium)
- [node-osrm](https://github.com/DennisOSRM/node-OSRM)
- Node.js 0.10.x or 0.8.x
For more examples see the [test apps](test/).
## Install
`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like:
./node_modules/.bin/node-pre-gyp --help
But you can also install it globally:
npm install node-pre-gyp -g
## Usage
### Commands
**1) Add a custom `install` script to `package.json`**
View all possible commands:
node-pre-gyp --help
- clean - Removes the entire folder containing the compiled .node module
- install - Attempts to install pre-built binary for module
- reinstall - Runs "clean" and "install" at once
- build - Attempts to compile the module by dispatching to node-gyp or nw-gyp
- rebuild - Runs "clean" and "build" at once
- package - Packs binary into tarball
- testpackage - Tests that the staged package is valid
- publish - Publishes pre-built binary
- unpublish - Unpublishes pre-built binary
- info - Fetches info on published binaries
You can also chain commands:
node-pre-gyp clean build unpublish publish info
### Options
Options include:
- `-C/--directory`: run the command in this directory
- `--build-from-source`: build from source instead of using pre-built binary
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
- `--target=0.10.25`: Pass the target node or node-webkit version to compile against
- `--target_arch=ia32`: Pass the target arch (will override the host `arch`)
Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.
For example: `npm install --build-from-source=myapp`. This is useful if:
- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependent with `npm install`.
- The larger app also depends on other modules installed with `node-pre-gyp`
- You only want to trigger a source compile for `myapp` and the other modules.
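For example (reusing the hypothetical module name `myapp` from above):

    npm install --build-from-source           # compile every node-pre-gyp module being installed from source
    npm install --build-from-source=myapp     # compile only myapp from source
    npm install --fallback-to-build=false     # fail rather than compile if no pre-built binary is found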
### Configuring
This is a guide to configuring your module to use node-pre-gyp.
#### 1) Add new entries to your `package.json`
- Add `node-pre-gyp` as a bundled dependency
- Add a custom `install` script
- Declare a `binary` object
This looks like:
```js
"dependencies" : {
"node-pre-gyp": "0.5.x"
},
"bundledDependencies":["node-pre-gyp"],
"scripts": {
"install": "node-pre-gyp install --fallback-to-build",
}
"binary": {
"module_name": "your_module",
"module_path": "./lib/binding/",
"host": "https://your_module.s3-us-west-1.amazonaws.com",
}
```
##### The `binary` object has three required properties
**2) Add a `binary` property to `package.json`**
###### module_name
It must provide these properties:
The name of your native node module. This must match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world) and should not include the `.node` extension.
- `module_name`: The name of your native node module.
- `module_path`: The location your native module is placed after a build. This should be an empty directory without other javascript files.
- `remote_uri`: A url to the remote location where you've published tarball binaries
- `template`: A string describing the tarball versioning scheme for your binaries
###### module_path
An example from `node-sqlite3` looks like:
The location your native module is placed after a build. This should be an empty directory without other javascript files. This entire directory will be packaged in the binary tarball. When installing from a remote package this directory will be overwritten with the contents of the tarball.
Note: This property supports variables based on [Versioning](#versioning).
###### host
A url to the remote location where you've published tarball binaries (must be `https` not `http`).
##### The `binary` object has two optional properties
###### remote_path
It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{module_name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`.
Note: This property supports variables based on [Versioning](#versioning).
###### package_name
It is **not recommended** to override this property. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path`. If you do not provide it in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz`, which is a versioning string capable of supporting remote lookup of your module across all of its published versions and various node versions, platforms, and architectures. But if you only wish to support Windows you could change it to `{module_name}-v{version}-{node_abi}-win32-{arch}.tar.gz`.
Note: This property supports variables based on [Versioning](#versioning).
#### 2) Dynamically require your `.node`
Inside the main js file that requires your addon module you are likely currently doing:
```js
"binary": {
"module_name": "node_sqlite3",
"module_path": "./lib/binding/",
"remote_uri": "http://node-sqlite3.s3.amazonaws.com",
"template": "{configuration}/{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz"
}
var binding = require('../build/Release/binding.node');
```
**3) Add node-pre-gyp as a bundled dependency**
or:
```js
"dependencies" : {
"node-pre-gyp": "~0.4.0",
},
"bundledDependencies":["node-pre-gyp"],
var bindings = require('./bindings')
```
**4) Build and package your app**
Change those lines to:
Install node-pre-gyp globally:
```js
var binary = require('node-pre-gyp');
var path = require('path')
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
var binding = require(binding_path);
```
npm install -g node-pre-gyp
#### 3) Build and package your app
Then build and package your app:
Now build your module from source:
node-pre-gyp build package
npm install --build-from-source
**5) Publish the tarball**
The `--build-from-source` flag tells `node-pre-gyp` not to look for a remote package and instead dispatch to node-gyp to build.
`node-pre-gyp` should now also be installed as a local dependency, so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`.
#### 4) Test
Now `npm test` should work just as it did before.
#### 5) Publish the tarball
Then package your app:
./node_modules/.bin/node-pre-gyp package
Once packaged, now you can publish:
node-pre-gyp publish
./node_modules/.bin/node-pre-gyp publish
Currently the `publish` command pushes your binary to S3. This requires:
- You have installed `aws-sdk` with `npm install aws-sdk`
- You have created a bucket already.
- The `remote-uri` points to an S3 http or https endpoint.
- The `host` points to an S3 http or https endpoint.
- You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details).
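The lib code above reads these credentials with the `rc` module under the `node_pre_gyp` name, so one way to provide them is a `~/.node_pre_gyprc` file; the file name follows `rc`'s convention and the keys below are the ones the publish/unpublish code checks. Treat this as a sketch and defer to the [S3 hosting](#s3-hosting) section for the authoritative setup:

```js
{
    "accessKeyId": "YOUR_AWS_ACCESS_KEY_ID",
    "secretAccessKey": "YOUR_AWS_SECRET_ACCESS_KEY",
    "acl": "public-read"
}
```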

@@ -96,11 +194,10 @@

- You manually publish the binary created by the `package` command.
- The package is available as a tarball in the `build/stage/` directory.
- You provide a remote location and point the `remote_uri` value to it.
- You manually publish the binary created by the `package` command to an `https` endpoint
- Ensure that the `host` value points to your custom `https` endpoint.
**6) Automating builds**
#### 6) Automate builds
Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated. See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux. On windows consider using a script [like this](https://github.com/mapbox/node-sqlite3/blob/master/scripts/build.bat) to quickly create and publish binaries.
Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated. See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux. On windows consider using a script [like this](https://github.com/mapbox/node-sqlite3/blob/master/scripts/build.bat) to quickly create and publish binaries and check out <https://appveyor.com>.
**7) You're done!**
#### 7) You're done!

@@ -116,3 +213,3 @@ Now publish your package to the npm registry. Users will now be able to install your module from a binary.

If a failure occurred and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module.
If a binary was not available for a given platform and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module.

@@ -157,3 +254,3 @@ ## S3 Hosting

You may also need to specify the `region` if it is not explicit in the `remote_uri` value you use. The `bucket` can also be specified but it is optional because `node-pre-gyp` will detect it from the `remote_uri` value.
You may also need to specify the `region` if it is not explicit in the `host` value you use. The `bucket` can also be specified but it is optional because `node-pre-gyp` will detect it from the `host` value.

@@ -250,43 +347,15 @@ **4) Package and publish your build**

### Commands
# Versioning
View all possible commands:
The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed.
node-pre-gyp --help
- `configuration` - Either 'Release' or 'Debug' depending on if `--debug` is passed during the build.
- `module_name` - the `binary.module_name` attribute from `package.json`.
- `version` - the semver `version` value for your module from `package.json`.
- `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`
- `node_abi`: The node C++ `ABI` number. This value is available in JavaScript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fall back to the V8 major and minor version.
- `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32`
- `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
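As a concrete illustration, on 64-bit Linux under a node whose ABI number is 11, the default `package_name` template from above would expand roughly like this (example values, not captured output):

```js
// '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'
// with module_name 'your_module', version '1.2.3', node_abi 'node-v11',
// platform 'linux', arch 'x64' becomes:
'your_module-v1.2.3-node-v11-linux-x64.tar.gz'
```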
#### List published binaries
node-pre-gyp info
#### Unpublish binaries
node-pre-gyp unpublish
#### Clean install and build artifacts
node-pre-gyp clean
#### Clean and install
node-pre-gyp reinstall # runs "clean" and "install"
#### Chaining commands
node-pre-gyp clean build unpublish publish info
### Options
Options include:
- `-C/--directory`: run the command in this directory
- `--build-from-source`: build from source instead of using pre-built binary
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. So, in addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.
For example: `npm install --build-from-source=myapp`. This is useful if:
- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependent with `npm install`.
- The larger app also depends on other modules installed with `node-pre-gyp`
- You only want to trigger a source compile for `myapp` and the other modules.
The options are visible in the code at <https://github.com/mapbox/node-pre-gyp/blob/612b7bca2604508d881e1187614870ba19a7f0c5/lib/util/versioning.js#L114-L127>

@@ -9,3 +9,3 @@ var https = require("https");

// https://github.com/springmeyer/node-pre-gyp/wiki/Node-ABI
// https://github.com/mapbox/node-pre-gyp/wiki/Node-ABI
node scripts/abi_crosswalk.js > lib/util/abi_crosswalk.json

@@ -12,0 +12,0 @@

@@ -1,9 +0,6 @@

var path = require('path');
var pkg = require('./package.json');
var assert = require('assert');
var module_path = path.join(
path.join(__dirname,pkg.binary.module_path),
pkg.binary.module_name + '.node');
var binding = require(module_path);
var binary = require('node-pre-gyp');
var path = require('path')
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
var binding = require(binding_path);
assert.ok(binding);
require('assert').equal(binding.hello(),"hello");

@@ -7,3 +7,3 @@ {

"type" : "git",
"url" : "git://github.com/springmeyer/node-pre-gyp.git"
"url" : "git://github.com/mapbox/node-pre-gyp.git"
},

@@ -15,4 +15,3 @@ "version": "0.1.0",

"module_path": "./lib/binding/",
"remote_uri": "https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/app1",
"template": "v{version}/{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz"
"host":"https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com"
},

@@ -19,0 +18,0 @@ "scripts": {

@@ -1,9 +0,6 @@

var path = require('path');
var pkg = require('../package.json');
var assert = require('assert');
var module_path = path.join(
path.join(__dirname,'../' + pkg.binary.module_path),
pkg.binary.module_name + '.node');
var app = require(module_path);
var binary = require('node-pre-gyp');
var path = require('path')
var binding_path = binary.find(path.resolve(path.join(__dirname,'../package.json')));
var binding = require(binding_path);
assert.ok(app);
require('assert').equal(binding.hello(),"hello");

@@ -7,3 +7,3 @@ {

"type" : "git",
"url" : "git://github.com/springmeyer/node-pre-gyp.git"
"url" : "git://github.com/mapbox/node-pre-gyp.git"
},

@@ -14,5 +14,6 @@ "version": "0.1.0",

"module_name": "app2",
"module_path": "./lib/binding/",
"remote_uri": "https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/app2/",
"template": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz"
"module_path": "./lib/binding/{node_abi}-{platform}-{arch}",
"remote_path": "./{module_name}/v{version}",
"package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz",
"host":"https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com"
},

@@ -19,0 +20,0 @@ "scripts": {

@@ -1,10 +0,6 @@

var path = require('path');
var pkg = require('./package.json');
var assert = require('assert');
var module_path = path.join(
path.join(__dirname,pkg.binary.module_path),
pkg.binary.module_name + '.node');
var binding = require(module_path);
var binary = require('node-pre-gyp');
var path = require('path')
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
var binding = require(binding_path);
assert.ok(binding);
assert.equal(binding.hello(),"hello");
require('assert').equal(binding.hello(),"hello");

@@ -7,3 +7,3 @@ {

"type" : "git",
"url" : "git://github.com/springmeyer/node-pre-gyp.git"
"url" : "git://github.com/mapbox/node-pre-gyp.git"
},

@@ -14,5 +14,6 @@ "version": "0.1.0-dev.4",

"module_name": "app3",
"module_path": "./lib/binding/",
"remote_uri": "https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/app3",
"template": "{module_name}-v{major}.{minor}.{patch}-{prerelease}-{node_abi}-{platform}-{arch}.tar.gz"
"module_path": "./lib/binding/{node_abi}-{platform}-{arch}",
"remote_path": "./{module_name}/v{version}",
"package_name": "{node_abi}-{platform}-{arch}.tar.gz",
"host":"https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com"
},

@@ -19,0 +20,0 @@ "scripts": {

@@ -1,10 +0,6 @@

var path = require('path');
var pkg = require('./package.json');
var assert = require('assert');
var module_path = path.join(
path.join(__dirname,pkg.binary.module_path),
pkg.binary.module_name + '.node');
var binding = require(module_path);
var binary = require('node-pre-gyp');
var path = require('path')
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
var binding = require(binding_path);
assert.ok(binding);
assert.equal(binding.hello(),"hello");
require('assert').equal(binding.hello(),"hello");

@@ -7,3 +7,3 @@ {

"type" : "git",
"url" : "git://github.com/springmeyer/node-pre-gyp.git"
"url" : "git://github.com/mapbox/node-pre-gyp.git"
},

@@ -14,5 +14,6 @@ "version": "0.1.0-dev.4",

"module_name": "app4",
"module_path": "./lib/binding/",
"remote_uri": "https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com/app4/alpha-test/",
"template": "{module_name}-v{major}.{minor}.{patch}-{prerelease}-{node_abi}-{platform}-{arch}.tar.gz"
"module_path": "./lib/binding/{node_abi}-{platform}-{arch}",
"remote_path": "./{module_name}/v{version}",
"package_name": "{node_abi}-{platform}-{arch}.tar.gz",
"host":"https://node-pre-gyp-tests.s3-us-west-1.amazonaws.com"
},

@@ -19,0 +20,0 @@ "scripts": {
