mongoose - npm Package Compare versions

Comparing version 1.5.0 to 1.6.0

support/node-mongodb-native/lib/mongodb/gridfs/grid.js

History.md
1.6.0 / 2011-07-07
===================
* changed; .save() errors are now emitted on the instance's db instead of the instance 9782463fc
* fixed; errors occurring when creating indexes now properly emit on db
* added; $maxDistance support to MongooseArrays
* fixed; RegExps now work with $all
* changed; node-mongodb-native driver to v0.9.6.4
* fixed; model names are now accessible via .modelName
* added; Query#slaveOk support
1.5.0 / 2011-06-27

@@ -3,0 +14,0 @@ ===================

lib/mongoose/document.js

@@ -649,3 +649,3 @@

}, function (err) {
this.emit('error', err);
this.db.emit('error', err);
}).pre('save', function validation (next) {

@@ -652,0 +652,0 @@ return self.validate.call(self, next);
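This document.js change is the changelog's first item in action: the post-save error handler now emits on the document's db connection rather than on the document itself. A minimal sketch of what callers listen to instead, assuming a typical mongoose 1.x setup (the connection URL is illustrative):

var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/test');

// As of 1.6.0, failed saves (and failed index builds, per the hunks below)
// emit 'error' on the db/connection rather than on each document instance,
// so one listener covers them all.
mongoose.connection.on('error', function (err) {
  console.error('mongoose error:', err);
});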

@@ -292,3 +292,3 @@

exports.version = '1.5.0';
exports.version = '1.6.0';

@@ -295,0 +295,0 @@ /**

@@ -91,3 +91,3 @@

, schema: self._path(path) };
});
});

@@ -261,3 +261,3 @@ if (this.isNew) {

}, function (err) {
this.emit('error', err);
this.db.emit('error', err);
});

@@ -309,3 +309,3 @@

self.collection.ensureIndex(index[0], index[1], function (err) {
if (err) return self.emit(err);
if (err) return self.db.emit('error', err);
--count || self.emit('index');

@@ -679,3 +679,3 @@ });

model.name = name;
model.modelName = name;
model.__proto__ = Model;

@@ -682,0 +682,0 @@ model.prototype.__proto__ = Model.prototype;
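The rename above backs the changelog's "model names are now accessible via .modelName" entry; assigning to a compiled model function's built-in name property is generally a silent no-op, so the name now lives on modelName instead. Illustrative usage (the model itself is a placeholder):

var mongoose = require('mongoose');
var Schema = mongoose.Schema;

var Cat = mongoose.model('Cat', new Schema({ name: String }));
console.log(Cat.modelName);   // 'Cat' as of 1.6.0
// Cat.name was unreliable because Function#name is not assignable in the V8 of that era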

@@ -272,3 +272,3 @@ var utils = require('./utils')

// $lt, $lte, $gt, $gte can be used on Numbers or Dates
['gt', 'gte', 'lt', 'lte', 'ne', 'in', 'nin', 'all', 'size'].forEach( function ($conditional) {
'gt gte lt lte ne in nin all size maxDistance'.split(' ').forEach( function ($conditional) {
Query.prototype['$' + $conditional] =

@@ -288,3 +288,3 @@ Query.prototype[$conditional] = function (path, val) {

['mod', 'near'].forEach( function ($conditional) {
;['mod', 'near'].forEach( function ($conditional) {
Query.prototype['$' + $conditional] =

@@ -351,3 +351,3 @@ Query.prototype[$conditional] = function (path, val) {

['maxscan'].forEach( function (method) {
;['maxscan'].forEach( function (method) {
Query.prototype[method] = function (v) {

@@ -372,3 +372,3 @@ this.options[method] = v;

// To be used idiomatically where Query#box and Query#center
['wherein', '$wherein'].forEach(function (getter) {
;['wherein', '$wherein'].forEach(function (getter) {
Object.defineProperty(Query.prototype, getter, {

@@ -430,3 +430,3 @@ get: function () {

/**
* Chainable method for adding the specified fields to the
* Chainable method for adding the specified fields to the
* object of fields to only include.

@@ -597,3 +597,3 @@ *

['limit', 'skip', 'maxscan', 'snapshot'].forEach( function (method) {
;['limit', 'skip', 'maxscan', 'snapshot'].forEach( function (method) {
Query.prototype[method] = function (v) {

@@ -636,2 +636,17 @@ this.options[method] = v;

/**
* Sets slaveOk option
*
* new Query().slaveOk() <== true
* new Query().slaveOk(true)
* new Query().slaveOk(false)
*
* @param {Boolean} v (defaults to true)
*/
Query.prototype.slaveOk = function (v) {
this.options.slaveOk = arguments.length ? !!v : true;
return this;
};
Query.prototype.execFind = function (callback) {
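Query#slaveOk above only flips this.options.slaveOk; those options travel with the query into the driver, whose cursor-side hunk further down in this diff ORs QueryCommand.OPTS_SLAVE into the query flags when slaveOk is set. A hedged sketch (Post is an illustrative model, and calling find without a callback is assumed to return a Query as in mongoose 1.x):

var query = Post.find({ published: true });  // no callback: returns a Query
query.slaveOk();        // same as query.slaveOk(true); sets query.options.slaveOk
query.slaveOk(false);   // explicitly disable secondary reads for this query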

@@ -642,6 +657,8 @@ var model = this.model

var promise = new Promise(callback);
try {
this.cast(model);
} catch (err) {
return callback(err);
return promise.error(err);
}

@@ -652,6 +669,6 @@

model.collection.find(castQuery, options, function (err, cursor) {
if (err) return callback(err);
if (err) return promise.error(err);
cursor.toArray(function (err, docs) {
if (err) return callback(err);
if (err) return promise.error(err);

@@ -661,3 +678,3 @@ var arr = []

if (!count) return callback(null, []);
if (!count) return promise.complete([]);

@@ -669,4 +686,4 @@ for (var i = 0, l = docs.length; i < l; i++) {

arr[i].init(docs[i], function (err) {
if (err) return callback(err);
--count || callback(null, arr);
if (err) return promise.error(err);
--count || promise.complete(arr);
});
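The execFind rewrite above routes cast failures and driver errors through the query's Promise (promise.error / promise.complete) instead of invoking the callback inline; the (err, docs) callback contract is unchanged for callers. A sketch, assuming createdAt is a Date path on an illustrative Post model:

Post.find({ createdAt: { $gt: 'not-a-date' } }, function (err, posts) {
  if (err) return console.error(err);   // cast errors now arrive via promise.error(err)
  console.log('found', posts.length, 'posts');
});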

@@ -850,4 +867,2 @@ }

* Casts the query, sends the update command to mongodb.
* If there is an error, the callback handles it. Otherwise,
* we just invoke the callback without passing it an error.
*

@@ -854,0 +869,0 @@ * @param {Function} callback fn(err)

@@ -96,10 +96,28 @@ /**

throw new CastError('array', value, caster);
throw new CastError('array', value);
};
SchemaArray.prototype.$conditionalHandlers = {
'$all': function (val) {
return this.cast(val);
'$all': function handle$all (val) {
if (!Array.isArray(val)) {
val = [val];
}
if (!(val instanceof MongooseArray)) {
val = new MongooseArray(val, this.path);
}
// use castForQuery where available
var proto = this.caster.prototype;
var method = proto.castForQuery || proto.cast;
try {
return val.map(function (v) {
return method.call(proto, v);
});
} catch (err) {
// rethrow
throw new CastError(err.type, val);
}
}
// TODO Move elemMatch to documentarray

@@ -133,3 +151,7 @@ , '$elemMatch': function (val) {

}
, '$maxDistance': function (val) {
return ArrayNumberSchema.prototype.cast.call(this, val);
}
};
SchemaArray.prototype.castForQuery = function ($conditional, val) {
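Two changelog items land in these SchemaArray handlers: $all values are now cast element-by-element through castForQuery where available (which is what lets RegExps survive), and $maxDistance is cast as a number so it can accompany $near. Illustrative queries, assuming tags is an array of strings and loc is a [lng, lat] array with a 2d index; the models and the callback are placeholders:

function callback (err, docs) { /* ... */ }

// RegExp inside $all (fixed in 1.6.0)
Post.find({ tags: { $all: [/^mongo/i, 'nodejs'] } }, callback);

// $maxDistance on an array path (added in 1.6.0)
Place.find({ loc: { $near: [-122.4, 37.7], $maxDistance: 0.05 } }, callback);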

@@ -169,2 +191,3 @@ var handler;

}
throw new CastError('number', value);

@@ -171,0 +194,0 @@ };

@@ -6,4 +6,3 @@

var SchemaType = require('../schematype')
, CastError = SchemaType.CastError;
var SchemaType = require('../schematype');

@@ -10,0 +9,0 @@ /**

@@ -113,3 +113,3 @@

throw new CastError('documentarray', value, this.caster);
throw new CastError('documentarray', value);
};

@@ -116,0 +116,0 @@

@@ -289,7 +289,6 @@

function CastError (type, path, type, value) {
function CastError (type, value) {
MongooseError.call(this, 'Cast to ' + type + ' failed for value "' + value + '"');
Error.captureStackTrace(this, arguments.callee);
this.name = 'CastError';
this.path = path;
this.type = type;

@@ -296,0 +295,0 @@ this.value = value;
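CastError's constructor drops the path argument here, so the signature is now (type, value) and err.path is no longer populated; the schema/array.js and documentarray.js hunks earlier in this diff were updated to match. Roughly what a caught error exposes under the new shape (a sketch, assuming _id is an ObjectId path on an illustrative model):

Post.findOne({ _id: 'not-an-objectid' }, function (err, doc) {
  if (err && err.name === 'CastError') {
    console.log(err.type, err.value);  // the schema type that refused the value, and the value itself
    console.log(err.message);          // 'Cast to <type> failed for value "..."'
    // err.path is gone as of this change
  }
});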

{
"name": "mongoose"
, "description": "Mongoose MongoDB ORM"
, "version": "1.5.0"
, "version": "1.6.0"
, "author": "Guillermo Rauch <guillermo@learnboost.com>"

@@ -6,0 +6,0 @@ , "keywords": ["mongodb", "mongoose", "orm", "data", "datastore", "nosql"]

@@ -25,4 +25,4 @@ require.paths.unshift('../lib');

// Add documents
// for(var i = 0; i < 100000; i++) {
for(var i = 0; i < 10000; i++) {
for(var i = 0; i < 100000; i++) {
// for(var i = 0; i < 10000; i++) {
collection.save({'i':i, 'a':i, 'c':i, 'd':{'i':i}}, function(err, result){});

@@ -43,4 +43,4 @@ }

count++;
if ((count%1000)==0) sys.puts("recs:" + count + " :: " +
((new Date().getTime() - started_at)/1000) + "seconds");
if ((count%10000)==0) sys.puts("recs:" + count + " :: " +
((new Date().getTime() - started_at)/10000) + "seconds");
});

@@ -47,0 +47,0 @@ });

@@ -11,3 +11,2 @@ GLOBAL.DEBUG = true;

Server = require('../lib/mongodb').Server,
ServerCluster = require('../lib/mongodb').ServerCluster,
// BSON = require('../lib/mongodb').BSONPure;

@@ -14,0 +13,0 @@ ReplSetServers = require('../lib/mongodb').ReplSetServers,

@@ -46,3 +46,2 @@ var Collection = require('./collection').Collection,

var self = this;
var databaseName = this.db.databaseName;

@@ -56,2 +55,23 @@ this.db.databaseName = 'admin';

Admin.prototype.logout = function(options, callback) {
var self = this;
var databaseName = this.db.databaseName;
this.db.databaseName = 'admin';
this.db.logout(options, function(err, result) {
return callback(err, result);
})
self.db.databaseName = databaseName;
}
Admin.prototype.addUser = function(username, password, callback) {
var self = this;
var databaseName = this.db.databaseName;
this.db.databaseName = 'admin';
this.db.addUser(username, password, function(err, result) {
self.db.databaseName = databaseName;
return callback(err, result);
})
}
Admin.prototype.setProfilingLevel = function(level, callback) {

@@ -58,0 +78,0 @@ var self = this;
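The Admin helper gains logout and addUser wrappers above; each temporarily points the underlying Db at the admin database, runs the matching Db call, and restores the original database name. A hedged driver-level sketch (host, credentials and the admin() callback form are assumptions layered on this driver line):

var mongodb = require('mongodb');
var db = new mongodb.Db('test', new mongodb.Server('127.0.0.1', 27017, {}), {});

db.open(function (err, db) {
  db.admin(function (err, adminDb) {
    adminDb.addUser('root', 'secret', function (err, result) {
      // ...
      adminDb.logout({}, function (err, ok) { /* admin credentials dropped */ });
    });
  });
});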

@@ -24,3 +24,3 @@

if (buffer != null && (buffer instanceof Number)) {
if (buffer != null && !(buffer instanceof Number)) {
this.buffer = buffer;

@@ -27,0 +27,0 @@ this.position = buffer.length;

@@ -81,5 +81,2 @@ /**

// debug("======================================================== options")
// debug(inspect(options))
this.pkFactory = pkFactory == null

@@ -111,2 +108,4 @@ ? db.bson_serializer.ObjectID

if ('function' === typeof options) callback = options, options = {};
if(options == null) options = {};
if(!('function' === typeof callback)) callback = null;

@@ -158,2 +157,6 @@ this.insertAll(Array.isArray(docs) ? docs : [docs], options, callback);

}
// Ensure options
if(options == null) options = {};
if(!('function' === typeof callback)) callback = null;

@@ -165,9 +168,24 @@ var deleteCommand = new DeleteCommand(

var errorOptions = options.safe != null ? options.safe : null;
errorOptions = errorOptions == null && this.opts.safe != null ? this.opts.safe : errorOptions;
errorOptions = errorOptions == null && this.db.strict != null ? this.db.strict : errorOptions;
// Execute the command, do not add a callback as it's async
if (options && options.safe || this.opts.safe != null || this.db.strict) {
var errorOptions = options.safe != null ? options.safe : null;
errorOptions = errorOptions == null && this.opts.safe != null ? this.opts.safe : errorOptions;
errorOptions = errorOptions == null && this.db.strict != null ? this.db.strict : errorOptions;
// Insert options
var commandOptions = {read:false};
// If we have safe set set async to false
if(errorOptions == null) commandOptions['async'] = true;
// Set safe option
commandOptions['safe'] = true;
// If we have an error option
if(typeof errorOptions == 'object') {
var keys = Object.keys(errorOptions);
for(var i = 0; i < keys.length; i++) {
commandOptions[keys[i]] = errorOptions[keys[i]];
}
}
// Execute command with safe options (rolls up both command and safe command into one and executes them on the same connection)
this.db.executeCommand(deleteCommand, {read:false, safe: errorOptions}, function (err, error) {
this.db.executeCommand(deleteCommand, commandOptions, function (err, error) {
error = error && error.documents;

@@ -232,3 +250,5 @@ if(!callback) return;

Collection.prototype.insertAll = function insertAll (docs, options, callback) {
if ('function' === typeof options) callback = options, options = {};
if('function' === typeof options) callback = options, options = {};
if(options == null) options = {};
if(!('function' === typeof callback)) callback = null;

@@ -251,14 +271,26 @@ var insertCommand = new InsertCommand(

// Collect errorOptions
var errorOptions = options.safe != null ? options.safe : null;
errorOptions = errorOptions == null && this.opts.safe != null ? this.opts.safe : errorOptions;
errorOptions = errorOptions == null && this.db.strict != null ? this.db.strict : errorOptions;
// If safe is defined check for error message
if (options != null && (options.safe == true || this.db.strict == true || this.opts.safe == true)) {
var errorOptions = options.safe != null ? options.safe : null;
errorOptions = errorOptions == null && this.opts.safe != null ? this.opts.safe : errorOptions;
errorOptions = errorOptions == null && this.db.strict != null ? this.db.strict : errorOptions;
// if(options != null && (options.safe == true || this.db.strict == true || this.opts.safe == true)) {
if(errorOptions) {
// Insert options
var insertOptions = {read:false, safe: errorOptions};
var commandOptions = {read:false};
// If we have safe set set async to false
if(errorOptions == null) insertOptions['async'] = true;
if(errorOptions == null) commandOptions['async'] = true;
// Set safe option
commandOptions['safe'] = true;
// If we have an error option
if(typeof errorOptions == 'object') {
var keys = Object.keys(errorOptions);
for(var i = 0; i < keys.length; i++) {
commandOptions[keys[i]] = errorOptions[keys[i]];
}
}
// Execute command with safe options (rolls up both command and safe command into one and executes them on the same connection)
this.db.executeCommand(insertCommand, insertOptions, function (err, error) {
this.db.executeCommand(insertCommand, commandOptions, function (err, error) {
error = error && error.documents;

@@ -275,8 +307,8 @@ if(!callback) return;

});
} else {
} else {
var result = this.db.executeCommand(insertCommand);
// If no callback just return
if (!callback) return;
if(!callback) return;
// If error return error
if (result instanceof Error) {
if(result instanceof Error) {
return callback(result);

@@ -340,2 +372,4 @@ }

if('function' === typeof options) callback = options, options = null;
if(options == null) options = {};
if(!('function' === typeof callback)) callback = null;

@@ -349,11 +383,25 @@ var updateCommand = new UpdateCommand(

// Unpack the error options if any
var errorOptions = (options && options.safe != null) ? options.safe : null;
errorOptions = errorOptions == null && this.opts.safe != null ? this.opts.safe : errorOptions;
errorOptions = errorOptions == null && this.db.strict != null ? this.db.strict : errorOptions;
// If we are executing in strict mode or safe both the update and the safe command must happen on the same line
if (options && options.safe || this.db.strict || this.opts.safe) {
// Unpack the error options if any
var errorOptions = (options && options.safe != null) ? options.safe : null;
errorOptions = errorOptions == null && this.opts.safe != null ? this.opts.safe : errorOptions;
errorOptions = errorOptions == null && this.db.strict != null ? this.db.strict : errorOptions;
if(errorOptions) {
// Insert options
var commandOptions = {read:false};
// If we have safe set set async to false
if(errorOptions == null) commandOptions['async'] = true;
// Set safe option
commandOptions['safe'] = true;
// If we have an error option
if(typeof errorOptions == 'object') {
var keys = Object.keys(errorOptions);
for(var i = 0; i < keys.length; i++) {
commandOptions[keys[i]] = errorOptions[keys[i]];
}
}
// Execute command with safe options (rolls up both command and safe command into one and executes them on the same connection)
this.db.executeCommand(updateCommand, {read:false, safe: errorOptions}, function (err, error) {
this.db.executeCommand(updateCommand, commandOptions, function (err, error) {
error = error && error.documents;
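remove, insertAll and update now normalize their arguments the same way and, whenever a safe option is in effect (per call, per collection, or via db.strict), roll the write and its error check into one command execution; an object passed as safe has its keys copied onto the command options. What that looks like from calling code (db is an open Db handle as in the earlier sketch; names are illustrative):

db.collection('posts', function (err, collection) {
  // safe:true - the callback receives any server-side write error
  collection.insert({ title: 'hello' }, { safe: true }, function (err, docs) {
    if (err) return console.error('insert failed:', err);
  });

  // safe as an object - its keys (e.g. w / wtimeout) are merged into the
  // options handed to executeCommand along with safe:true
  collection.update({ title: 'hello' }, { $set: { read: true } },
                    { safe: { w: 2, wtimeout: 1000 } },
                    function (err) { /* ... */ });
});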

@@ -675,2 +723,3 @@ if(!callback) return;

// If we have a timeout set
var timeout = null != options.timeout

@@ -680,2 +729,7 @@ ? options.timeout

// Make sure we can do slaveOk commands
if (options.slaveOk || this.slaveOk || this.db.slaveOk) {
timeout |= QueryCommand.OPTS_SLAVE;
}
var collectionName = (this.db.databaseName ? this.db.databaseName + '.' : '')

@@ -682,0 +736,0 @@ + this.collectionName;

@@ -48,6 +48,3 @@ var QueryCommand = require('./query_command').QueryCommand,

var hash_password = MD5.hex_md5(username + ":mongo:" + password);
// debug("=========================== hash_password :: " + hash_password)
var key = MD5.hex_md5(nonce + username + hash_password);
// debug("=========================== pre_hash_key :: " + (nonce + username + hash_password))
// debug("=========================== key :: " + key)
var selector = {'authenticate':1, 'user':username, 'nonce':nonce, 'key':key};

@@ -95,4 +92,2 @@ // Create db command

// debug("createGetLastErrorCommand :: " + inspect(command))
// Execute command

@@ -103,5 +98,2 @@ return new DbCommand(db, db.databaseName + "." + DbCommand.SYSTEM_COMMAND_COLLECTION, QueryCommand.OPTS_NO_CURSOR_TIMEOUT, 0, -1, command, null);

DbCommand.createGetLastStatusCommand = DbCommand.createGetLastErrorCommand;
// function(db) {
// return new DbCommand(db, db.databaseName + "." + DbCommand.SYSTEM_COMMAND_COLLECTION, QueryCommand.OPTS_NO_CURSOR_TIMEOUT, 0, -1, {'getlasterror':1}, null);
// };

@@ -174,2 +166,6 @@ DbCommand.createGetPreviousErrorsCommand = function(db) {

DbCommand.logoutCommand = function(db, command_hash) {
return new DbCommand(db, db.databaseName + "." + DbCommand.SYSTEM_COMMAND_COLLECTION, QueryCommand.OPTS_NO_CURSOR_TIMEOUT, 0, -1, command_hash, null);
}
DbCommand.createDropIndexCommand = function(db, collectionName, indexName) {

@@ -176,0 +172,0 @@ return new DbCommand(db, db.databaseName + "." + DbCommand.SYSTEM_COMMAND_COLLECTION, QueryCommand.OPTS_NO_CURSOR_TIMEOUT, 0, -1, {'deleteIndexes':collectionName, 'index':indexName}, null);

@@ -56,7 +56,2 @@ var net = require('net'),

// if(conObj == null) {
// debug("================================================================ failed to find connection :: " + this.fd)
// debug(inspect(self.poolByReference))
// }
// Check if we have an unfinished message

@@ -146,2 +141,3 @@ if(conObj != null && conObj.bytesRead > 0 && conObj.sizeOfMessage > 0) {

"stubBuffer": ''});
// Add the listener to the connection

@@ -251,3 +247,3 @@ connection.addListener("data", receiveListener);

// Wait for a reconnect and send all the messages
self.on("reconnect", function() {
self.on("resend", function() {
self.currently_reconnecting = false;

@@ -254,0 +250,0 @@ // Fire the message again

@@ -31,4 +31,4 @@ var Connection = require('../connection').Connection,

// Are we allowing reads from secondaries ?
this.readSecondary = options["read_secondary"] == null ? false : options["read_secondary"];
this.slaveOk = this.readSecondary ? true : false;
this.readSecondary = options["read_secondary"];
this.slaveOk = this.readSecondary;
this.otherErrors = [];

@@ -119,4 +119,2 @@

ReplSetServers.prototype.connect = function(parent, callback) {
// debug("================================================================== ReplSetServers.prototype.connect")
var replSetSelf = this;

@@ -129,7 +127,5 @@ var serverConnections = this.servers;

// Ensure parent can do a slave query if it's set
parent.slaveOk = this.slaveOk;
parent.slaveOk = this.slaveOk ? this.slaveOk : parent.slaveOk;
var initServer = function(server) {
// debug("======= connect :: initServer")
replSetSelf.addresses[server.host + ':' + server.port] = 1;

@@ -140,5 +136,5 @@ server.connection = new Connection(server.host, server.port, server.autoReconnect);

server.connection.on("connect", function() {
// debug("======= connect :: initServer :: connect")
// Create a callback function for a given connection
var connectCallback = function(err, reply) {
if(replSetSelf.otherErrors.length > 0) return;
// Update number of connected servers, ensure we update before any possible errors

@@ -184,5 +180,8 @@ numberOfConnectedServers = numberOfConnectedServers + 1;

if(replSetSelf.replicaSet != node["setName"]) {
if(replSetSelf.replicaSet == null) {
replSetSelf.replicaSet = node["setName"];
} else if(replSetSelf.replicaSet != node["setName"]) {
// Add other error to the list of errors
replSetSelf.otherErrors.push(new Error("configured mongodb replicaset does not match provided replicaset [" + node["setName"] + "] != [" + replSetSelf.replicaSet + "]"));
var errorMessage = new Error("configured mongodb replicaset does not match provided replicaset [" + node["setName"] + "] != [" + replSetSelf.replicaSet + "]");
replSetSelf.otherErrors.push(errorMessage);
// Close all servers and return an error

@@ -192,2 +191,5 @@ for(var i = 0; i < serverConnections.length; i++) {

}
// Return the error message
return callback(errorMessage);
}

@@ -217,4 +219,3 @@ }

// emit a message saying we got a master and are ready to go and change state to reflect it
if(numberOfConnectedServers == serverConnections.length && (parent.state == 'notConnected')) {
// debug("========================================================== 1")
if(numberOfConnectedServers >= serverConnections.length && (parent.state == 'notConnected')) {
parent.isInitializing = false;

@@ -238,10 +239,5 @@ // If we have no master connection

// debug("#############################################################################")
// debug("numberOfConnectedServers :: " + numberOfConnectedServers)
// debug("numberOfErrorServers :: " + numberOfErrorServers)
// debug("serverConnections :: " + serverConnections.length)
// We had some errored out servers, does not matter as long as we have a master server
// we can write to.
if ((numberOfConnectedServers + numberOfErrorServers) == serverConnections.length) {
if((numberOfConnectedServers + numberOfErrorServers) >= serverConnections.length) {
parent.isInitializing = false;

@@ -283,3 +279,3 @@ // If we have no master connection

});
server.connection.on("error", function(err) {

@@ -286,0 +282,0 @@ if(parent.isInitializing) {

@@ -19,2 +19,3 @@ var Connection = require('../connection').Connection,

this.poolSize = this.options.poolSize == null ? 1 : this.options.poolSize;
this.slaveOk = this.options["slave_ok"];
this.auths = [];

@@ -28,2 +29,7 @@ // Setters and getters

this.__defineGetter__("primary", function() { return self; });
// Add handler of resend message
this.on('resend', function(err) {
self.connection.emit("resend");
});
};

@@ -49,2 +55,5 @@

Server.prototype.connect = function(parent, callback) {
// Ensure parent can do a slave query if it's set
parent.slaveOk = this.slaveOk ? this.slaveOk : parent.slaveOk;
// Let's connect
var server = this;

@@ -100,5 +109,5 @@ server.connection = new Connection(this.host, this.port, this.autoReconnect, {poolSize:this.poolSize});

server.connection.on("reconnect", function(err) {
parent.emit("reconnect");
server.emit('reconnect');
});
server.connection.on("error", function(err) {

@@ -105,0 +114,0 @@ if(parent.listeners("error") != null && parent.listeners("error").length > 0) parent.emit("error", err);

@@ -25,2 +25,3 @@ var QueryCommand = require('./commands/query_command').QueryCommand,

try {
this.native_parser = this.options.native_parser;
var serializer = this.options.native_parser ? require('../../external-libs/bson') : require('./bson/bson');

@@ -41,5 +42,31 @@ this.bson_serializer = serializer;

this.notReplied ={};
this.slaveOk = false;
this.isInitializing = true;
this.auths = [];
// Allow slaveOk
this.slaveOk = this.options["slave_ok"] == null ? false : this.options["slave_ok"];
var self = this;
// Add a listener for the reconnect event
this.serverConfig.on("reconnect", function() {
// Number of current auths
var authLength = self.auths.length;
var numberOfReadyAuth = 0;
if(authLength > 0) {
// If we have any auths fire off the auth message to all the connections
for(var i = 0; i < authLength; i++) {
// Execute auth commands
self.authenticate(self.auths[i].username, self.auths[i].password, function(err, result) {
numberOfReadyAuth = numberOfReadyAuth + 1;
if(numberOfReadyAuth == self.auths.length) {
self.serverConfig.emit("resend");
}
});
}
} else {
self.serverConfig.emit("resend");
}
});
};
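The Db constructor now honours a slave_ok option and registers a reconnect handler that replays any stored credentials before emitting resend, so buffered messages are resent on an authenticated connection (the matching resend listeners are in the connection.js and server.js hunks above). An illustrative setup; host and credentials are placeholders:

var mongodb = require('mongodb');
var server = new mongodb.Server('127.0.0.1', 27017, { auto_reconnect: true, slave_ok: true });
var db = new mongodb.Db('app', server, { slave_ok: true });  // either level works per these hunks

db.open(function (err, db) {
  db.authenticate('user', 'pass', function (err, ok) {
    // credentials end up in db.auths and are re-sent automatically after a reconnect
  });
});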

@@ -50,27 +77,5 @@

Db.prototype.open = function(callback) {
var self = this;
var self = this;
// Set up connections
if(self.serverConfig instanceof Server || self.serverConfig instanceof ReplSetServers) {
// Inner function for authentication
var authenticateFunction = function(self, username, password) {
return function() {
self.authenticate(username, password, function(err, result) {
// Just ignore the result for now
});
}
}
// Add a listener for the reconnect event
self.on("reconnect", function() {
// Number of current auths
var authLength = self.auths.length;
// // If we have any auths fire off the auth message to all the connections
if(self.auths.length > 0) {
for(var i = 0; i < authLength; i++) {
authenticateFunction(self, self.auths[i].username, self.auths[i].password)();
}
}
});
self.serverConfig.connect(self, function(err, result) {

@@ -81,2 +86,3 @@ if(err != null) return callback(err, null);

});
} else {

@@ -232,2 +238,40 @@ return callback(Error("Server parameter must be of type Server or ReplSetServers"), null);

/**
Logout user from server
Fire off on all connections and remove all auth info
**/
Db.prototype.logout = function(options, callback) {
var self = this;
// If the first object is a function
if(typeof options === "function") { callback = options; options = {}}
// Let's generate the logout command object
var logoutCommand = DbCommand.logoutCommand(self, {logout:1, socket:options['socket']});
// For all the connections let's execute the command
var rawConnections = self.serverConfig.allRawConnections();
var numberOfExpectedReturns = rawConnections.length;
for(var index = 0; index < numberOfExpectedReturns; index++) {
// Execute the logout on all raw connections
self.executeCommand(logoutCommand, {writer: rawConnections[index].connection}, function(err, result) {
// Adjust the number of expected results

numberOfExpectedReturns = numberOfExpectedReturns - 1;
// If we are done let's evaluate
if(numberOfExpectedReturns <= 0) {
// Reset auth
self.auths = [];
// Handle any errors
if(err == null && result.documents[0].ok == 1) {
callback(null, true);
} else {
err != null ? callback(err, false) : callback(new Error(result.documents[0].errmsg), false);
}
}
});
}
}
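Db#logout above replaces the old single-command version (removed a few hunks below): it now issues the logout on every raw connection and clears db.auths once all of them have answered. Usage sketch, reusing the db handle from the setup above:

db.logout(function (err, ok) {
  // ok === true once every connection acknowledged the logout;
  // db.auths is reset so nothing is replayed on reconnect
});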
/**
Authenticate against server

@@ -258,3 +302,3 @@ **/

// Nonce used to make authentication request with md5 hash
var nonce = reply.documents[0].nonce;
var nonce = reply.documents[0].nonce;
// Execute command

@@ -316,9 +360,2 @@ self.executeCommand(DbCommand.createAuthenticationCommand(self, username, password, nonce), {writer: rawConnections[index].connection}, function(err, result) {

/**
Logout user (if authenticated)
**/
Db.prototype.logout = function(callback) {
this.executeCommand(DbCommand.createLogoutCommand(this), callback);
};
/**
Create Collection

@@ -545,7 +582,2 @@ **/

// debug("===================================================== executeCommnad")
// debug(" read :: " + read)
// debug(" safe :: " + safe)
// debug(" writer :: " + writer)
var errorCommand = null;

@@ -577,5 +609,2 @@ if(safe == true) {

self.serverConfig.connect(self, function(err, result) {
// debug("============================================================ reconnectAttemp")
// debug("err :: " + inspect(err))
// Initialize

@@ -594,3 +623,2 @@ self.isInitializing = true;

// for the other instances fire the message
// debug("=========================== attempt read :: 2 :: " + read)
var writer = read ? self.serverConfig.checkoutReader() : self.serverConfig.checkoutWriter();

@@ -619,3 +647,4 @@ // If we got safe set

});
}
}
// Force a reconnect after self.serverConfig.reconnectWait seconds

@@ -686,2 +715,2 @@ setTimeout(reconnectAttempt, self.serverConfig.reconnectWait);

});
}
}

@@ -88,16 +88,16 @@ var BinaryParser = require('../bson/binary_parser').BinaryParser,

Chunk.prototype.readSlice = function(length) {
if ((this.length() - this.internalPosition + 1) >= length) {
var data = null;
if (this.data.buffer != null) { //Pure BSON
data = this.data.buffer.slice(this.internalPosition, this.internalPosition + length);
} else { //Native BSON
data = new Buffer(length);
//length = data.write(this.data.read(this.internalPosition, length), 'binary', 0);
length = this.data.readInto(data, this.internalPosition);
}
this.internalPosition = this.internalPosition + length;
return data;
} else {
return null;
if ((this.length() - this.internalPosition + 1) >= length) {
var data = null;
if (this.data.buffer != null) { //Pure BSON
data = this.data.buffer.slice(this.internalPosition, this.internalPosition + length);
} else { //Native BSON
data = new Buffer(length);
//length = data.write(this.data.read(this.internalPosition, length), 'binary', 0);
length = this.data.readInto(data, this.internalPosition);
}
this.internalPosition = this.internalPosition + length;
return data;
} else {
return null;
}
};

@@ -104,0 +104,0 @@

@@ -20,2 +20,5 @@ /**

var REFERENCE_BY_FILENAME = 0,
REFERENCE_BY_ID = 1;
/**

@@ -48,5 +51,11 @@ * A class representation of a file stored in GridFS.

*/
var GridStore = exports.GridStore = function(db, filename, mode, options) {
this.db = db;
this.filename = filename;
var GridStore = exports.GridStore = function(db, fileIdObject, mode, options) {
this.db = db;
// set grid referencetype
this.referenceBy = typeof fileIdObject == 'string' ? 0 : 1;
this.filename = fileIdObject;
this.fileId = fileIdObject;
// Set up the rest
this.mode = mode == null ? "r" : mode;

@@ -56,3 +65,5 @@ this.options = options == null ? {} : options;

this.position = 0;
// Set default chunk size
this.internalChunkSize = Chunk.DEFAULT_CHUNK_SIZE;
/**

@@ -65,3 +76,4 @@ * The chunk size used by this file.

*/
this.__defineGetter__("chunkSize", function() { return this.internalChunkSize; });
this.__defineGetter__("chunkSize", function() {
return this.internalChunkSize; });
this.__defineSetter__("chunkSize", function(value) {

@@ -127,61 +139,100 @@ if(!(this.mode[0] == "w" && this.position == 0 && this.uploadDate == null)) {

}
// Create the query
var query = self.referenceBy == REFERENCE_BY_ID ? {_id:self.fileId} : {filename:self.filename};
query = self.fileId == null && this.filename == null ? null : query;
// Fetch the chunks
self.chunkCollection(function(err, chunkCollection) {
collection.find({'filename':self.filename}, function(err, cursor) {
// Fetch the file
cursor.nextObject(function(err, doc) {
// Check if the collection for the files exists, otherwise prepare the new one
if(doc != null) {
self.fileId = doc._id;
self.contentType = doc.contentType;
self.internalChunkSize = doc.chunkSize;
self.uploadDate = doc.uploadDate;
self.aliases = doc.aliases;
self.length = doc.length;
self.metadata = doc.metadata;
self.internalMd5 = doc.md5;
} else {
self.fileId = new self.db.bson_serializer.ObjectID();
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE;
self.internalChunkSize = Chunk.DEFAULT_CHUNK_SIZE;
self.length = 0;
}
if(query != null) {
collection.find(query, function(err, cursor) {
// Fetch the file
cursor.nextObject(function(err, doc) {
// Check if the collection for the files exists, otherwise prepare the new one
if(doc != null) {
self.fileId = doc._id;
self.contentType = doc.contentType;
self.internalChunkSize = doc.chunkSize;
self.uploadDate = doc.uploadDate;
self.aliases = doc.aliases;
self.length = doc.length;
self.metadata = doc.metadata;
self.internalMd5 = doc.md5;
} else {
self.fileId = new self.db.bson_serializer.ObjectID();
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE;
self.internalChunkSize = self.internalChunkSize == null ? Chunk.DEFAULT_CHUNK_SIZE : self.internalChunkSize;
self.length = 0;
}
// Process the mode of the object
if(self.mode == "r") {
// chunkCollection.ensureIndex([['files_id', 1], ['n', 1]], function(err, index) {
self.nthChunk(0, function(err, chunk) {
self.currentChunk = chunk;
self.position = 0;
callback(null, self);
});
// });
} else if(self.mode == "w") {
self.chunkCollection(function(err, collection2) {
// Delete any existing chunks
self.deleteChunks(function(err, result) {
self.currentChunk = new Chunk(self, {'n':0});
self.contentType = self.options['content_type'] == null ? self.contentType : self.options['content_type'];
self.internalChunkSize = self.options['chunk_size'] == null ? self.internalChunkSize : self.options['chunk_size'];
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata'];
// Process the mode of the object
if(self.mode == "r") {
self.nthChunk(0, function(err, chunk) {
self.currentChunk = chunk;
self.position = 0;
callback(null, self);
});
} else if(self.mode == "w") {
self.chunkCollection(function(err, collection2) {
// Delete any existing chunks
self.deleteChunks(function(err, result) {
self.currentChunk = new Chunk(self, {'n':0});
self.contentType = self.options['content_type'] == null ? self.contentType : self.options['content_type'];
self.internalChunkSize = self.options['chunk_size'] == null ? self.internalChunkSize : self.options['chunk_size'];
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata'];
self.position = 0;
callback(null, self);
});
});
} else if(self.mode == "w+") {
self.chunkCollection(function(err, collection) {
self.nthChunk(self.lastChunkNumber(), function(err, chunk) {
// Set the current chunk
self.currentChunk = chunk == null ? new Chunk(self, {'n':0}) : chunk;
self.currentChunk.position = self.currentChunk.data.length();
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata'];
self.position = self.length;
callback(null, self);
});
});
} else {
callback(new Error("Illegal mode " + self.mode), null);
}
});
});
} else {
// Write only mode
self.fileId = new self.db.bson_serializer.ObjectID();
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE;
self.internalChunkSize = self.internalChunkSize == null ? Chunk.DEFAULT_CHUNK_SIZE : self.internalChunkSize;
self.length = 0;
// No file exists set up write mode
if(self.mode == "w") {
self.chunkCollection(function(err, collection2) {
// Delete any existing chunks
self.deleteChunks(function(err, result) {
self.currentChunk = new Chunk(self, {'n':0});
self.contentType = self.options['content_type'] == null ? self.contentType : self.options['content_type'];
self.internalChunkSize = self.options['chunk_size'] == null ? self.internalChunkSize : self.options['chunk_size'];
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata'];
self.position = 0;
callback(null, self);
});
} else if(self.mode == "w+") {
self.chunkCollection(function(err, collection) {
self.nthChunk(self.lastChunkNumber(), function(err, chunk) {
// Set the current chunk
self.currentChunk = chunk == null ? new Chunk(self, {'n':0}) : chunk;
self.currentChunk.position = self.currentChunk.data.length();
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata'];
self.position = self.length;
callback(null, self);
});
});
} else {
callback(new Error("Illegal mode " + self.mode), null);
}
});
});
});
} else if(self.mode == "w+") {
self.chunkCollection(function(err, collection) {
self.nthChunk(self.lastChunkNumber(), function(err, chunk) {
// Set the current chunk
self.currentChunk = chunk == null ? new Chunk(self, {'n':0}) : chunk;
self.currentChunk.position = self.currentChunk.data.length();
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata'];
self.position = self.length;
callback(null, self);
});
});
} else {
callback(new Error("Illegal mode " + self.mode), null);
}
}
});
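GridStore can now be addressed by _id as well as by filename: a string first argument keeps the old filename behaviour, anything else (typically an ObjectID) is treated as the file's _id and the open query is built from it. A hedged sketch of the usual open/write/close flow, reusing an open db handle from the earlier sketches; close appears to hand back the files-collection document per the save hunks below:

var GridStore = mongodb.GridStore;
var ObjectID = db.bson_serializer.ObjectID;   // serializer-provided ObjectID, as used above

var fileId = new ObjectID();
var gs = new GridStore(db, fileId, 'w');      // referenced by id instead of filename
gs.open(function (err, gs) {
  gs.write('hello gridfs', function (err, gs) {
    gs.close(function (err, fileDoc) {
      GridStore.exist(db, fileId, function (err, exists) { /* true */ });
    });
  });
});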

@@ -227,5 +278,2 @@ });

self.currentChunk = chunk;
// debug("=========================== err :: " + err)
// debug("=========================== err :: " + inspect(result))
// debug("============================= offset :: " + offset)

@@ -235,3 +283,3 @@ if(offset >= stats.size) {

self.close(function(err, result) {
return callback(null, self);
return callback(null, result);
})

@@ -248,23 +296,2 @@ } else {

process.nextTick(writeChunk);
// var startIndices = [];
// for (var i = 0; i < stats.size; i += self.chunkSize) startIndices.push(i);
//
// startIndices.forEach(function (start, index, startIndices) {
// process.nextTick(function () {
// fs.read(file, self.chunkSize, start, 'binary', function (err, data, bytesRead) {
// var chunk = new Chunk(self, {n: index});
// chunk.write(data, function (err, chunk) {
// chunk.save(function (err, result) {
// if (index == startIndices.length -1) {
// self.currentChunk = chunk;
// self.close(function (err, result) {
// callback(null, self);
// });
// }
// });
// });
// });
// });
// });
});

@@ -292,6 +319,7 @@ });

var finalClose = close == null ? false : close;
string = string instanceof Buffer ? string.toString("binary") : string;
// Check if we are trying to write a buffer and use the right method
if(string instanceof Buffer) return this.writeBuffer(string, close, callback);
// Otherwise let's write the data
if(self.mode[0] != "w") {
callback(new Error(self.filename + " not opened for writing"), null);
callback(new Error((self.referenceBy == REFERENCE_BY_ID ? self.toHexString() : self.filename) + " not opened for writing"), null);
} else {

@@ -341,3 +369,3 @@ if((self.currentChunk.position + string.length) > self.chunkSize) {

if(self.mode[0] != "w") {
callback(new Error(self.filename + " not opened for writing"), null);
callback(new Error((self.referenceBy == REFERENCE_BY_ID ? self.toHexString() : self.filename) + " not opened for writing"), null);
}

@@ -455,3 +483,3 @@ else {

files.save(mongoObject, {safe:true}, function(err, doc) {
callback(err, doc);
callback(err, mongoObject);
});

@@ -464,3 +492,3 @@ });

files.save(mongoObject, {safe:true}, function(err, doc) {
callback(err, doc);
callback(err, mongoObject);
});

@@ -476,3 +504,3 @@ });

files.save(mongoObject, {safe:true}, function(err, doc) {
callback(err, doc);
callback(err, mongoObject);
});

@@ -698,13 +726,9 @@ });

var finalLength = length == null ? self.length - self.position : length;
var numberToRead = finalLength;
if((self.currentChunk.length() - self.currentChunk.position + 1 + finalBuffer.length) >= finalLength) {
finalBuffer = finalBuffer + self.currentChunk.read(finalLength - finalBuffer.length);
numberToRead = numberToRead - finalLength;
self.position = finalBuffer.length;
callback(null, finalBuffer);
} else {
finalBuffer = finalBuffer + self.currentChunk.read(self.currentChunk.length());
numberToRead = numberToRead - self.currentChunk.length();
finalBuffer = finalBuffer + self.currentChunk.read(self.currentChunk.length());
// Load the next chunk and read some more

@@ -719,42 +743,38 @@ self.nthChunk(self.currentChunk.chunkNumber + 1, function(err, chunk) {

GridStore.prototype.readBuffer = function(length, buffer, callback) {
var self = this;
var args = Array.prototype.slice.call(arguments, 0);
callback = args.pop();
length = args.length ? args.shift() : null;
buffer = args.length ? args.shift() : null;
var left = Math.min(self.length - self.position, length);
if(buffer===null) {
buffer = new Buffer(left);
}
var leftInCurrentChunk = self.currentChunk.length()-self.currentChunk.position;
// Everything can be read from this chunk
if((leftInCurrentChunk >= left) && leftInCurrentChunk!==0) {
var slice = self.currentChunk.readSlice(left);
self.position += left;
callback(null, slice);
}
else {
if(leftInCurrentChunk > 0) {
var slice = self.currentChunk.readSlice(leftInCurrentChunk);
self.position += leftInCurrentChunk;
slice.copy(buffer, 0, 0, leftInCurrentChunk);
}
var leftForNextChunk = left - leftInCurrentChunk;
var subBuffer = buffer.slice(leftInCurrentChunk, leftInCurrentChunk + leftForNextChunk);
self.nthChunk(self.currentChunk.chunkNumber+1, function(err, chunk) {
self.currentChunk = chunk;
self.readBuffer(leftForNextChunk, subBuffer, function(err, subb) {
if(subb!==subBuffer) {
// readBuffer returned its own buffer slice
subb.copy(buffer, leftInCurrentChunk, 0, subb.length);
}
callback(err, buffer);
});
});
}
var self = this;
var args = Array.prototype.slice.call(arguments, 0);
callback = args.pop();
length = args.length ? args.shift() : null;
buffer = args.length ? args.shift() : null;
// The data is a c-terminated string and thus the length - 1
var finalLength = length == null ? self.length - self.position : length;
var finalBuffer = buffer == null ? new Buffer(finalLength) : buffer;
// Add a index to buffer to keep track of writing position or apply current index
finalBuffer._index = buffer != null && buffer._index != null ? buffer._index : 0;
if((self.currentChunk.length() - self.currentChunk.position + 1 + finalBuffer._index) >= finalLength) {
var slice = self.currentChunk.readSlice(finalLength - finalBuffer._index);
// Copy content to final buffer
slice.copy(finalBuffer, finalBuffer._index);
// Update internal position
self.position = finalBuffer.length;
// Check if we don't have a file at all
if(finalLength == 0 && finalBuffer.length == 0) return callback(new Error("File does not exist"), null);
// Else return data
callback(null, finalBuffer);
} else {
var slice = self.currentChunk.readSlice(self.currentChunk.length());
// Copy content to final buffer
slice.copy(finalBuffer, finalBuffer._index);
// Update index position
finalBuffer._index += slice.length;
// Load next chunk and read more
self.nthChunk(self.currentChunk.chunkNumber + 1, function(err, chunk) {
self.currentChunk = chunk;
self.readBuffer(length, finalBuffer, callback);
});
}
}

@@ -920,3 +940,3 @@

*/
GridStore.exist = function(db, name, rootCollection, callback) {
GridStore.exist = function(db, fileIdObject, rootCollection, callback) {
var args = Array.prototype.slice.call(arguments, 2);

@@ -926,5 +946,9 @@ callback = args.pop();

// Fetch collection
var rootCollectionFinal = rootCollection != null ? rootCollection : GridStore.DEFAULT_ROOT_COLLECTION;
db.collection(rootCollectionFinal + ".files", function(err, collection) {
collection.find({'filename':name}, function(err, cursor) {
// Build query
var query = typeof fileIdObject == 'string' ? {'filename':fileIdObject} : {'_id':fileIdObject};
// Attempt to locate file
collection.find(query, function(err, cursor) {
cursor.nextObject(function(err, item) {

@@ -947,7 +971,17 @@ callback(null, item == null ? false : true);

*/
GridStore.list = function(db, rootCollection, callback) {
GridStore.list = function(db, rootCollection, options, callback) {
var args = Array.prototype.slice.call(arguments, 1);
callback = args.pop();
rootCollection = args.length ? args.shift() : null;
options = args.length ? args.shift() : {};
// Ensure we have correct values
if(rootCollection != null && typeof rootCollection == 'object') {
options = rootCollection;
rootCollection = null;
}
// Check if we are returning by id not filename
var byId = options['id'] != null ? options['id'] : false;
// Fetch item
var rootCollectionFinal = rootCollection != null ? rootCollection : GridStore.DEFAULT_ROOT_COLLECTION;

@@ -959,3 +993,3 @@ var items = [];

if(item != null) {
items.push(item.filename);
items.push(byId ? item._id : item.filename);
} else {
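GridStore.list grows an options argument: passing { id: true } returns the files' _id values instead of filenames, while the old call forms keep working. Illustrative calls (db and GridStore as in the sketches above; 'my_fs' is a placeholder root collection):

// List _ids rather than filenames (new in this release)
GridStore.list(db, { id: true }, function (err, ids) {
  // ids: ObjectIDs from the default root collection's .files
});

// Old forms still work
GridStore.list(db, function (err, names) { /* filenames */ });
GridStore.list(db, 'my_fs', function (err, names) { /* custom root collection */ });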

@@ -1099,79 +1133,79 @@ callback(null, items);

var ReadStream = function(autoclose, gstore) {
if (!(this instanceof ReadStream)) return new ReadStream(autoclose, gstore);
Stream.call(this);
if (!(this instanceof ReadStream)) return new ReadStream(autoclose, gstore);
Stream.call(this);
this.autoclose = !!autoclose;
this.gstore = gstore;
this.autoclose = !!autoclose;
this.gstore = gstore;
this.finalLength = gstore.length - gstore.position;
this.completedLength = 0;
this.finalLength = gstore.length - gstore.position;
this.completedLength = 0;
this.paused = false;
this.readable = true;
this.pendingChunk = null;
this.paused = false;
this.readable = true;
this.pendingChunk = null;
var self = this;
process.nextTick(function() {
self._execute();
});
var self = this;
process.nextTick(function() {
self._execute();
});
};
util.inherits(ReadStream, Stream);
ReadStream.prototype._execute = function() {
if(this.paused === true || this.readable === false) {
return;
}
if (this.paused === true || this.readable === false) {
return;
}
var gstore = this.gstore;
var self = this;
var gstore = this.gstore;
var self = this;
var last = false;
var toRead = 0;
var last = false;
var toRead = 0;
if ((gstore.currentChunk.length() - gstore.currentChunk.position + 1 + self.completedLength) >= self.finalLength) {
toRead = self.finalLength - self.completedLength;
last = true;
} else {
toRead = gstore.currentChunk.length();
}
if ((gstore.currentChunk.length() - gstore.currentChunk.position + 1 + self.completedLength) >= self.finalLength) {
toRead = self.finalLength - self.completedLength;
last = true;
} else {
toRead = gstore.currentChunk.length();
}
var data = gstore.currentChunk.readSlice(toRead);
if (data != null) {
self.completedLength += data.length;
self.pendingChunk = null;
self.emit("data", data);
}
var data = gstore.currentChunk.readSlice(toRead);
if (data != null) {
self.completedLength += data.length;
self.pendingChunk = null;
self.emit("data", data);
if (last === true) {
self.readable = false;
self.emit("end");
if (self.autoclose === true) {
if (gstore.mode[0] == "w") {
gstore.close(function(err, doc) {
if (err) {
self.emit("error", err);
return;
}
self.emit("close", doc);
});
} else {
self.emit("close");
}
}
if (last === true) {
} else {
gstore.nthChunk(gstore.currentChunk.chunkNumber + 1, function(err, chunk) {
if (err) {
self.readable = false;
self.emit("end");
if (self.autoclose === true) {
if (gstore.mode[0] == "w") {
gstore.close(function(err, doc) {
if (err) {
self.emit("error", err);
return;
}
self.emit("close", doc);
});
} else {
self.emit("close");
}
}
} else {
gstore.nthChunk(gstore.currentChunk.chunkNumber + 1, function(err, chunk) {
if (err) {
self.readable = false;
self.emit("error", err);
return;
}
self.pendingChunk = chunk;
if (self.paused === true) {
return;
}
gstore.currentChunk = self.pendingChunk;
self._execute();
});
}
self.emit("error", err);
return;
}
self.pendingChunk = chunk;
if (self.paused === true) {
return;
}
gstore.currentChunk = self.pendingChunk;
self._execute();
});
}
};

@@ -1193,6 +1227,5 @@

GridStore.prototype.stream = function(autoclose) {
return new ReadStream(autoclose, this);
return new ReadStream(autoclose, this);
};
/**

@@ -1202,3 +1235,3 @@ * Pauses this stream, then no farther events will be fired

ReadStream.prototype.pause = function() {
this.paused = true;
this.paused = true;
};

@@ -1210,11 +1243,13 @@

ReadStream.prototype.resume = function() {
this.paused = false;
var self = this;
if (self.pendingChunk) {
self.currentChunk = self.pendingChunk;
process.nextTick(function() {
self._execute();
});
}
this.paused = false;
var self = this;
if (self.pendingChunk) {
self.currentChunk = self.pendingChunk;
process.nextTick(function() {
self._execute();
});
}
};

@@ -10,34 +10,2 @@

[ 'bson/binary_parser'
, 'commands/base_command'
, 'commands/db_command'
, 'commands/delete_command'
, 'commands/get_more_command'
, 'commands/insert_command'
, 'commands/kill_cursor_command'
, 'commands/query_command'
, 'commands/update_command'
, 'responses/mongo_reply'
, 'admin'
, 'collection'
, 'connections/server'
, 'connections/repl_set_servers'
, 'connection'
, 'cursor'
, 'db'
, 'goog/math/long'
, 'crypto/md5'
, 'gridfs/chunk'
, 'gridfs/gridstore'].forEach(function (path) {
var module = require('./' + path);
for (var i in module) {
exports[i] = module[i];
}
});
// Exports all the classes for the NATIVE JS BSON Parser
exports.native = function() {
var classes = {};
// Map all the classes
[ 'bson/binary_parser'
, '../../external-libs/bson/bson'
, 'commands/base_command'

@@ -59,3 +27,5 @@ , 'commands/db_command'

, 'db'
, 'goog/math/long'
, 'crypto/md5'
, 'gridfs/grid'
, 'gridfs/chunk'

@@ -65,4 +35,36 @@ , 'gridfs/gridstore'].forEach(function (path) {

for (var i in module) {
classes[i] = module[i];
exports[i] = module[i];
}
});
// Exports all the classes for the NATIVE JS BSON Parser
exports.native = function() {
var classes = {};
// Map all the classes
[ 'bson/binary_parser'
, '../../external-libs/bson/bson'
, 'commands/base_command'
, 'commands/db_command'
, 'commands/delete_command'
, 'commands/get_more_command'
, 'commands/insert_command'
, 'commands/kill_cursor_command'
, 'commands/query_command'
, 'commands/update_command'
, 'responses/mongo_reply'
, 'admin'
, 'collection'
, 'connections/server'
, 'connections/repl_set_servers'
, 'connection'
, 'cursor'
, 'db'
, 'crypto/md5'
, 'gridfs/grid'
, 'gridfs/chunk'
, 'gridfs/gridstore'].forEach(function (path) {
var module = require('./' + path);
for (var i in module) {
classes[i] = module[i];
}
});

@@ -78,26 +80,27 @@ // Return classes list

[ 'bson/binary_parser'
, './bson/bson'
, 'commands/base_command'
, 'commands/db_command'
, 'commands/delete_command'
, 'commands/get_more_command'
, 'commands/insert_command'
, 'commands/kill_cursor_command'
, 'commands/query_command'
, 'commands/update_command'
, 'responses/mongo_reply'
, 'admin'
, 'collection'
, 'connections/server'
, 'connections/repl_set_servers'
, 'connection'
, 'cursor'
, 'db'
, 'crypto/md5'
, 'gridfs/chunk'
, 'gridfs/gridstore'].forEach(function (path) {
var module = require('./' + path);
for (var i in module) {
classes[i] = module[i];
}
, './bson/bson'
, 'commands/base_command'
, 'commands/db_command'
, 'commands/delete_command'
, 'commands/get_more_command'
, 'commands/insert_command'
, 'commands/kill_cursor_command'
, 'commands/query_command'
, 'commands/update_command'
, 'responses/mongo_reply'
, 'admin'
, 'collection'
, 'connections/server'
, 'connections/repl_set_servers'
, 'connection'
, 'cursor'
, 'db'
, 'crypto/md5'
, 'gridfs/grid'
, 'gridfs/chunk'
, 'gridfs/gridstore'].forEach(function (path) {
var module = require('./' + path);
for (var i in module) {
classes[i] = module[i];
}
});

@@ -104,0 +107,0 @@ // Return classes list
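Both export blocks above (the default set and the native() variant) now include gridfs/grid, so the new Grid helper is reachable from either parser flavour; grid.js itself is not shown in this diff, so treating it as a class export is an assumption. The driver's own tests pick the flavour like this (TEST_NATIVE is the tests' environment switch):

var mongodb = process.env['TEST_NATIVE'] != null
  ? require('../lib/mongodb').native()   // classes backed by the external C++ BSON parser
  : require('../lib/mongodb').pure();    // pure-JS BSON classes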

{ "name" : "mongodb"
, "description" : "A node.js driver for MongoDB"
, "version" : "0.9.6-1"
, "version" : "0.9.6-4"
, "author" : "Christian Amor Kvalheim <christkv@gmail.com>"

@@ -5,0 +5,0 @@ , "contributors" : [ "Nathan White <nw@nwhite.net>",

@@ -53,4 +53,6 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();

automatic_connect_client.open(function(err, automatic_connect_client) {
// Listener for closing event
var closeListener = function(has_error) {
// Remove the listener for the close to avoid loop

@@ -60,2 +62,3 @@ automatic_connect_client.removeListener("close", closeListener);

automatic_connect_client.collection('test_object_id_generation.data2', function(err, collection) {
// Insert another test document and collect using ObjectId

@@ -82,220 +85,219 @@ collection.insert({"name":"Patty", "age":34}, {safe:true}, function(err, ids) {

// Test that error conditions are handled correctly
shouldCorrectlyHandleConnectionErrors : function(test) {
// Test error handling for single server connection
var serverConfig = new Server("127.0.0.1", 21017, {auto_reconnect: true});
var error_client = new Db(MONGODB, serverConfig, {native_parser: (process.env['TEST_NATIVE'] != null) ? true : false});
error_client.on("error", function(err) {});
error_client.on("close", function(connection) {
test.ok(typeof connection == typeof serverConfig);
test.equal("127.0.0.1", connection.host);
test.equal(21017, connection.port);
test.equal(true, connection.autoReconnect);
test.done();
});
error_client.open(function(err, error_client) {});
},
shouldCorrectlyExecuteEvalFunctions : function(test) {
client.eval('function (x) {return x;}', [3], function(err, result) {
test.equal(3, result);
});
client.eval('function (x) {db.test_eval.save({y:x});}', [5], function(err, result) {
// Locate the entry
client.collection('test_eval', function(err, collection) {
collection.findOne(function(err, item) {
test.equal(5, item.y);
});
});
});
client.eval('function (x, y) {return x + y;}', [2, 3], function(err, result) {
test.equal(5, result);
});
client.eval('function () {return 5;}', function(err, result) {
test.equal(5, result);
});
client.eval('2 + 3;', function(err, result) {
test.equal(5, result);
});
client.eval(new client.bson_serializer.Code("2 + 3;"), function(err, result) {
test.equal(5, result);
});
client.eval(new client.bson_serializer.Code("return i;", {'i':2}), function(err, result) {
test.equal(2, result);
});
client.eval(new client.bson_serializer.Code("i + 3;", {'i':2}), function(err, result) {
test.equal(5, result);
});
client.eval("5 ++ 5;", function(err, result) {
test.ok(err instanceof Error);
test.ok(err.message != null);
// Let's close the db
test.done();
});
},
shouldCorrectlyDereferenceDbRef : function(test) {
client.createCollection('test_deref', function(err, collection) {
collection.insert({'a':1}, {safe:true}, function(err, ids) {
collection.remove({}, {safe:true}, function(err, result) {
collection.count(function(err, count) {
test.equal(0, count);
// Execute deref a db reference
client.dereference(new client.bson_serializer.DBRef("test_deref", new client.bson_serializer.ObjectID()), function(err, result) {
collection.insert({'x':'hello'}, {safe:true}, function(err, ids) {
collection.findOne(function(err, document) {
test.equal('hello', document.x);
client.dereference(new client.bson_serializer.DBRef("test_deref", document._id), function(err, result) {
test.equal('hello', document.x);
client.dereference(new client.bson_serializer.DBRef("test_deref", 4), function(err, result) {
var obj = {'_id':4};
collection.insert(obj, {safe:true}, function(err, ids) {
client.dereference(new client.bson_serializer.DBRef("test_deref", 4), function(err, document) {
test.equal(obj['_id'], document._id);
collection.remove({}, {safe:true}, function(err, result) {
collection.insert({'x':'hello'}, {safe:true}, function(err, ids) {
client.dereference(new client.bson_serializer.DBRef("test_deref", null), function(err, result) {
test.equal(null, result);
// Let's close the db
test.done();
});
});
});
});
});
});
});
});
});
});
})
})
})
});
},
shouldCorrectlyRenameCollection : function(test) {
client.createCollection('test_rename_collection', function(err, collection) {
client.createCollection('test_rename_collection2', function(err, collection) {
client.collection('test_rename_collection', function(err, collection1) {
client.collection('test_rename_collection2', function(err, collection2) {
// Assert rename
collection1.rename(5, function(err, collection) {
test.ok(err instanceof Error);
test.equal("collection name must be a String", err.message);
});
collection1.rename("", function(err, collection) {
test.ok(err instanceof Error);
test.equal("collection names cannot be empty", err.message);
});
collection1.rename("te$t", function(err, collection) {
test.ok(err instanceof Error);
test.equal("collection names must not contain '$'", err.message);
});
collection1.rename(".test", function(err, collection) {
test.ok(err instanceof Error);
test.equal("collection names must not start or end with '.'", err.message);
});
collection1.rename("test.", function(err, collection) {
test.ok(err instanceof Error);
test.equal("collection names must not start or end with '.'", err.message);
});
collection1.rename("tes..t", function(err, collection) {
test.equal("collection names cannot be empty", err.message);
});
collection1.count(function(err, count) {
test.equal(0, count);
collection1.insert([{'x':1}, {'x':2}], {safe:true}, function(err, docs) {
collection1.count(function(err, count) {
test.equal(2, count);
collection1.rename('test_rename_collection2', function(err, collection) {
test.ok(err instanceof Error);
test.ok(err.message.length > 0);
collection1.rename('test_rename_collection3', function(err, collection) {
test.equal("test_rename_collection3", collection.collectionName);
// Check count
collection.count(function(err, count) {
test.equal(2, count);
// Let's close the db
test.done();
});
});
});
});
})
})
collection2.count(function(err, count) {
test.equal(0, count);
})
});
});
});
});
},
shouldCorrectlyHandleFailedConnection : function(test) {
var fs_client = new Db(MONGODB, new Server("127.0.0.1", 27117, {auto_reconnect: false}), {native_parser: (process.env['TEST_NATIVE'] != null)});
fs_client.bson_deserializer = client.bson_deserializer;
fs_client.bson_serializer = client.bson_serializer;
fs_client.pkFactory = client.pkFactory;
fs_client.open(function(err, fs_client) {
test.ok(err != null)
test.done();
})
},
shouldCorrectlyResaveDBRef : function(test) {
client.dropCollection('test_resave_dbref', function() {
client.createCollection('test_resave_dbref', function(err, collection) {
test.ifError(err);
collection.insert({'name': 'parent'}, {safe : true}, function(err, objs) {
test.ok(objs && objs.length == 1 && objs[0]._id != null);
var parent = objs[0];
var child = {'name' : 'child', 'parent' : new client.bson_serializer.DBRef("test_resave_dbref", parent._id)};
collection.insert(child, {safe : true}, function(err, objs) {
test.ifError(err);
collection.findOne({'name' : 'child'}, function(err, child) { //Child deserialized
test.ifError(err);
test.ok(child != null);
collection.save(child, {save : true}, function(err) {
test.ifError(err); //Child node with dbref resaved!
collection.findOne({'parent' : new client.bson_serializer.DBRef("test_resave_dbref", parent._id)},
function(err, child) {
test.ifError(err);
test.ok(child != null);//!!!! Main test point!
test.done();
})
});
});
});
});
});
});
}
// // Test that error conditions are handled correctly
// shouldCorrectlyHandleConnectionErrors : function(test) {
// // Test error handling for single server connection
// var serverConfig = new Server("127.0.0.1", 21017, {auto_reconnect: true});
// var error_client = new Db(MONGODB, serverConfig, {native_parser: (process.env['TEST_NATIVE'] != null) ? true : false});
//
// error_client.on("error", function(err) {});
// error_client.on("close", function(connection) {
// test.ok(typeof connection == typeof serverConfig);
// test.equal("127.0.0.1", connection.host);
// test.equal(21017, connection.port);
// test.equal(true, connection.autoReconnect);
// test.done();
// });
// error_client.open(function(err, error_client) {});
// },
//
// shouldCorrectlyExecuteEvalFunctions : function(test) {
// client.eval('function (x) {return x;}', [3], function(err, result) {
// test.equal(3, result);
// });
//
// client.eval('function (x) {db.test_eval.save({y:x});}', [5], function(err, result) {
// // Locate the entry
// client.collection('test_eval', function(err, collection) {
// collection.findOne(function(err, item) {
// test.equal(5, item.y);
// });
// });
// });
//
// client.eval('function (x, y) {return x + y;}', [2, 3], function(err, result) {
// test.equal(5, result);
// });
//
// client.eval('function () {return 5;}', function(err, result) {
// test.equal(5, result);
// });
//
// client.eval('2 + 3;', function(err, result) {
// test.equal(5, result);
// });
//
// client.eval(new client.bson_serializer.Code("2 + 3;"), function(err, result) {
// test.equal(5, result);
// });
//
// client.eval(new client.bson_serializer.Code("return i;", {'i':2}), function(err, result) {
// test.equal(2, result);
// });
//
// client.eval(new client.bson_serializer.Code("i + 3;", {'i':2}), function(err, result) {
// test.equal(5, result);
// });
//
// client.eval("5 ++ 5;", function(err, result) {
// test.ok(err instanceof Error);
// test.ok(err.message != null);
// // Let's close the db
// test.done();
// });
// },
//
// shouldCorrectlyDereferenceDbRef : function(test) {
// client.createCollection('test_deref', function(err, collection) {
// collection.insert({'a':1}, {safe:true}, function(err, ids) {
// collection.remove({}, {safe:true}, function(err, result) {
// collection.count(function(err, count) {
// test.equal(0, count);
//
// // Execute deref a db reference
// client.dereference(new client.bson_serializer.DBRef("test_deref", new client.bson_serializer.ObjectID()), function(err, result) {
// collection.insert({'x':'hello'}, {safe:true}, function(err, ids) {
// collection.findOne(function(err, document) {
// test.equal('hello', document.x);
//
// client.dereference(new client.bson_serializer.DBRef("test_deref", document._id), function(err, result) {
// test.equal('hello', document.x);
//
// client.dereference(new client.bson_serializer.DBRef("test_deref", 4), function(err, result) {
// var obj = {'_id':4};
//
// collection.insert(obj, {safe:true}, function(err, ids) {
// client.dereference(new client.bson_serializer.DBRef("test_deref", 4), function(err, document) {
//
// test.equal(obj['_id'], document._id);
// collection.remove({}, {safe:true}, function(err, result) {
// collection.insert({'x':'hello'}, {safe:true}, function(err, ids) {
// client.dereference(new client.bson_serializer.DBRef("test_deref", null), function(err, result) {
// test.equal(null, result);
// // Let's close the db
// test.done();
// });
// });
// });
// });
// });
// });
// });
// });
// });
// });
// })
// })
// })
// });
// },
//
// shouldCorrectlyRenameCollection : function(test) {
// client.createCollection('test_rename_collection', function(err, collection) {
// client.createCollection('test_rename_collection2', function(err, collection) {
// client.collection('test_rename_collection', function(err, collection1) {
// client.collection('test_rename_collection2', function(err, collection2) {
// // Assert rename
// collection1.rename(5, function(err, collection) {
// test.ok(err instanceof Error);
// test.equal("collection name must be a String", err.message);
// });
//
// collection1.rename("", function(err, collection) {
// test.ok(err instanceof Error);
// test.equal("collection names cannot be empty", err.message);
// });
//
// collection1.rename("te$t", function(err, collection) {
// test.ok(err instanceof Error);
// test.equal("collection names must not contain '$'", err.message);
// });
//
// collection1.rename(".test", function(err, collection) {
// test.ok(err instanceof Error);
// test.equal("collection names must not start or end with '.'", err.message);
// });
//
// collection1.rename("test.", function(err, collection) {
// test.ok(err instanceof Error);
// test.equal("collection names must not start or end with '.'", err.message);
// });
//
// collection1.rename("tes..t", function(err, collection) {
// test.equal("collection names cannot be empty", err.message);
// });
//
// collection1.count(function(err, count) {
// test.equal(0, count);
//
// collection1.insert([{'x':1}, {'x':2}], {safe:true}, function(err, docs) {
// collection1.count(function(err, count) {
// test.equal(2, count);
//
// collection1.rename('test_rename_collection2', function(err, collection) {
// test.ok(err instanceof Error);
// test.ok(err.message.length > 0);
//
// collection1.rename('test_rename_collection3', function(err, collection) {
// test.equal("test_rename_collection3", collection.collectionName);
//
// // Check count
// collection.count(function(err, count) {
// test.equal(2, count);
// // Let's close the db
// test.done();
// });
// });
// });
// });
// })
// })
//
// collection2.count(function(err, count) {
// test.equal(0, count);
// })
// });
// });
// });
// });
// },
//
// shouldCorrectlyHandleFailedConnection : function(test) {
// var fs_client = new Db(MONGODB, new Server("127.0.0.1", 27117, {auto_reconnect: false}), {native_parser: (process.env['TEST_NATIVE'] != null)});
// fs_client.bson_deserializer = client.bson_deserializer;
// fs_client.bson_serializer = client.bson_serializer;
// fs_client.pkFactory = client.pkFactory;
// fs_client.open(function(err, fs_client) {
// test.ok(err != null)
// test.done();
// })
// },
//
// shouldCorrectlyResaveDBRef : function(test) {
// client.dropCollection('test_resave_dbref', function() {
// client.createCollection('test_resave_dbref', function(err, collection) {
// test.ifError(err);
// collection.insert({'name': 'parent'}, {safe : true}, function(err, objs) {
// test.ok(objs && objs.length == 1 && objs[0]._id != null);
// var parent = objs[0];
// var child = {'name' : 'child', 'parent' : new client.bson_serializer.DBRef("test_resave_dbref", parent._id)};
// collection.insert(child, {safe : true}, function(err, objs) {
// test.ifError(err);
// collection.findOne({'name' : 'child'}, function(err, child) { //Child deserialized
// test.ifError(err);
// test.ok(child != null);
// collection.save(child, {save : true}, function(err) {
// test.ifError(err); //Child node with dbref resaved!
// collection.findOne({'parent' : new client.bson_serializer.DBRef("test_resave_dbref", parent._id)},
// function(err, child) {
// test.ifError(err);
// test.ok(child != null);//!!!! Main test point!
// test.done();
// })
// });
// });
// });
// });
// });
// });
// }
})

@@ -302,0 +304,0 @@
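
The shouldCorrectlyHandleFailedConnection test above relies on the driver reporting an unreachable server through the open callback rather than throwing. A minimal sketch of that pattern, assuming Db, Server and MONGODB are in scope exactly as they are in these tests (the bson serializer wiring the test copies from client is omitted):

    var unreachable = new Db(MONGODB, new Server("127.0.0.1", 27117, {auto_reconnect: false}));
    unreachable.open(function(err, db) {
      // Nothing listens on 27117, so err is non-null and db must not be used.
      if(err != null) console.log("connect failed as expected: " + err.message);
    });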

@@ -48,4 +48,42 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();

},
// Gridstore tests
shouldCorrectlyExecuteGridStoreExistsByObjectId : function(test) {
var gridStore = new GridStore(client, null, "w");
gridStore.open(function(err, gridStore) {
gridStore.write("hello world!", function(err, gridStore) {
gridStore.close(function(err, result) {
GridStore.exist(client, result._id, function(err, result) {
test.equal(true, result);
})
GridStore.exist(client, new client.bson_serializer.ObjectID(), function(err, result) {
test.equal(false, result);
});
GridStore.exist(client, new client.bson_serializer.ObjectID(), 'another_root', function(err, result) {
test.equal(false, result);
test.done();
});
});
});
});
},
shouldCorrectlySafeFileAndReadFileByObjectId : function(test) {
var gridStore = new GridStore(client, null, "w");
gridStore.open(function(err, gridStore) {
gridStore.write("hello world!", function(err, gridStore) {
gridStore.close(function(err, result) {
// Let's read the file using object Id
GridStore.read(client, result._id, function(err, data) {
test.equal('hello world!', data);
test.done();
});
});
});
});
},
shouldCorrectlyExecuteGridStoreExists : function(test) {
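
The two ObjectId-based GridStore tests added above reduce to one pattern: open a GridStore with a null filename, and close() hands back a file document whose _id can later be passed to GridStore.exist or GridStore.read. A condensed sketch, assuming the same client and GridStore handles used throughout these tests:

    var gridStore = new GridStore(client, null, "w");
    gridStore.open(function(err, gridStore) {
      gridStore.write("hello world!", function(err, gridStore) {
        gridStore.close(function(err, fileDoc) {
          // fileDoc._id is the ObjectID the file was stored under
          GridStore.read(client, fileDoc._id, function(err, data) {
            // data is "hello world!"
          });
        });
      });
    });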

@@ -55,3 +93,3 @@ var gridStore = new GridStore(client, "foobar", "w");

gridStore.write("hello world!", function(err, gridStore) {
gridStore.close(function(err, result) {
gridStore.close(function(err, result) {
GridStore.exist(client, 'foobar', function(err, result) {

@@ -88,3 +126,12 @@ test.equal(true, result);

});
GridStore.list(client, {id:true}, function(err, items) {
var found = false;
items.forEach(function(id) {
test.ok(typeof id == 'object');
});
test.ok(items.length >= 1);
});
GridStore.list(client, 'fs', function(err, items) {

@@ -379,3 +426,3 @@ var found = false;

gridStore.open(function(err, gridStore) {
gridStore.writeFile('./test/gridstore/test_gs_weird_bug.png', function(err, gridStore) {
gridStore.writeFile('./test/gridstore/test_gs_weird_bug.png', function(err, doc) {
GridStore.read(client, 'test_gs_writing_file', function(err, fileData) {

@@ -396,2 +443,25 @@ test.equal(data, fileData)

shouldCorrectlyWriteFileToGridStoreUsingObjectId: function(test) {
var gridStore = new GridStore(client, null, 'w');
var fileSize = fs.statSync('./test/gridstore/test_gs_weird_bug.png').size;
var data = fs.readFileSync('./test/gridstore/test_gs_weird_bug.png', 'binary');
gridStore.open(function(err, gridStore) {
gridStore.writeFile('./test/gridstore/test_gs_weird_bug.png', function(err, doc) {
GridStore.read(client, doc._id, function(err, fileData) {
test.equal(data, fileData)
test.equal(fileSize, fileData.length);
// Ensure we have a md5
var gridStore2 = new GridStore(client, doc._id, 'r');
gridStore2.open(function(err, gridStore2) {
test.ok(gridStore2.md5 != null)
test.done();
});
});
});
});
},
shouldCorrectlyPerformWorkingFiledRead : function(test) {

@@ -430,2 +500,117 @@ var gridStore = new GridStore(client, "test_gs_working_field_read", "w");

},
shouldCorrectlyReadAndWriteFileByObjectId : function(test) {
var gridStore = new GridStore(client, null, "w");
var data = fs.readFileSync("./test/gridstore/test_gs_weird_bug.png", 'binary');
gridStore.open(function(err, gridStore) {
gridStore.write(data, function(err, gridStore) {
gridStore.close(function(err, result) {
          // Read the data back by the file's ObjectId and compare lengths
GridStore.read(client, result._id, function(err, fileData) {
test.equal(data.length, fileData.length);
test.done();
});
});
});
});
},
shouldCorrectlyWriteAndReadJpgImage : function(test) {
var data = fs.readFileSync('./test/gridstore/iya_logo_final_bw.jpg').toString('binary');
var gs = new GridStore(client, "test", "w");
gs.open(function(err, gs) {
gs.write(data, function(err, gs) {
gs.close(function(err, gs) {
// Open and read
var gs2 = new GridStore(client, "test", "r");
gs2.open(function(err, gs) {
gs2.seek(0, function() {
gs2.read(0, function(err, data2) {
test.equal(data, data2);
test.done();
});
});
});
});
})
})
},
shouldCorrectlyReadAndWriteBuffersMultipleChunks : function(test) {
var gridStore = new GridStore(client, null, 'w');
// Force multiple chunks to be stored
gridStore.chunkSize = 5000;
var fileSize = fs.statSync('./test/gridstore/test_gs_weird_bug.png').size;
var data = fs.readFileSync('./test/gridstore/test_gs_weird_bug.png');
gridStore.open(function(err, gridStore) {
// Write the file using writeBuffer
gridStore.writeBuffer(data, function(err, doc) {
gridStore.close(function(err, doc) {
// Read the file using readBuffer
new GridStore(client, doc._id, 'r').open(function(err, gridStore) {
gridStore.readBuffer(function(err, data2) {
test.equal(data.toString('base64'), data2.toString('base64'));
test.done();
})
});
});
})
});
},
shouldCorrectlyReadAndWriteBuffersSingleChunks : function(test) {
var gridStore = new GridStore(client, null, 'w');
      // Default chunk size is used, so the file is stored as a single chunk
var fileSize = fs.statSync('./test/gridstore/test_gs_weird_bug.png').size;
var data = fs.readFileSync('./test/gridstore/test_gs_weird_bug.png');
gridStore.open(function(err, gridStore) {
// Write the file using writeBuffer
gridStore.writeBuffer(data, function(err, doc) {
gridStore.close(function(err, doc) {
// Read the file using readBuffer
new GridStore(client, doc._id, 'r').open(function(err, gridStore) {
gridStore.readBuffer(function(err, data2) {
test.equal(data.toString('base64'), data2.toString('base64'));
test.done();
})
});
});
})
});
},
shouldCorrectlyReadAndWriteBuffersUsingNormalWriteWithMultipleChunks : function(test) {
var gridStore = new GridStore(client, null, 'w');
// Force multiple chunks to be stored
gridStore.chunkSize = 5000;
var fileSize = fs.statSync('./test/gridstore/test_gs_weird_bug.png').size;
var data = fs.readFileSync('./test/gridstore/test_gs_weird_bug.png');
gridStore.open(function(err, gridStore) {
// Write the buffer using the .write method that should use writeBuffer correctly
gridStore.write(data, function(err, doc) {
gridStore.close(function(err, doc) {
// Read the file using readBuffer
new GridStore(client, doc._id, 'r').open(function(err, gridStore) {
gridStore.readBuffer(function(err, data2) {
test.equal(data.toString('base64'), data2.toString('base64'));
test.done();
})
});
});
})
});
},
})

@@ -432,0 +617,0 @@
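
The three buffer round-trip tests above differ only in whether chunkSize is shrunk to force multiple chunks and whether write or writeBuffer is used; the shape of the round trip is identical. A condensed sketch, assuming the same client handle and test fixture path:

    var fs = require('fs');
    var data = fs.readFileSync('./test/gridstore/test_gs_weird_bug.png');
    var gridStore = new GridStore(client, null, 'w');
    gridStore.chunkSize = 5000; // smaller than the file, so it is split across several chunks
    gridStore.open(function(err, gridStore) {
      gridStore.writeBuffer(data, function(err, doc) {
        gridStore.close(function(err, doc) {
          // Re-open by the stored file's _id and read everything back as a Buffer
          new GridStore(client, doc._id, 'r').open(function(err, reader) {
            reader.readBuffer(function(err, data2) {
              // data.toString('base64') equals data2.toString('base64') when the round trip is lossless
            });
          });
        });
      });
    });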

@@ -160,2 +160,25 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();

},
shouldCorrectlyExecuteSaveInsertUpdate: function(test) {
client.createCollection('shouldCorrectlyExecuteSaveInsertUpdate', function(err, collection) {
collection.save({ email : 'save' }, {safe:true}, function() {
collection.insert({ email : 'insert' }, {safe:true}, function() {
collection.update(
{ email : 'update' },
{ email : 'update' },
{ upsert: true, safe:true},
function() {
collection.find(function(e, c) {
c.toArray(function(e, a) {
test.equal(3, a.length)
test.done();
});
});
}
);
});
});
});
},
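
shouldCorrectlyExecuteSaveInsertUpdate above ends with three documents because each write path creates one: save acts as an insert for a document without an _id, insert always adds one, and update with upsert:true inserts when the selector matches nothing. A sketch of the upsert case in isolation, assuming the same collection as in that test ('upsert-me' is just an illustrative value):

    collection.update({ email : 'upsert-me' }, { email : 'upsert-me' }, { upsert: true, safe: true }, function(err) {
      // Nothing matched the selector, so the update document was inserted instead.
      collection.findOne({ email : 'upsert-me' }, function(err, doc) {
        // doc is non-null even though it was never explicitly inserted
      });
    });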

@@ -521,3 +544,3 @@ shouldCorrectlyInsertAndRetrieveLargeIntegratedArrayDocument : function(test) {

test.equal(null, err);
collection.update({ '_id': "12345678123456781234567812345678" }, { '$set': { 'field': 0 }}, function(err, numberOfUpdates) {

@@ -544,3 +567,3 @@ test.equal(null, err);

doc3.ref = new client.bson_serializer.DBRef('shouldCorrectlyInsertDBRefWithDbNotDefined', doc._id, MONGODB);
collection.insert([doc2, doc3], {safe:true}, function(err, result) {

@@ -552,7 +575,7 @@ // Get all items

test.equal(null, items[1].ref.db);
test.equal("shouldCorrectlyInsertDBRefWithDbNotDefined", items[2].ref.namespace);
test.equal(doc._id.toString(), items[2].ref.oid.toString());
test.equal(MONGODB, items[2].ref.db);
test.done();

@@ -563,2 +586,24 @@ })

});
},
shouldCorrectlyInsertUpdateRemoveWithNoOptions : function(test) {
var db = new Db(MONGODB, new Server('localhost', 27017, {auto_reconnect: true}), {native_parser: (process.env['TEST_NATIVE'] != null)});
db.bson_deserializer = client.bson_deserializer;
db.bson_serializer = client.bson_serializer;
db.pkFactory = client.pkFactory;
db.open(function(err, db) {
db.collection('shouldCorrectlyInsertUpdateRemoveWithNoOptions', function(err, collection) {
collection.insert({a:1});//, function(err, result) {
collection.update({a:1}, {a:2});//, function(err, result) {
collection.remove({a:2});//, function(err, result) {
collection.count(function(err, count) {
test.equal(0, count);
db.close();
test.done();
})
});
});
}

@@ -565,0 +610,0 @@ })
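
shouldCorrectlyInsertUpdateRemoveWithNoOptions above leans on the fact that insert, update and remove may be called with neither options nor a callback; such writes are fire-and-forget, so the test can only observe their combined effect through a later count. The same idea in isolation, assuming a collection obtained as in that test:

    // Fire-and-forget writes: no options, no callbacks, no acknowledgement of success or failure.
    collection.insert({a:1});
    collection.update({a:1}, {a:2});
    collection.remove({a:2});
    // Only a later read reports what actually happened.
    collection.count(function(err, count) {
      // count is 0 once all three operations have been applied
    });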

@@ -61,3 +61,20 @@ var testCase = require('../../deps/nodeunit').testCase,

},
shouldCorrectlyConnectWithDefaultReplicaset : function(test) {
// Replica configuration
var replSet = new ReplSetServers([
new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
new Server( RS.host, RS.ports[2], { auto_reconnect: true } )
],
{}
);
var db = new Db('integration_test_', replSet);
db.open(function(err, p_db) {
test.equal(null, err);
test.done();
})
},
shouldCorrectlyPassErrorWhenWrongReplicaSet : function(test) {
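
shouldCorrectlyConnectWithDefaultReplicaset above is the minimal replica-set connection: list the members as Server instances inside a ReplSetServers, hand that to Db, and open. A sketch with the harness-provided RS.host and RS.ports treated as placeholders for real member addresses:

    var replSet = new ReplSetServers([
        new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
        new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
        new Server( RS.host, RS.ports[2], { auto_reconnect: true } )
      ],
      {}
    );
    new Db('integration_test_', replSet).open(function(err, p_db) {
      // err is null when the set could be reached and the connection is usable
    });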

@@ -72,3 +89,3 @@ // Replica configuration

);
var db = new Db('integration_test_', replSet);

@@ -101,3 +118,3 @@ db.open(function(err, p_db) {

if(err != null) debug("shouldConnectWithThirdNodeKilled :: " + inspect(err));
RS.kill(node, function(err, result) {

@@ -218,3 +235,3 @@ if(err != null) debug("shouldConnectWithThirdNodeKilled :: " + inspect(err));

if(err != null) debug("shouldCorrectlyConnect :: " + inspect(err));
test.notEqual(null, primary);

@@ -226,3 +243,3 @@ test.equal(primary, p_db.serverConfig.primary.host + ":" + p_db.serverConfig.primary.port);

if(err != null) debug("shouldCorrectlyConnect :: " + inspect(err));
// Test if we have the right secondaries

@@ -236,3 +253,3 @@ test.deepEqual(items.sort(), p_db.serverConfig.secondaries.map(function(item) {

if(err != null) debug("shouldCorrectlyConnect :: " + inspect(err));
test.deepEqual(items.sort(), p_db.serverConfig.arbiters.map(function(item) {

@@ -246,3 +263,3 @@ return item.host + ":" + item.port;

if(err != null) debug("shouldCorrectlyConnect :: " + inspect(err));
test.equal(true, p_db2.serverConfig.isConnected());

@@ -249,0 +266,0 @@

@@ -22,2 +22,3 @@ var debug = require('util').debug,

this.durable = options["durable"] != null ? options["durable"] : false;
this.auth = options['auth'] != null ? options['auth'] : false;
this.path = path.resolve("data");

@@ -29,2 +30,4 @@

this.primaryCount = 1;
this.keyPath = [process.cwd(), "test", "tools", "keyfile.txt"].join("/");
fs.chmodSync(this.keyPath, 0600);

@@ -502,5 +505,7 @@ this.count = this.primaryCount + this.passiveCount + this.arbiterCount + this.secondaryCount;

" --dbpath " + this.mongods[n]['db_path'] + " --port " + this.mongods[n]['port'] + " --fork";
this.mongods[n]["start"] = this.durable ? this.mongods[n]["start"] + " --dur" : this.mongods[n]["start"];
// debug("================================================== start server")
// debug(this.mongods[n]["start"])
this.mongods[n]["start"] = this.durable ? this.mongods[n]["start"] + " --dur" : this.mongods[n]["start"];
if(this.auth) {
this.mongods[n]["start"] = this.auth ? this.mongods[n]["start"] + " --keyFile " + this.keyPath : this.mongods[n]["start"];
}
return this.mongods[n]["start"];

@@ -507,0 +512,0 @@ }

@@ -15,6 +15,9 @@ var debug = require('util').debug,

this.path = path.resolve("data");
this.port = options["start_port"] || 27017;
this.port = options["start_port"] != null ? options["start_port"] : 27017;
this.db_path = getPath(this, "data-" + this.port);
this.log_path = getPath(this, "log-" + this.port);
this.durable = options["durable"] || false;
this.durable = options["durable"] != null ? options["durable"] : false;
this.auth = options['auth'] != null ? options['auth'] : false;
this.purgedirectories = options['purgedirectories'] != null ? options['purgedirectories'] : true;
// Server status values

@@ -34,31 +37,53 @@ this.up = false;

var startCmd = generateStartCmd({log_path: self.log_path,
db_path: self.db_path, port: self.port, durable: self.durable});
db_path: self.db_path, port: self.port, durable: self.durable, auth:self.auth});
exec(killall ? 'killall mongod' : '', function(err, stdout, stderr) {
// Remove directory
exec("rm -rf " + self.db_path, function(err, stdout, stderr) {
if(err != null) return callback(err, null);
// Create directory
exec("mkdir -p " + self.db_path, function(err, stdout, stderr) {
if(err != null) return callback(err, null);
// Start up mongod process
var mongodb = exec(startCmd,
function (error, stdout, stderr) {
// console.log('stdout: ' + stdout);
// console.log('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
});
if(self.purgedirectories) {
// Remove directory
exec("rm -rf " + self.db_path, function(err, stdout, stderr) {
if(err != null) return callback(err, null);
// Create directory
exec("mkdir -p " + self.db_path, function(err, stdout, stderr) {
if(err != null) return callback(err, null);
// Start up mongod process
var mongodb = exec(startCmd,
function (error, stdout, stderr) {
// console.log('stdout: ' + stdout);
// console.log('stderr: ' + stderr);
if (error !== null) {
console.log('exec error: ' + error);
}
});
// Wait for a half a second then save the pids
setTimeout(function() {
// Mark server as running
self.up = true;
self.pid = fs.readFileSync(path.join(self.db_path, "mongod.lock"), 'ascii').trim();
// Callback
callback();
}, 500);
});
});
// Wait for a half a second then save the pids
setTimeout(function() {
// Mark server as running
self.up = true;
self.pid = fs.readFileSync(path.join(self.db_path, "mongod.lock"), 'ascii').trim();
// Callback
callback();
}, 500);
});
});
} else {
// Ensure we remove the lock file as we are not purging the directory
fs.unlinkSync(path.join(self.db_path, "mongod.lock"));
// Start up mongod process
var mongodb = exec(startCmd,
function (error, stdout, stderr) {
if (error !== null) {
console.log('exec error: ' + error);
}
});
// Wait for a half a second then save the pids
setTimeout(function() {
// Mark server as running
self.up = true;
self.pid = fs.readFileSync(path.join(self.db_path, "mongod.lock"), 'ascii').trim();
// Callback
callback();
}, 500);
}
});

@@ -101,3 +126,4 @@ }

startCmd = options['durable'] ? startCmd + " --dur" : startCmd;
startCmd = options['auth'] ? startCmd + " --auth" : startCmd;
return startCmd;
}

@@ -75,3 +75,3 @@ var nodeunit = require('../deps/nodeunit'),

function startSingleServer() {
serverManager.start(true, this);
serverManager.start(true, {purgedirectories:true}, this);
},

@@ -105,3 +105,3 @@ // Run all the integration tests using the pure js bson parser

function startSingleServer() {
serverManager.start(true, this);
serverManager.start(true, {purgedirectories:true}, this);
},

@@ -108,0 +108,0 @@ function runPureJS() {

@@ -55,2 +55,4 @@

var geoSchema = new Schema({ loc: { type: [Number], index: '2d'}});
module.exports = {

@@ -843,2 +845,19 @@ 'test that find returns a Query': function () {

// GH-389
'find nested doc using string id': function () {
var db = start()
, BlogPostB = db.model('BlogPostB', collection);
BlogPostB.create({comments: [{title: 'i should be queryable by _id'}, {title:'me too me too!'}]}, function (err, created) {
should.strictEqual(err, null);
var id = created.comments[1]._id.id;
BlogPostB.findOne({'comments._id': id}, function (err, found) {
db.close();
should.strictEqual(err, null);
should.strictEqual(!! found, true, 'Find by nested doc id hex string fails');
found._id.should.eql(created._id);
});
});
},
'test finding where $elemMatch': function () {

@@ -1002,8 +1021,39 @@ var db = start()

BlogPostB.find({numbers: {$all: [0, -1]}}, function (err, found) {
db.close();
should.strictEqual(err, null);
found.should.have.length(1);
});
});
});
});
},
'test finding a document whose arrays contain at least $all string values': function () {
var db = start()
, BlogPostB = db.model('BlogPostB', collection);
var post = new BlogPostB();
post.tags.push('onex');
post.tags.push('twox');
post.tags.push('threex');
post.save(function (err) {
should.strictEqual(err, null);
BlogPostB.findById(post._id, function (err, post) {
should.strictEqual(err, null);
BlogPostB.find({tags: { '$all': [/^onex/i]}}, function (err, docs) {
should.strictEqual(err, null);
docs.length.should.equal(1);
BlogPostB.findOne({tags: { '$all': /^two/ }}, function (err, doc) {
db.close();
should.strictEqual(err, null);
doc.id.should.eql(post.id);
});
});
});
});
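
The test above is the user-facing side of the 1.6.0 fix for regular expressions inside $all: a pattern may appear inside the $all array, and a bare pattern not wrapped in an array is accepted as well. A sketch against the same BlogPostB model and tags fixture:

    // Matches the post saved above, whose tags are onex / twox / threex.
    BlogPostB.find({ tags: { $all: [/^onex/i] } }, function (err, docs) {
      // docs.length is 1
    });
    // A single RegExp, not wrapped in an array, also works with $all.
    BlogPostB.findOne({ tags: { $all: /^two/ } }, function (err, doc) {
      // doc.id equals the saved post's id
    });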

@@ -1283,6 +1333,7 @@ },

var db = start()
, Test = db.model('TestDateQuery', new Schema({ date: Date }), collection)
, Test = db.model('TestDateQuery', new Schema({ date: Date }), 'datetest_' + random())
, now = new Date;
Test.create({ date: now }, function (err) {
Test.create({ date: now }, { date: new Date(now-10000) }, function (err, a, b) {
should.strictEqual(err, null);
Test.find({ date: now }, function (err, docs) {

@@ -1299,4 +1350,3 @@ db.close();

var db = start()
, schema = new Schema({ loc: { type: Array, index: '2d'}})
, Test = db.model('GeoSpatialArrayQuery', schema, collection);
, Test = db.model('Geo1', geoSchema, collection);

@@ -1311,6 +1361,26 @@ Test.create({ loc: [ 10, 20 ]}, { loc: [ 40, 90 ]}, function (err) {

});
}, 400);
}, 700);
});
},
'using $maxDistance with Array works (geo-spatial)': function () {
var db = start()
, Test = db.model('Geo2', geoSchema, "x"+random());
Test.create({ loc: [ 20, 80 ]}, { loc: [ 25, 30 ]}, function (err, docs) {
should.strictEqual(!!err, false);
setTimeout(function () {
Test.find({ loc: { $near: [25, 31], $maxDistance: 1 }}, function (err, docs) {
should.strictEqual(err, null);
docs.length.should.equal(1);
Test.find({ loc: { $near: [25, 32], $maxDistance: 1 }}, function (err, docs) {
db.close();
should.strictEqual(err, null);
docs.length.should.equal(0);
});
});
}, 500);
});
}
};
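
The geo-spatial test above shows what the new $maxDistance support looks like from user code: with a '2d' index on loc, $near plus $maxDistance restricts results to points within the given distance, measured in the same units as the coordinates. A sketch against the same geoSchema-backed model and fixtures:

    // Fixtures: [20, 80] and [25, 30]. Only [25, 30] lies within distance 1 of [25, 31].
    Test.find({ loc: { $near: [25, 31], $maxDistance: 1 } }, function (err, docs) {
      // docs.length is 1
    });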

@@ -11,2 +11,3 @@

, Schema = mongoose.Schema
, should = require('should')

@@ -311,2 +312,11 @@ var Comment = new Schema({

'test Query#maxDistance via where': function () {
var query = new Query();
query.where('checkin').near([40, -72]).maxDistance(1);
query._conditions.should.eql({checkin: {$near: [40, -72], $maxDistance: 1}});
query = new Query();
query.where('checkin').near([40, -72]).$maxDistance(1);
query._conditions.should.eql({checkin: {$near: [40, -72], $maxDistance: 1}});
},
'test Query#wherein.box not via where': function () {

@@ -562,2 +572,19 @@ var query = new Query();

//'throwing inside a query callback should not execute the callback again': function () {
//var query = new Query();
//var db = start();
//var Product = db.model('Product');
//var threw = false;
//Product.find({}, function (err) {
//if (!threw) {
//db.close();
//threw = true;
//throw new Error("Double callback");
//}
//should.strictEqual(err, null, 'Double callback detected');
//});
//},
'Query#find $ne should not cast single value to array for schematype of Array': function () {

@@ -667,2 +694,16 @@ var query = new Query();

'test Query#slaveOk': function () {
var query = new Query();
query.slaveOk();
query.options.slaveOk.should.be.true;
var query = new Query();
query.slaveOk(true);
query.options.slaveOk.should.be.true;
var query = new Query();
query.slaveOk(false);
query.options.slaveOk.should.be.false;
},
'test Query#hint': function () {
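
The Query#slaveOk test above covers the three call forms of the new option: no argument means true, and an explicit boolean is used as given. A minimal sketch; how the flag is consumed when the query executes is not shown here:

    var query = new Query();
    query.slaveOk();       // query.options.slaveOk is true
    query.slaveOk(false);  // query.options.slaveOk is false
    query.slaveOk(true);   // back to true; the flag lives in query.options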

@@ -669,0 +710,0 @@ var query = new Query();

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display
