Comparing version 0.9.6-18 to 0.9.6-19
@@ -72,2 +72,5 @@ /*! | ||
var name = path.basename(file); | ||
// Ensure no caching | ||
delete require.cache[file]; | ||
// Run module | ||
exports.runModule(name, require(file), options, cb); | ||
@@ -74,0 +77,0 @@ }, |
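The hunk above deletes the module's entry from `require.cache` so that the following `require(file)` re-reads it from disk on every run. The same pattern in isolation (a minimal sketch; the module path is hypothetical, and note that `require.cache` is keyed by the resolved absolute path):

    var path = require('path');
    var file = path.resolve('./my_module.js');  // hypothetical module
    delete require.cache[file];                 // drop any cached copy
    var fresh = require(file);                  // re-evaluated from disk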
@@ -20,3 +20,6 @@ GridStore | ||
* `filename` is the name of the file in GridFS that needs to be accessed/created | ||
* `mode` indicates if this is a read (value `"r"`), write (`"w"`) or append (`"w+"`) operation | ||
* `mode` indicates the operation; it can be one of: | ||
* "r" (Read): Looks for the file information in the fs.files collection, or creates a new id for this object. | ||
* "w" (Write): Erases all chunks if the file already exists. | ||
* "w+" (Append): Finds the last chunk, and keeps writing after it. | ||
* `options` can be used to specify some metadata for the file, for example `content_type`, `metadata` and `chunk_size` | ||
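For example, opening a new file for writing might look like this (a minimal sketch; `db` is assumed to be an already opened `Db` instance):

    var gs = new GridStore(db, "test.txt", "w", {content_type: "text/plain", chunk_size: 1024 * 4, metadata: {author: "test"}});
    gs.open(function(err, gs) {
      // the GridStore is now ready for writes
    });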
@@ -34,3 +37,3 @@ | ||
When GridStore object is created, it needs to be opened | ||
When GridStore object is created, it needs to be opened. | ||
@@ -65,2 +68,14 @@ gs.open(callback); | ||
## Writing a file to GridStore | ||
This function opens the GridStore, streams the contents of the file into it, and closes the GridStore. | ||
gs.writeFile( file, callback ) | ||
where | ||
* `file` is a file descriptor, or a string file path | ||
* `callback` is a function with two parameters - an error object (if an error occurred) and the GridStore object. | ||
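A typical call (a minimal sketch; the file path is hypothetical):

    var gs = new GridStore(db, "image.png", "w");
    gs.writeFile("./image.png", function(err, gs) {
      // contents have been streamed into GridFS and the store is closed
    });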
## Reading from GridStore | ||
@@ -78,2 +93,14 @@ | ||
## Streaming from GridStore | ||
You can stream data as it comes from the database using `stream` | ||
gs.stream([autoclose=false]) | ||
where | ||
* `autoclose` If true, the current GridStore is closed when EOF is reached and a 'close' event is fired | ||
The function returns a [read stream](http://nodejs.org/docs/v0.4.12/api/streams.html#readable_Stream) based on this GridStore file. It supports the events 'data', 'error', 'close' and 'end'. | ||
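Putting it together (a minimal sketch; `gs` is assumed to be a GridStore opened in read mode, mirroring the streaming tests later in this diff):

    gs.open(function(err, gs) {
      var stream = gs.stream(true);
      stream.on("data", function(chunk) {
        // consume each chunk as it arrives from the database
      });
      stream.on("close", function() {
        // with autoclose=true the GridStore has been closed for us
      });
    });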
## Delete a GridStore | ||
@@ -80,0 +107,0 @@ |
@@ -89,3 +89,2 @@ var sys = require('util'), | ||
var simple_string_serialized = BSON.serialize(doc, false, true); | ||
assert.deepEqual(simple_string_serialized, BSONJS.serialize(doc, false, true)); | ||
assert.deepEqual(BSONJS.deserialize(new Buffer(simple_string_serialized, 'binary')), BSON.deserialize(simple_string_serialized)); | ||
@@ -298,10 +297,18 @@ | ||
// var doc = { | ||
// start:1309323402727,end:1309323402727 | ||
// }; | ||
var simple_string_serialized = BSONJS.serialize(doc, false, true); | ||
var simple_string_serialized_2 = BSON.serialize(doc, false, true); | ||
for(var i = 0; i < simple_string_serialized_2.length; i++) { | ||
// debug(i + "[" + simple_string_serialized_2[i] + "] = [" + simple_string_serialized[i] + "]") | ||
assert.equal(simple_string_serialized_2[i], simple_string_serialized[i]); | ||
} | ||
// debug("------------------------------------------------------------------------------------") | ||
// debug(inspect(simple_string_serialized)) | ||
// debug(inspect(simple_string_serialized_2)) | ||
// for(var i = 0; i < simple_string_serialized_2.length; i++) { | ||
// // debug(i + "[" + simple_string_serialized_2[i] + "] = [" + simple_string_serialized[i] + "]") | ||
// assert.equal(simple_string_serialized_2[i], simple_string_serialized[i]); | ||
// } | ||
// Deserialize the string | ||
@@ -311,2 +318,4 @@ var doc1 = BSONJS.deserialize(new Buffer(simple_string_serialized_2)); | ||
assert.deepEqual(doc2, doc1) | ||
assert.deepEqual(doc, doc2) | ||
assert.deepEqual(doc, doc1) | ||
@@ -313,0 +322,0 @@ // Force garbage collect |
@@ -11,2 +11,3 @@ /** | ||
, debug = require('util').debug | ||
, crypto = require('crypto') | ||
, inspect = require('util').inspect | ||
@@ -95,3 +96,3 @@ , inherits = require('util').inherits | ||
if(value >= BSON.BSON_INT32_MAX || value < BSON.BSON_INT32_MIN || | ||
value !== parseInt(value, 10)) { | ||
value !== parseInt(value, 10)) { | ||
// Long and Number take same number of bytes. | ||
@@ -157,2 +158,5 @@ totalLength += (name != null ? (Buffer.byteLength(name) + 1) : 0) + (8 + 1); | ||
totalLength += (name != null ? (Buffer.byteLength(name) + 1) : 0) + (Buffer.byteLength(value.value, 'utf8') + 4 + 1 + 1); | ||
} else if(typeof value == 'function') { | ||
// Calculate the length of the code string | ||
totalLength += (name != null ? (Buffer.byteLength(name) + 1) : 0) + 4 + (Buffer.byteLength(value.toString(), 'utf8') + 1 + 1); | ||
} else if(typeof value == 'object') { | ||
@@ -582,2 +586,26 @@ // Calculate the object | ||
buffer[index++] = 0; | ||
} else if(typeof value == 'function') { | ||
// Write the type | ||
buffer[index++] = BSON.BSON_DATA_CODE; | ||
// Write the name | ||
if(name != null) { | ||
index = index + buffer.write(name, index, 'utf8') + 1; | ||
buffer[index - 1] = 0; | ||
} | ||
// Calculate size | ||
size = Buffer.byteLength(value.toString()) + 1; | ||
// Write the size of the string to buffer | ||
buffer[index + 3] = (size >> 24) & 0xff; | ||
buffer[index + 2] = (size >> 16) & 0xff; | ||
buffer[index + 1] = (size >> 8) & 0xff; | ||
buffer[index] = size & 0xff; | ||
// Adjust the index | ||
index = index + 4; | ||
// Write the string | ||
buffer.write(value.toString(), index, 'utf8'); | ||
// Update index | ||
index = index + size - 1; | ||
// Write zero | ||
buffer[index++] = 0; | ||
} else if(typeof value == 'object') { | ||
@@ -666,2 +694,30 @@ // Write the type of either Array or object | ||
// | ||
// Contains the function cache, if that is enabled, allowing us to avoid the eval step on each | ||
// deserialization; comparison is by a crc32 hash or the code string itself | ||
// | ||
var functionCache = BSON.functionCache = {}; | ||
// Crc state variables shared by function | ||
// var table = "00000000 77073096 EE0E612C 990951BA 076DC419 706AF48F E963A535 9E6495A3 0EDB8832 79DCB8A4 E0D5E91E 97D2D988 09B64C2B 7EB17CBD E7B82D07 90BF1D91 1DB71064 6AB020F2 F3B97148 84BE41DE 1ADAD47D 6DDDE4EB F4D4B551 83D385C7 136C9856 646BA8C0 FD62F97A 8A65C9EC 14015C4F 63066CD9 FA0F3D63 8D080DF5 3B6E20C8 4C69105E D56041E4 A2677172 3C03E4D1 4B04D447 D20D85FD A50AB56B 35B5A8FA 42B2986C DBBBC9D6 ACBCF940 32D86CE3 45DF5C75 DCD60DCF ABD13D59 26D930AC 51DE003A C8D75180 BFD06116 21B4F4B5 56B3C423 CFBA9599 B8BDA50F 2802B89E 5F058808 C60CD9B2 B10BE924 2F6F7C87 58684C11 C1611DAB B6662D3D 76DC4190 01DB7106 98D220BC EFD5102A 71B18589 06B6B51F 9FBFE4A5 E8B8D433 7807C9A2 0F00F934 9609A88E E10E9818 7F6A0DBB 086D3D2D 91646C97 E6635C01 6B6B51F4 1C6C6162 856530D8 F262004E 6C0695ED 1B01A57B 8208F4C1 F50FC457 65B0D9C6 12B7E950 8BBEB8EA FCB9887C 62DD1DDF 15DA2D49 8CD37CF3 FBD44C65 4DB26158 3AB551CE A3BC0074 D4BB30E2 4ADFA541 3DD895D7 A4D1C46D D3D6F4FB 4369E96A 346ED9FC AD678846 DA60B8D0 44042D73 33031DE5 AA0A4C5F DD0D7CC9 5005713C 270241AA BE0B1010 C90C2086 5768B525 206F85B3 B966D409 CE61E49F 5EDEF90E 29D9C998 B0D09822 C7D7A8B4 59B33D17 2EB40D81 B7BD5C3B C0BA6CAD EDB88320 9ABFB3B6 03B6E20C 74B1D29A EAD54739 9DD277AF 04DB2615 73DC1683 E3630B12 94643B84 0D6D6A3E 7A6A5AA8 E40ECF0B 9309FF9D 0A00AE27 7D079EB1 F00F9344 8708A3D2 1E01F268 6906C2FE F762575D 806567CB 196C3671 6E6B06E7 FED41B76 89D32BE0 10DA7A5A 67DD4ACC F9B9DF6F 8EBEEFF9 17B7BE43 60B08ED5 D6D6A3E8 A1D1937E 38D8C2C4 4FDFF252 D1BB67F1 A6BC5767 3FB506DD 48B2364B D80D2BDA AF0A1B4C 36034AF6 41047A60 DF60EFC3 A867DF55 316E8EEF 4669BE79 CB61B38C BC66831A 256FD2A0 5268E236 CC0C7795 BB0B4703 220216B9 5505262F C5BA3BBE B2BD0B28 2BB45A92 5CB36A04 C2D7FFA7 B5D0CF31 2CD99E8B 5BDEAE1D 9B64C2B0 EC63F226 756AA39C 026D930A 9C0906A9 EB0E363F 72076785 05005713 95BF4A82 E2B87A14 7BB12BAE 0CB61B38 92D28E9B E5D5BE0D 7CDCEFB7 0BDBDF21 86D3D2D4 F1D4E242 68DDB3F8 1FDA836E 81BE16CD F6B9265B 6FB077E1 18B74777 88085AE6 FF0F6A70 66063BCA 11010B5C 8F659EFF F862AE69 616BFFD3 166CCF45 A00AE278 D70DD2EE 4E048354 3903B3C2 A7672661 D06016F7 4969474D 3E6E77DB AED16A4A D9D65ADC 40DF0B66 37D83BF0 A9BCAE53 DEBB9EC5 47B2CF7F 30B5FFE9 BDBDF21C CABAC28A 53B39330 24B4A3A6 BAD03605 CDD70693 54DE5729 23D967BF B3667A2E C4614AB8 5D681B02 2A6F2B94 B40BBE37 C30C8EA1 5A05DF1B 2D02EF8D".split(" "); | ||
var table = [0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D]; | ||
// CRC32 hash method | ||
// Fast and versatile enough for our usage | ||
var crc32 = function(string, start, end) { | ||
var crc = 0; | ||
var x = 0; | ||
var y = 0; | ||
crc = crc ^ (-1); | ||
for(var i = start, iTop = end; i < iTop;i++) { | ||
y = (crc ^ string[i]) & 0xFF; | ||
x = table[y]; | ||
crc = (crc >>> 8) ^ x; | ||
} | ||
return crc ^ (-1); | ||
} | ||
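Because `crc32` walks raw byte values, it can be applied directly to a slice of the incoming BSON buffer; a minimal illustrative call (the code string here is made up):

    // Hash the bytes of a code string held in a Buffer (illustrative only)
    var buf = new Buffer("function () { return 1; }", "utf8");
    var hash = crc32(buf, 0, buf.length);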
/** | ||
@@ -677,3 +733,3 @@ * Deserialize `data` as BSON. | ||
BSON.deserialize = function(data) { | ||
BSON.deserialize = function(data, options) { | ||
if(!(data instanceof Buffer)) throw new Error("data stream not a buffer object"); | ||
@@ -698,2 +754,8 @@ // Final object returned to user | ||
// Options | ||
options = options == null ? {} : options; | ||
var evalFunctions = options['evalFunctions'] == null ? false : options['evalFunctions']; | ||
var cacheFunctions = options['cacheFunctions'] == null ? false : options['cacheFunctions']; | ||
var cacheFunctionsCrc32 = options['cacheFunctionsCrc32'] == null ? false : options['cacheFunctionsCrc32']; | ||
// Decode | ||
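The commented-out benchmark later in this diff drives exactly these options; a minimal sketch of such a call (assuming `serialized_data` is a BSON `Buffer`):

    var doc = BSON.deserialize(serialized_data, {
      evalFunctions: true,        // turn embedded code back into live functions
      cacheFunctions: true,       // reuse previously eval'ed functions
      cacheFunctionsCrc32: true   // key the cache by crc32 instead of the code string
    });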
@@ -710,3 +772,3 @@ var size = data[index] | data[index + 1] << 8 | data[index + 2] << 16 | data[index + 3] << 24; | ||
var type = data[index]; | ||
// Adjust for the type of element | ||
@@ -890,2 +952,48 @@ index = index + 1; | ||
currentObject[Array.isArray(currentObject) ? parseInt(string_name, 10) : string_name] = value; | ||
} else if(type === BSON.BSON_DATA_CODE) { | ||
// Read the null terminated string (indexof until first 0) | ||
string_end_index = index; | ||
while(data[string_end_index++] !== 0); | ||
string_end_index = string_end_index - 1; | ||
// Fetch the string name | ||
string_name = data.toString('utf8', index, string_end_index); | ||
// Adjust index to point to the end of the string | ||
index = string_end_index + 1; | ||
var string_size = data[index] | data[index + 1] << 8 | data[index + 2] << 16 | data[index + 3] << 24; | ||
index = index + 4; | ||
// Establish start and end of code string | ||
var codeStartIndex = index; | ||
var codeEndIndex = index + string_size - 1; | ||
// Read the string + terminating null | ||
var code_string = data.toString('utf8', codeStartIndex, codeEndIndex); | ||
// Adjust index past the code string | ||
index = index + string_size; | ||
// Final value | ||
var value = null; | ||
// If we are evaluating the functions | ||
if(evalFunctions) { | ||
// If we have the cache enabled, look for the hash of the function in the cache | ||
if(cacheFunctions) { | ||
var hash = cacheFunctionsCrc32 ? crc32(data, codeStartIndex, codeEndIndex) : code_string; | ||
// Check for cache hit, eval if missing and return cached function | ||
if(functionCache[hash] == null) { | ||
eval("value = " + code_string); | ||
functionCache[hash] = value; | ||
} | ||
value = functionCache[hash]; | ||
} else { | ||
eval("value = " + code_string); | ||
} | ||
} else { | ||
value = new Code(code_string, {}); | ||
} | ||
// Set object property | ||
currentObject[Array.isArray(currentObject) ? parseInt(string_name, 10) : string_name] = value; | ||
} else if(type === BSON.BSON_DATA_CODE_W_SCOPE) { | ||
@@ -941,2 +1049,3 @@ // Read the null terminated string (indexof until first 0) | ||
var value; | ||
if (type === BSON.BSON_DATA_LONG) { | ||
@@ -1101,2 +1210,6 @@ value = new Long(low_bits, high_bits); | ||
Symbol.prototype.inspect = function() { | ||
return this.value; | ||
} | ||
/** | ||
@@ -1103,0 +1216,0 @@ * DBRef constructor. |
@@ -22,6 +22,2 @@ var net = require('net'), | ||
// Status messages | ||
this.sizeOfMessage = 0; | ||
this.bytesRead = 0; | ||
this.buffer = ''; | ||
this.stubBuffer = ''; | ||
this.connected = false; | ||
@@ -42,14 +38,10 @@ this.closedConnectionCount = 0; | ||
// Setup the connection pool | ||
var setupConnectionPool = function(self, poolSize, reconnect) { | ||
// Pool off connections and status variables | ||
var connectionPool = []; | ||
var connectedTo = 0; | ||
var errors = 0; | ||
var connectionError = null; | ||
// Receive listener curry function | ||
// Makes it easy to test the parsing logic in isolation | ||
// It's on purpose :) | ||
var receiveListenerCreator = exports.Connection._receiveListenerCreator = function(self) { | ||
// Return receive Listener | ||
return function(result, fd) { | ||
fd = fd == null ? this.fd : fd; | ||
// Receive listener | ||
var receiveListener = function(result, fd) { | ||
fd = fd == null ? this.fd : fd; | ||
// Fetch the pool reference | ||
@@ -61,3 +53,3 @@ var conObj = self.poolByReference[fd]; | ||
// Calculate remaining bytes to fetch | ||
var remainingBytes = conObj.sizeOfMessage - conObj.bytesRead; | ||
var remainingBytes = conObj.sizeOfMessage - conObj.bytesRead; | ||
// Check if we have multiple packet messages and save the pieces otherwise emit the message | ||
@@ -84,3 +76,3 @@ if(remainingBytes > result.length) { | ||
if(remainingBytes < result.length) { | ||
receiveListener(result.slice(remainingBytes, result.length), fd); | ||
arguments.callee(result.slice(remainingBytes, result.length), fd); | ||
} | ||
@@ -106,3 +98,3 @@ } | ||
var buffer = new Buffer(conObj.buffer.length + result.length); | ||
conObj.buffer.copy(buffer, 0, 0, self.buffer.length); | ||
conObj.buffer.copy(buffer, 0, 0, conObj.buffer.length); | ||
result.copy(buffer, conObj.buffer.length, 0, result.length); | ||
@@ -116,3 +108,3 @@ conObj.buffer = buffer; | ||
self.emit("data", result.slice(0, sizeOfMessage)); | ||
receiveListener(result.slice(sizeOfMessage, result.length), fd); | ||
arguments.callee(result.slice(sizeOfMessage, result.length), fd); | ||
} | ||
@@ -123,4 +115,16 @@ } else { | ||
} | ||
}; | ||
}; | ||
} | ||
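Since the creator is exported as `Connection._receiveListenerCreator`, the framing logic can be exercised without a real socket. A hypothetical test sketch (the require path and fake fd are assumptions; the fake pool entry mirrors the per-connection fields initialised further down):

    var EventEmitter = require('events').EventEmitter;
    var Connection = require('../lib/mongodb/connection').Connection; // hypothetical path
    var self = new EventEmitter();
    // One fake pool entry keyed by a fake fd of 7
    self.poolByReference = {7: {sizeOfMessage: 0, bytesRead: 0, buffer: new Buffer(0), stubBuffer: new Buffer(0)}};
    self.on("data", function(message) {
      // fires once per complete wire message, 4-byte length header included
    });
    var listener = Connection._receiveListenerCreator(self);
    // One complete 8-byte "message": little-endian total length followed by 4 payload bytes
    listener(new Buffer([8, 0, 0, 0, 1, 2, 3, 4]), 7);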
// Setup the connection pool | ||
var setupConnectionPool = function(self, poolSize, reconnect) { | ||
// Pool off connections and status variables | ||
var connectionPool = []; | ||
var connectedTo = 0; | ||
var errors = 0; | ||
var connectionError = null; | ||
// Receive listener | ||
var receiveListener = receiveListenerCreator(self); | ||
// Fill the pool | ||
@@ -164,3 +168,3 @@ for(var i = 0; i < poolSize; i++) { | ||
"buffer": new Buffer(0), | ||
"stubBuffer": ''}); | ||
"stubBuffer": new Buffer(0)}); | ||
@@ -198,3 +202,2 @@ // Add the listener to the connection | ||
} else { | ||
// process.nextTick(waitForConnections); | ||
setTimeout(waitForConnections, 100); | ||
@@ -201,0 +204,0 @@ } |
@@ -195,3 +195,4 @@ var Connection = require('../connection').Connection, | ||
// that the driver has not explicitly added | ||
for(var i in replicas) { | ||
// for(var i in replicas) { | ||
for(var i = 0; i < replicas.length; i++) { | ||
var replica = replicas[i]; | ||
@@ -198,0 +199,0 @@ // Make sure we don't have duplicate entries |
@@ -266,2 +266,6 @@ // Licensed under the Apache License, Version 2.0 (the "License"); | ||
exports.Long.prototype.inspect = function() { | ||
return this.toString(); | ||
} | ||
/** | ||
@@ -268,0 +272,0 @@ * @param {number} opt_radix The radix in which the text should be written. |
@@ -64,3 +64,3 @@ /** | ||
// Set default chunk size | ||
this.internalChunkSize = Chunk.DEFAULT_CHUNK_SIZE; | ||
this.internalChunkSize = this.options['chunkSize'] == null ? Chunk.DEFAULT_CHUNK_SIZE : this.options['chunkSize']; | ||
@@ -106,2 +106,7 @@ /** | ||
GridStore.prototype.open = function(callback) { | ||
if( this.mode != "w" && this.mode != "w+" && this.mode != "r"){ | ||
callback(new Error("Illegal mode " + this.mode), null); | ||
return; | ||
} | ||
var self = this; | ||
@@ -119,3 +124,3 @@ | ||
chunkCollection.ensureIndex([['files_id', 1], ['n', 1]], function(err, index) { | ||
self._open(callback); | ||
_open(self, callback); | ||
}); | ||
@@ -126,9 +131,8 @@ }); | ||
} else { | ||
self._open(callback); | ||
_open(self, callback); | ||
} | ||
} | ||
GridStore.prototype._open = function(callback) { | ||
var self = this; | ||
// Hiding the _open function | ||
var _open = function(self, callback) { | ||
self.collection(function(err, collection) { | ||
@@ -145,69 +149,31 @@ if(err!==null) { | ||
// Fetch the chunks | ||
self.chunkCollection(function(err, chunkCollection) { | ||
if(query != null) { | ||
collection.find(query, function(err, cursor) { | ||
// Fetch the file | ||
cursor.nextObject(function(err, doc) { | ||
// Check if the collection for the files exists, otherwise prepare a new one | ||
if(doc != null) { | ||
self.fileId = doc._id; | ||
self.contentType = doc.contentType; | ||
self.internalChunkSize = doc.chunkSize; | ||
self.uploadDate = doc.uploadDate; | ||
self.aliases = doc.aliases; | ||
self.length = doc.length; | ||
self.metadata = doc.metadata; | ||
self.internalMd5 = doc.md5; | ||
} else { | ||
self.fileId = self.fileId instanceof self.db.bson_serializer.ObjectID ? self.fileId : new self.db.bson_serializer.ObjectID(); | ||
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE; | ||
self.internalChunkSize = self.internalChunkSize == null ? Chunk.DEFAULT_CHUNK_SIZE : self.internalChunkSize; | ||
self.length = 0; | ||
} | ||
if(query != null) { | ||
collection.find(query, function(err, cursor) { | ||
// Fetch the file | ||
cursor.nextObject(function(err, doc) { | ||
// Check if the collection for the files exists, otherwise prepare a new one | ||
if(doc != null) { | ||
self.fileId = doc._id; | ||
self.contentType = doc.contentType; | ||
self.internalChunkSize = doc.chunkSize; | ||
self.uploadDate = doc.uploadDate; | ||
self.aliases = doc.aliases; | ||
self.length = doc.length; | ||
self.metadata = doc.metadata; | ||
self.internalMd5 = doc.md5; | ||
} else { | ||
self.fileId = self.fileId instanceof self.db.bson_serializer.ObjectID ? self.fileId : new self.db.bson_serializer.ObjectID(); | ||
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE; | ||
self.internalChunkSize = self.internalChunkSize == null ? Chunk.DEFAULT_CHUNK_SIZE : self.internalChunkSize; | ||
self.length = 0; | ||
} | ||
// Process the mode of the object | ||
if(self.mode == "r") { | ||
self.nthChunk(0, function(err, chunk) { | ||
self.currentChunk = chunk; | ||
self.position = 0; | ||
callback(null, self); | ||
}); | ||
} else if(self.mode == "w") { | ||
self.chunkCollection(function(err, collection2) { | ||
// Delete any existing chunks | ||
self.deleteChunks(function(err, result) { | ||
self.currentChunk = new Chunk(self, {'n':0}); | ||
self.contentType = self.options['content_type'] == null ? self.contentType : self.options['content_type']; | ||
self.internalChunkSize = self.options['chunk_size'] == null ? self.internalChunkSize : self.options['chunk_size']; | ||
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata']; | ||
self.position = 0; | ||
callback(null, self); | ||
}); | ||
}); | ||
} else if(self.mode == "w+") { | ||
self.chunkCollection(function(err, collection) { | ||
self.nthChunk(self.lastChunkNumber(), function(err, chunk) { | ||
// Set the current chunk | ||
self.currentChunk = chunk == null ? new Chunk(self, {'n':0}) : chunk; | ||
self.currentChunk.position = self.currentChunk.data.length(); | ||
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata']; | ||
self.position = self.length; | ||
callback(null, self); | ||
}); | ||
}); | ||
} else { | ||
callback(new Error("Illegal mode " + self.mode), null); | ||
} | ||
}); | ||
}); | ||
} else { | ||
// Write only mode | ||
self.fileId = new self.db.bson_serializer.ObjectID(); | ||
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE; | ||
self.internalChunkSize = self.internalChunkSize == null ? Chunk.DEFAULT_CHUNK_SIZE : self.internalChunkSize; | ||
self.length = 0; | ||
// No file exists set up write mode | ||
if(self.mode == "w") { | ||
self.chunkCollection(function(err, collection2) { | ||
// Process the mode of the object | ||
if(self.mode == "r") { | ||
self.nthChunk(0, function(err, chunk) { | ||
self.currentChunk = chunk; | ||
self.position = 0; | ||
callback(null, self); | ||
}); | ||
} else if(self.mode == "w") { | ||
// Delete any existing chunks | ||
@@ -222,5 +188,3 @@ self.deleteChunks(function(err, result) { | ||
}); | ||
}); | ||
} else if(self.mode == "w+") { | ||
self.chunkCollection(function(err, collection) { | ||
} else if(self.mode == "w+") { | ||
self.nthChunk(self.lastChunkNumber(), function(err, chunk) { | ||
@@ -233,9 +197,37 @@ // Set the current chunk | ||
callback(null, self); | ||
}); | ||
}); | ||
} else { | ||
callback(new Error("Illegal mode " + self.mode), null); | ||
} | ||
} | ||
}); | ||
}); | ||
} | ||
}); | ||
}); | ||
} else { | ||
// Write only mode | ||
self.fileId = new self.db.bson_serializer.ObjectID(); | ||
self.contentType = exports.GridStore.DEFAULT_CONTENT_TYPE; | ||
self.internalChunkSize = self.internalChunkSize == null ? Chunk.DEFAULT_CHUNK_SIZE : self.internalChunkSize; | ||
self.length = 0; | ||
self.chunkCollection(function(err, collection2) { | ||
// No file exists set up write mode | ||
if(self.mode == "w") { | ||
// Delete any existing chunks | ||
self.deleteChunks(function(err, result) { | ||
self.currentChunk = new Chunk(self, {'n':0}); | ||
self.contentType = self.options['content_type'] == null ? self.contentType : self.options['content_type']; | ||
self.internalChunkSize = self.options['chunk_size'] == null ? self.internalChunkSize : self.options['chunk_size']; | ||
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata']; | ||
self.position = 0; | ||
callback(null, self); | ||
}); | ||
} else if(self.mode == "w+") { | ||
self.nthChunk(self.lastChunkNumber(), function(err, chunk) { | ||
// Set the current chunk | ||
self.currentChunk = chunk == null ? new Chunk(self, {'n':0}) : chunk; | ||
self.currentChunk.position = self.currentChunk.data.length(); | ||
self.metadata = self.options['metadata'] == null ? self.metadata : self.options['metadata']; | ||
self.position = self.length; | ||
callback(null, self); | ||
}); | ||
} | ||
}); | ||
}; | ||
}); | ||
@@ -1165,2 +1157,3 @@ }; | ||
var data = gstore.currentChunk.readSlice(toRead); | ||
if (data != null) { | ||
@@ -1167,0 +1160,0 @@ self.completedLength += data.length; |
try { | ||
exports.BSONPure = require('./bson/bson'); | ||
exports.BSONNative = require('../../external-libs/bson/bson'); | ||
exports.BSONNative = require('../../external-libs/bson'); | ||
} catch(err) { | ||
@@ -6,0 +6,0 @@ // do nothing |
{ "name" : "mongodb" | ||
, "description" : "A node.js driver for MongoDB" | ||
, "keywords" : ["mongodb", "mongo", "driver", "db"] | ||
, "version" : "0.9.6-18" | ||
, "version" : "0.9.6-19" | ||
, "author" : "Christian Amor Kvalheim <christkv@gmail.com>" | ||
@@ -6,0 +6,0 @@ , "contributors" : [ "Aaron Heckmann", |
@@ -317,4 +317,2 @@ Install | ||
2. You can only findAndModify a single item, not multiple items. | ||
3. The callback does not get an error when the item doesn't exist, just | ||
an `undefined` object. | ||
@@ -321,0 +319,0 @@ Signature: |
@@ -79,3 +79,3 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure(); | ||
}, | ||
'Should Correctly Deserialize object with all types' : function(test) { | ||
@@ -112,5 +112,5 @@ var bytes = [26,1,0,0,7,95,105,100,0,161,190,98,75,118,169,3,0,0,3,0,0,4,97,114,114,97,121,0,26,0,0,0,16,48,0,1,0,0,0,16,49,0,2,0,0,0,16,50,0,3,0,0,0,0,2,115,116,114,105,110,103,0,6,0,0,0,104,101,108,108,111,0,3,104,97,115,104,0,19,0,0,0,16,97,0,1,0,0,0,16,98,0,2,0,0,0,0,9,100,97,116,101,0,161,190,98,75,0,0,0,0,7,111,105,100,0,161,190,98,75,90,217,18,0,0,1,0,0,5,98,105,110,97,114,121,0,7,0,0,0,2,3,0,0,0,49,50,51,16,105,110,116,0,42,0,0,0,1,102,108,111,97,116,0,223,224,11,147,169,170,64,64,11,114,101,103,101,120,112,0,102,111,111,98,97,114,0,105,0,8,98,111,111,108,101,97,110,0,1,15,119,104,101,114,101,0,25,0,0,0,12,0,0,0,116,104,105,115,46,120,32,61,61,32,51,0,5,0,0,0,0,3,100,98,114,101,102,0,37,0,0,0,2,36,114,101,102,0,5,0,0,0,116,101,115,116,0,7,36,105,100,0,161,190,98,75,2,180,1,0,0,2,0,0,0,10,110,117,108,108,0,0]; | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_string)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_string, false, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_string, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
test.deepEqual(test_string, BSONDE.BSON.deserialize(serialized_data)); | ||
@@ -120,2 +120,13 @@ test.done(); | ||
'Should Serialize and Deserialize Empty String' : function(test) { | ||
var test_string = {hello: ''}; | ||
var serialized_data = BSONSE.BSON.serialize(test_string, false, true); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_string)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_string, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
test.deepEqual(test_string, BSONDE.BSON.deserialize(serialized_data)); | ||
test.done(); | ||
}, | ||
'Should Correctly Serialize and Deserialize Integer' : function(test) { | ||
@@ -126,4 +137,4 @@ var test_number = {doc: 5}; | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_number)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_number, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_number, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -139,4 +150,4 @@ test.deepEqual(test_number, BSONDE.BSON.deserialize(serialized_data)); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_null)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_null, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_null, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -153,4 +164,4 @@ var object = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_number)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_number, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_number, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -166,4 +177,4 @@ test.deepEqual(test_number, BSONDE.BSON.deserialize(serialized_data)); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_int)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
test.deepEqual(test_int.doc, BSONDE.BSON.deserialize(serialized_data).doc); | ||
@@ -175,4 +186,4 @@ | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_int)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
test.deepEqual(test_int.doc, BSONDE.BSON.deserialize(serialized_data).doc); | ||
@@ -184,4 +195,4 @@ | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_int)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
test.deepEqual(test_int.doc, BSONDE.BSON.deserialize(serialized_data).doc); | ||
@@ -193,4 +204,4 @@ | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_int)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
test.deepEqual(test_int.doc, BSONDE.BSON.deserialize(serialized_data).doc); | ||
@@ -205,4 +216,4 @@ test.done(); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -220,4 +231,4 @@ test.deepEqual(doc.doc.age, BSONDE.BSON.deserialize(serialized_data).doc.age); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -238,4 +249,4 @@ var deserialized = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -255,4 +266,4 @@ var deserialized = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -268,4 +279,4 @@ test.deepEqual(doc.doc.doc, BSONDE.BSON.deserialize(serialized_data).doc.doc); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -289,4 +300,4 @@ test.equal(doc.doc, BSONDE.BSON.deserialize(serialized_data).doc); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -316,4 +327,4 @@ test.equal(doc.date, BSONDE.BSON.deserialize(serialized_data).doc.date); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -329,4 +340,4 @@ test.done(); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -342,4 +353,4 @@ test.deepEqual(doc, BSONDE.BSON.deserialize(serialized_data)); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -355,4 +366,4 @@ test.deepEqual(doc, BSONDE.BSON.deserialize(serialized_data)); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -373,4 +384,4 @@ var decoded_hash = BSONDE.BSON.deserialize(serialized_data).doc; | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -393,4 +404,4 @@ var doc2 = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -410,4 +421,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -425,4 +436,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -441,4 +452,4 @@ var doc2 = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -457,4 +468,4 @@ var doc2 = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -471,4 +482,4 @@ var doc2 = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -496,4 +507,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -521,13 +532,18 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data = BSONSE.BSON.serialize(doc, false, true); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
test.deepEqual(doc.doc, deserialized_data.doc); | ||
}; | ||
var long1 = require('../../lib/mongodb').pure().Long.fromNumber(Math.pow(2,53)) | ||
.add(require('../../lib/mongodb').pure().Long.ONE); | ||
var long2 = require('../../lib/mongodb').pure().Long.fromNumber(-Math.pow(2,53)) | ||
.subtract(require('../../lib/mongodb').pure().Long.ONE); | ||
roundTrip(Long.fromNumber(Math.pow(2,53)).add(Long.ONE)); | ||
roundTrip(Long.fromNumber(-Math.pow(2,53)).subtract(Long.ONE)); | ||
roundTrip(long1); | ||
roundTrip(long2); | ||
test.done(); | ||
@@ -548,4 +564,4 @@ }, | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(test_int)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(test_int, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -562,4 +578,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(hash)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(hash, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(hash, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -589,4 +605,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
@@ -604,4 +620,4 @@ | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -622,4 +638,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -638,4 +654,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -652,4 +668,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -666,4 +682,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -680,4 +696,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -692,12 +708,13 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
if(Symbol != null) { | ||
var doc = { b: [ new Symbol('test') ], _id: new BSONSE.ObjectID() }; | ||
var doc = { b: [ new Symbol('test') ]}; | ||
var serialized_data = BSONSE.BSON.serialize(doc, false, true); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
test.deepEqual(doc.b, deserialized_data.b) | ||
test.deepEqual(doc, deserialized_data); | ||
test.deepEqual(doc, deserialized_data); | ||
test.ok(deserialized_data.b[0] instanceof Symbol); | ||
} | ||
@@ -713,4 +730,4 @@ | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -775,4 +792,4 @@ var deserialized_data = BSONDE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -803,4 +820,4 @@ var serialized_data2 = BSONDE.BSON.serialize(doc2, false, true); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -832,4 +849,4 @@ var doc2 = doc; | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -846,4 +863,4 @@ var serialized_data2 = BSONDE.BSON.serialize(doc2, false, true); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -865,4 +882,4 @@ var serialized_data2 = BSONDE.BSON.serialize(doc, false, true); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -888,4 +905,4 @@ var doc2 = BSONSE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -911,4 +928,4 @@ var doc2 = BSONSE.BSON.deserialize(serialized_data); | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -942,3 +959,3 @@ var doc2 = BSONSE.BSON.deserialize(serialized_data); | ||
}, | ||
'Should Correctly handle Forced Doubles to ensure we allocate enough space for cap collections' : function(test) { | ||
@@ -953,4 +970,4 @@ if(Double != null) { | ||
var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2); | ||
assertBuffersEqual(test, serialized_data, serialized_data2); | ||
BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
@@ -968,3 +985,3 @@ var doc2 = BSONSE.BSON.deserialize(serialized_data); | ||
var binaryData = new Buffer(hexStringToBinary(data)); | ||
var doc2 = new MongoReply(parent, binaryData); | ||
@@ -975,7 +992,49 @@ test.deepEqual([], doc2.documents); | ||
noGlobalsLeaked : function(test) { | ||
var leaks = gleak.detectNew(); | ||
test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', ')); | ||
'Should Correctly Function' : function(test) { | ||
// var doc = {b:1, func:function() { | ||
// this.b = 2; | ||
// }}; | ||
// | ||
// var serialized_data = BSONSE.BSON.serialize(doc, false, true); | ||
// | ||
// debug("----------------------------------------------------------------------") | ||
// debug(inspect(serialized_data)) | ||
// | ||
// // var serialized_data2 = new Buffer(BSONSE.BSON.calculateObjectSize(doc)); | ||
// // BSONSE.BSON.serializeWithBufferAndIndex(doc, false, serialized_data2, 0); | ||
// // assertBuffersEqual(test, serialized_data, serialized_data2, 0); | ||
// var COUNT = 10000; | ||
// | ||
// console.log(COUNT + "x (objectBSON = BSON.serialize(object))") | ||
// start = new Date | ||
// | ||
// for (i=COUNT; --i>=0; ) { | ||
// var doc2 = BSONSE.BSON.deserialize(serialized_data, {evalFunctions: true, cacheFunctions:true, cacheFunctionsCrc32:true}); | ||
// } | ||
// | ||
// end = new Date | ||
// console.log("time = ", end - start, "ms -", COUNT * 1000 / (end - start), " ops/sec") | ||
// | ||
// debug(inspect(BSONSE.BSON.functionCache)) | ||
// | ||
// var doc2 = BSONSE.BSON.deserialize(serialized_data, {evalFunctions: true, cacheFunctions:true}); | ||
// // test.deepEqual(doc, doc2) | ||
// // | ||
// debug(inspect(doc2)) | ||
// doc2.func() | ||
// debug(inspect(doc2)) | ||
// | ||
// var key = "0" | ||
// for(var i = 1; i < 10000; i++) { | ||
// key = key + " " + i | ||
// } | ||
test.done(); | ||
} | ||
}, | ||
// noGlobalsLeaked : function(test) { | ||
// var leaks = gleak.detectNew(); | ||
// test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', ')); | ||
// test.done(); | ||
// } | ||
}); | ||
@@ -982,0 +1041,0 @@ |
@@ -19,2 +19,3 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure(); | ||
var MONGODB = 'integration_tests'; | ||
var client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4}), {native_parser: (process.env['TEST_NATIVE'] != null)}); | ||
@@ -39,42 +40,45 @@ function connectionTester(test, testName, callback) { | ||
var console = require('console'); | ||
exports.testCloseNoCallback = function(test) { | ||
var db = new Db(MONGODB, | ||
new Server("127.0.0.1", 27017, | ||
{auto_reconnect: true, poolSize: 4}), | ||
{native_parser: (process.env['TEST_NATIVE'] != null)}); | ||
db.open(connectionTester(test, 'testCloseNoCallback', function() { | ||
var dbCloseCount = 0, connectionCloseCount = 0, poolCloseCount = 0; | ||
db.on('close', function() { ++dbCloseCount; }); | ||
var connection = db.serverConfig.connection; | ||
connection.on('close', function() { ++connectionCloseCount; }); | ||
connection.pool.forEach(function(poolMember) { | ||
poolMember.connection.on('close', function() { ++poolCloseCount; }); | ||
// Define the tests, we want them to run as a nested test so we only clean up the | ||
// db connection once | ||
var tests = testCase({ | ||
setUp: function(callback) { | ||
client.open(function(err, db_p) { | ||
if(numberOfTestsRun == Object.keys(tests).length) { | ||
// If first test drop the db | ||
client.dropDatabase(function(err, done) { | ||
callback(); | ||
}); | ||
} else { | ||
return callback(); | ||
} | ||
}); | ||
db.close(); | ||
setTimeout(function() { | ||
test.equal(dbCloseCount, 1); | ||
test.equal(connectionCloseCount, 1); | ||
test.equal(poolCloseCount, 4); | ||
test.done(); | ||
}, 250); | ||
})); | ||
}; | ||
exports.testCloseWithCallback = function(test) { | ||
var db = new Db(MONGODB, | ||
new Server("127.0.0.1", 27017, | ||
{auto_reconnect: true, poolSize: 4}), | ||
{native_parser: (process.env['TEST_NATIVE'] != null)}); | ||
db.open(connectionTester(test, 'testCloseWithCallback', function() { | ||
var dbCloseCount = 0, connectionCloseCount = 0, poolCloseCount = 0; | ||
db.on('close', function() { ++dbCloseCount; }); | ||
var connection = db.serverConfig.connection; | ||
connection.on('close', function() { ++connectionCloseCount; }); | ||
connection.pool.forEach(function(poolMember) { | ||
poolMember.connection.on('close', function() { ++poolCloseCount; }); | ||
}); | ||
db.close(function() { | ||
// Let all events fire. | ||
process.nextTick(function() { | ||
}, | ||
tearDown: function(callback) { | ||
numberOfTestsRun = numberOfTestsRun - 1; | ||
// Drop the database and close it | ||
if(numberOfTestsRun <= 0) { | ||
// client.dropDatabase(function(err, done) { | ||
client.close(); | ||
callback(); | ||
// }); | ||
} else { | ||
client.close(); | ||
callback(); | ||
} | ||
}, | ||
testCloseNoCallback : function(test) { | ||
var db = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4}), | ||
{native_parser: (process.env['TEST_NATIVE'] != null)}); | ||
db.open(connectionTester(test, 'testCloseNoCallback', function() { | ||
var dbCloseCount = 0, connectionCloseCount = 0, poolCloseCount = 0; | ||
db.on('close', function() { ++dbCloseCount; }); | ||
var connection = db.serverConfig.connection; | ||
connection.on('close', function() { ++connectionCloseCount; }); | ||
connection.pool.forEach(function(poolMember) { | ||
poolMember.connection.on('close', function() { ++poolCloseCount; }); | ||
}); | ||
db.close(); | ||
setTimeout(function() { | ||
test.equal(dbCloseCount, 1); | ||
@@ -84,12 +88,40 @@ test.equal(connectionCloseCount, 1); | ||
test.done(); | ||
}, 250); | ||
})); | ||
}, | ||
testCloseWithCallback : function(test) { | ||
var db = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4}), | ||
{native_parser: (process.env['TEST_NATIVE'] != null)}); | ||
db.open(connectionTester(test, 'testCloseWithCallback', function() { | ||
var dbCloseCount = 0, connectionCloseCount = 0, poolCloseCount = 0; | ||
db.on('close', function() { ++dbCloseCount; }); | ||
var connection = db.serverConfig.connection; | ||
connection.on('close', function() { ++connectionCloseCount; }); | ||
connection.pool.forEach(function(poolMember) { | ||
poolMember.connection.on('close', function() { ++poolCloseCount; }); | ||
}); | ||
}); | ||
})); | ||
}; | ||
// run this last | ||
exports.noGlobalsLeaked = function(test) { | ||
var leaks = gleak.detectNew(); | ||
test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', ')); | ||
test.done(); | ||
} | ||
db.close(function() { | ||
// Let all events fire. | ||
process.nextTick(function() { | ||
test.equal(dbCloseCount, 1); | ||
test.equal(connectionCloseCount, 1); | ||
test.equal(poolCloseCount, 4); | ||
test.done(); | ||
}); | ||
}); | ||
})); | ||
}, | ||
noGlobalsLeaked : function(test) { | ||
var leaks = gleak.detectNew(); | ||
test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', ')); | ||
test.done(); | ||
} | ||
}); | ||
// Stupid freaking workaround due to there being no way to run setup once for each suite | ||
var numberOfTestsRun = Object.keys(tests).length; | ||
// Assign out tests | ||
module.exports = tests; |
@@ -102,3 +102,3 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure(); | ||
}, | ||
shouldCorrectlyWriteLargeFileBufferAndReadBack : function(test) { | ||
@@ -109,3 +109,3 @@ var db = client; | ||
gridStore.chunkSize = 5000; | ||
gridStore.open(function(err, gridStore) { | ||
@@ -119,3 +119,3 @@ Step( | ||
} | ||
for(var i = 0; i < 15000; i += 5000) { | ||
@@ -125,3 +125,3 @@ gridStore.write(d, false, group()); | ||
}, | ||
function readAsStream() { | ||
@@ -131,7 +131,7 @@ gridStore.close(function(err, result) { | ||
var endLen = 0; | ||
var gridStore = new GridStore(db, fileId, "r"); | ||
gridStore.open(function(err, gridStore) { | ||
var stream = gridStore.stream(true); | ||
stream.on("data", function(chunk) { | ||
@@ -144,7 +144,7 @@ endLen += chunk.length | ||
}); | ||
stream.on("end", function() { | ||
gotEnd = true; | ||
}); | ||
stream.on("close", function() { | ||
@@ -161,3 +161,3 @@ test.equal(15000, endLen); | ||
}, | ||
shouldCorrectlyReadFileUsingStream : function(test) { | ||
@@ -192,3 +192,62 @@ var gridStoreR = new GridStore(client, "test_gs_read_stream", "r"); | ||
}, | ||
'Should return same data for streaming as for direct read' : function(test) { | ||
var gridStoreR = new GridStore(client, "test_gs_read_stream", "r"); | ||
var gridStoreW = new GridStore(client, "test_gs_read_stream", "w", {chunkSize:56}); | ||
// var data = fs.readFileSync("./test/gridstore/test_gs_weird_bug.png"); | ||
var data = new Buffer(100); | ||
for(var i = 0; i < 100; i++) { | ||
data[i] = i; | ||
} | ||
var readLen = 0; | ||
var gotEnd = 0; | ||
gridStoreW.open(function(err, gs) { | ||
gs.write(data, function(err, gs) { | ||
gs.close(function(err, result) { | ||
gridStoreR.open(function(err, gs) { | ||
var chunks = []; | ||
var stream = gs.stream(true); | ||
stream.on("data", function(chunk) { | ||
readLen += chunk.length; | ||
chunks.push(chunk); | ||
}); | ||
stream.on("end", function() { | ||
++gotEnd; | ||
}); | ||
stream.on("close", function() { | ||
test.equal(data.length, readLen); | ||
test.equal(1, gotEnd); | ||
// Read entire file in one go and compare | ||
var gridStoreRead = new GridStore(client, "test_gs_read_stream", "r"); | ||
gridStoreRead.open(function(err, gs) { | ||
gridStoreRead.readBuffer(function(err, data2) { | ||
// Put together all the chunks | ||
var streamData = new Buffer(data.length); | ||
var index = 0; | ||
for(var i = 0; i < chunks.length; i++) { | ||
chunks[i].copy(streamData, index, 0); | ||
index = index + chunks[i].length; | ||
} | ||
// Compare data | ||
for(var i = 0; i < data.length; i++) { | ||
// debug(" i = " + i) | ||
test.equal(data2[i], data[i]) | ||
test.equal(streamData[i], data[i]) | ||
} | ||
test.done(); | ||
}) | ||
}) | ||
}); | ||
}); | ||
}); | ||
}); | ||
}); | ||
}, | ||
noGlobalsLeaked : function(test) { | ||
@@ -195,0 +254,0 @@ var leaks = gleak.detectNew(); |
@@ -508,8 +508,8 @@ var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure(); | ||
client.createCollection('test_should_throw_error_if_serializing_function', function(err, collection) { | ||
var func = function() { return 1}; | ||
// Insert the update | ||
collection.insert({i:1, z:function() { return 1} }, {safe:true}, function(err, result) { | ||
collection.insert({i:1, z:func }, {safe:true}, function(err, result) { | ||
collection.findOne({_id:result[0]._id}, function(err, object) { | ||
test.equal(null, object.z); | ||
test.equal(1, object.i); | ||
test.equal(func.toString(), object.z.code); | ||
test.equal(1, object.i); | ||
test.done(); | ||
@@ -618,3 +618,3 @@ }) | ||
}, | ||
shouldCorrectlyExecuteMultipleFetches : function(test) { | ||
@@ -636,6 +636,6 @@ var db = new Db(MONGODB, new Server('localhost', 27017, {auto_reconnect: true}), {native_parser: (process.env['TEST_NATIVE'] != null)}); | ||
test.equal(to, doc.addresses.localPart); | ||
db.close(); | ||
test.done(); | ||
}); | ||
}); | ||
}); | ||
@@ -656,3 +656,3 @@ }); | ||
}, | ||
noGlobalsLeaked : function(test) { | ||
@@ -659,0 +659,0 @@ var leaks = gleak.detectNew(); |
@@ -16,4 +16,8 @@ var nodeunit = require('../deps/nodeunit'), | ||
var directories = [{dir: __dirname + "/../test", path: "/test/"}, | ||
{dir: __dirname + "/../test/gridstore", path: "/test/gridstore/"}]; | ||
{dir: __dirname + "/../test/gridstore", path: "/test/gridstore/"}, | ||
{dir: __dirname + "/../test/bson", path: "/test/bson/"}]; | ||
// var directories = [ | ||
// {dir: __dirname + "/../test/bson", path: "/test/bson/"}]; | ||
// Generate a list of tests | ||
@@ -46,3 +50,2 @@ directories.forEach(function(dirEntry) { | ||
var noReplicaSet = specifedParameter(process.argv, '--noreplicaset', false); | ||
// Basic default test runner | ||
@@ -49,0 +52,0 @@ var runner = nodeunit.reporters.default; |