simple-oracledb
Comparing version 0.1.1 to 0.1.2
@@ -12,2 +12,4 @@ ## Classes | ||
<dd></dd> | ||
<dt><a href="#ResultSetReadStream">ResultSetReadStream</a></dt> | ||
<dd></dd> | ||
<dt><a href="#ResultSetReader">ResultSetReader</a></dt> | ||
@@ -93,4 +95,5 @@ <dd></dd> | ||
| [options] | <code>object</code> | | Optional execute options | | ||
| [options.splitResults] | <code>object</code> | | True to enable to split the results into bulks, each bulk will invoke the provided callback (last callback invocation will have empty results) | | ||
| [options.bulkRowsAmount] | <code>number</code> | <code>100</code> | The amount of rows to fetch (for streaming, thats the max rows that the callback will get for each streaming invocation) | | ||
| [options.splitResults] | <code>boolean</code> | <code>false</code> | True to split the results into bulks; each bulk invokes the provided callback (the last callback invocation will receive empty results). See also the bulkRowsAmount option. | ||
| [options.streamResults] | <code>boolean</code> | <code>false</code> | True to stream the results; the callback will receive a readable stream which can be piped or consumed via standard stream events (ignored if splitResults=true). | ||
| [options.bulkRowsAmount] | <code>number</code> | <code>100</code> | The number of rows to fetch per batch (when splitting results, this is the maximum number of rows each callback invocation will receive) | ||
| callback | <code>[AsyncCallback](#AsyncCallback)</code> | | Invoked with an error or the query results object holding all data including LOBs | | ||
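For orientation, here is a minimal sketch combining splitResults with bulkRowsAmount (the table name, bind value and connection are assumptions, following the examples below):
```js
//sketch only: read the rows in bulks of up to 50 rows per callback invocation
connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], {
    splitResults: true,
    bulkRowsAmount: 50
}, function onResults(error, results) {
    if (error) {
        //handle error...
    } else if (results.length) {
        //handle a bulk of up to 50 rows
    } else {
        //an empty bulk means all rows have been read
    }
});
```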
@@ -110,3 +113,3 @@ | ||
//read all rows in bulks (split results) | ||
//read all rows in bulks (split results via options.splitResults) | ||
connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], { | ||
@@ -124,2 +127,18 @@ splitResults: true, | ||
}); | ||
//stream all rows (options.streamResults) | ||
connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], { | ||
streamResults: true | ||
}, function onResults(error, stream) { | ||
if (error) { | ||
//handle error... | ||
} else { | ||
//listen to fetched rows via data event or just pipe to another handler | ||
stream.on('data', function (row) { | ||
//use row object | ||
}); | ||
//listen to other events such as end/close/error.... | ||
} | ||
}); | ||
``` | ||
@@ -571,2 +590,26 @@ <a name="Connection+insert"></a> | ||
<a name="ResultSetReadStream"></a> | ||
## ResultSetReadStream | ||
**Kind**: global class | ||
**Access:** public | ||
**Author:** Sagie Gur-Ari | ||
* [ResultSetReadStream](#ResultSetReadStream) | ||
* [new ResultSetReadStream(next)](#new_ResultSetReadStream_new) | ||
* [#_read()](#ResultSetReadStream+_read) ℗ | ||
<a name="new_ResultSetReadStream_new"></a> | ||
### new ResultSetReadStream(next) | ||
A Node.js read stream for result sets. | ||
| Param | Type | Description | | ||
| --- | --- | --- | | ||
| next | <code>function</code> | A function which fetches the next row (or rows) from the result set | | ||
<a name="ResultSetReadStream+_read"></a> | ||
### ResultSetReadStream#_read() ℗ | ||
The stream _read implementation, which fetches the next row from the result set. | ||
**Access:** private | ||
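The implementation itself is not shown in this diff; as a rough illustration, such a stream could be built on Node's Readable, assuming the next function delivers rows (or an empty array at the end) via a node-style callback, as the resultset-reader changes below suggest:
```js
//hypothetical sketch, not the actual library source
var stream = require('stream');
var util = require('util');

function ResultSetReadStream(next) {
    stream.Readable.call(this, {
        objectMode: true //each chunk is a plain row object
    });

    this.nextRows = next;
}

util.inherits(ResultSetReadStream, stream.Readable);

ResultSetReadStream.prototype._read = function () {
    var self = this;

    self.nextRows(function onRows(error, rows) {
        if (error) {
            self.emit('error', error);
        } else if ((!rows) || (!rows.length)) {
            self.push(null); //no more rows, end the stream
        } else {
            self.push(rows[0]); //bulkRowsAmount is 1, so at most one row per read
        }
    });
};
```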
<a name="ResultSetReader"></a> | ||
@@ -580,6 +623,7 @@ ## ResultSetReader | ||
* [new ResultSetReader()](#new_ResultSetReader_new) | ||
* [#readNextRows(columnNames, resultSet, [options], callback)](#ResultSetReader+readNextRows) ℗ | ||
* [#readNextRows(columnNames, resultSet, [options], callback)](#ResultSetReader+readNextRows) | ||
* [#readAllRows(columnNames, resultSet, options, callback, [jsRowsBuffer])](#ResultSetReader+readAllRows) ℗ | ||
* [#readFully(columnNames, resultSet, options, callback)](#ResultSetReader+readFully) | ||
* [#readBulks(columnNames, resultSet, options, callback)](#ResultSetReader+readBulks) | ||
* [#stream(columnNames, resultSet, callback)](#ResultSetReader+stream) | ||
@@ -591,6 +635,6 @@ <a name="new_ResultSetReader_new"></a> | ||
<a name="ResultSetReader+readNextRows"></a> | ||
### ResultSetReader#readNextRows(columnNames, resultSet, [options], callback) ℗ | ||
### ResultSetReader#readNextRows(columnNames, resultSet, [options], callback) | ||
Reads the next rows data from the provided oracle ResultSet object. | ||
**Access:** private | ||
**Access:** public | ||
@@ -646,2 +690,15 @@ | Param | Type | Default | Description | | ||
<a name="ResultSetReader+stream"></a> | ||
### ResultSetReader#stream(columnNames, resultSet, callback) | ||
Streams the rows of the provided oracle ResultSet object via a read stream.<br> | ||
The callback is invoked once with the read stream instance (or with an error). | ||
**Access:** public | ||
| Param | Type | Description | | ||
| --- | --- | --- | | ||
| columnNames | <code>Array</code> | Array of strings holding the column names of the results | | ||
| resultSet | <code>Array</code> | The oracle ResultSet object | | ||
| callback | <code>[AsyncCallback](#AsyncCallback)</code> | Invoked with the read stream instance, or with an error | | ||
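For context, a hedged usage sketch of this function (the execute call, its options and the internal resultSetReader reference are assumptions; connection.query normally wires this up automatically when streamResults is enabled):
```js
//illustration only: feeding an oracledb ResultSet into the reader's stream function
connection.execute('SELECT * FROM departments', [], {
    resultSet: true
}, function onExecute(error, results) {
    if (error) {
        //handle error...
    } else {
        resultSetReader.stream(results.metaData, results.resultSet, function onStream(streamError, rowsStream) {
            if (streamError) {
                //handle error...
            } else {
                rowsStream.on('data', function (row) {
                    //use row object
                });
            }
        });
    }
});
```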
<a name="RowsReader"></a> | ||
@@ -648,0 +705,0 @@ ## RowsReader |
@@ -57,4 +57,5 @@ 'use strict'; | ||
* @param {object} [options] - Optional execute options | ||
* @param {object} [options.splitResults] - True to enable to split the results into bulks, each bulk will invoke the provided callback (last callback invocation will have empty results) | ||
* @param {number} [options.bulkRowsAmount=100] - The amount of rows to fetch (for streaming, thats the max rows that the callback will get for each streaming invocation) | ||
* @param {boolean} [options.splitResults=false] - True to split the results into bulks; each bulk invokes the provided callback (the last callback invocation will receive empty results). See also the bulkRowsAmount option. | ||
* @param {boolean} [options.streamResults=false] - True to stream the results; the callback will receive a readable stream which can be piped or consumed via standard stream events (ignored if splitResults=true). | ||
* @param {number} [options.bulkRowsAmount=100] - The number of rows to fetch per batch (when splitting results, this is the maximum number of rows each callback invocation will receive) | ||
* @param {AsyncCallback} callback - Invoked with an error or the query results object holding all data including LOBs | ||
@@ -73,3 +74,3 @@ * @example | ||
* | ||
* //read all rows in bulks (split results) | ||
* //read all rows in bulks (split results via options.splitResults) | ||
* connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], { | ||
@@ -87,2 +88,18 @@ * splitResults: true, | ||
* }); | ||
* | ||
* //stream all rows (options.streamResults) | ||
* connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], { | ||
* streamResults: true | ||
* }, function onResults(error, stream) { | ||
* if (error) { | ||
* //handle error... | ||
* } else { | ||
* //listen to fetched rows via data event or just pipe to another handler | ||
* stream.on('data', function (row) { | ||
* //use row object | ||
* }); | ||
* | ||
* //listen to other events such as end/close/error.... | ||
* } | ||
* }); | ||
* ``` | ||
@@ -99,6 +116,8 @@ */ | ||
var splitResults = false; | ||
var streamResults = false; | ||
if (typeof options === 'object') { | ||
splitResults = options.splitResults; | ||
streamResults = options.streamResults; | ||
if (splitResults) { | ||
if (splitResults || streamResults) { | ||
options.resultSet = true; | ||
@@ -114,2 +133,4 @@ } | ||
resultSetReader.readBulks(results.metaData, results.resultSet, options, callback); | ||
} else if (streamResults) { | ||
resultSetReader.stream(results.metaData, results.resultSet, callback); | ||
} else { | ||
@@ -116,0 +137,0 @@ resultSetReader.readFully(results.metaData, results.resultSet, options, callback); |
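Pieced together from the hunks above, the intended option routing reads roughly as follows (a sketch; the surrounding query/execute plumbing and variable names are assumed from the fragments):
```js
//sketch of the option handling, not a verbatim copy of the file
var splitResults = false;
var streamResults = false;
if (typeof options === 'object') {
    splitResults = options.splitResults;
    streamResults = options.streamResults;

    if (splitResults || streamResults) {
        options.resultSet = true; //both modes require an underlying ResultSet
    }
}

//...later, once the underlying execute returns 'results'...
if (splitResults) {
    resultSetReader.readBulks(results.metaData, results.resultSet, options, callback);
} else if (streamResults) {
    resultSetReader.stream(results.metaData, results.resultSet, callback);
} else {
    resultSetReader.readFully(results.metaData, results.resultSet, options, callback);
}
```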
'use strict'; | ||
var rowsReader = require('./rows-reader'); | ||
var ResultSetReadStream = require('./resultset-read-stream'); | ||
@@ -23,3 +24,3 @@ /*jslint debug: true */ | ||
* @memberof! ResultSetReader | ||
* @private | ||
* @public | ||
* @param {Array} columnNames - Array of strings holding the column names of the results | ||
@@ -118,2 +119,28 @@ * @param {Array} resultSet - The oracle ResultSet object | ||
/** | ||
* Streams the rows of the provided oracle ResultSet object via a read stream.<br> | ||
* The callback is invoked once with the read stream instance (or with an error). | ||
* | ||
* @function | ||
* @memberof! ResultSetReader | ||
* @public | ||
* @param {Array} columnNames - Array of strings holding the column names of the results | ||
* @param {Array} resultSet - The oracle ResultSet object | ||
* @param {AsyncCallback} callback - Invoked with the read stream instance, or with an error | ||
*/ | ||
ResultSetReader.prototype.stream = function (columnNames, resultSet, callback) { | ||
var self = this; | ||
var readOptions = { | ||
bulkRowsAmount: 1 | ||
}; | ||
//create new read stream | ||
var readStream = new ResultSetReadStream(function readNextRow(streamCallback) { | ||
self.readNextRows(columnNames, resultSet, readOptions, streamCallback); | ||
}); | ||
callback(null, readStream); | ||
}; | ||
module.exports = new ResultSetReader(); |
{ | ||
"name": "simple-oracledb", | ||
"version": "0.1.1", | ||
"version": "0.1.2", | ||
"description": "Extend capabilities of oracledb with simplified API for quicker development.", | ||
@@ -5,0 +5,0 @@ "author": { |
@@ -202,2 +202,24 @@ # simple-oracledb | ||
In order to stream query results, provide the streamResults = true option.<br> | ||
The callback will be invoked with a read stream instance which can be used to fetch/pipe the data.<br> | ||
Once all rows are read, the relevant stream events (such as 'end') are emitted. | ||
```js | ||
//stream all rows (options.streamResults) | ||
connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], { | ||
streamResults: true | ||
}, function onResults(error, stream) { | ||
if (error) { | ||
//handle error... | ||
} else { | ||
//listen to fetched rows via data event or just pipe to another handler | ||
stream.on('data', function (row) { | ||
//use row object | ||
}); | ||
//listen to other events such as end/close/error.... | ||
} | ||
}); | ||
``` | ||
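Since the stream emits plain row objects, piping requires an object-mode destination; a hypothetical example (the Transform below is illustrative and not part of the library):
```js
var stream = require('stream');

//hypothetical object-mode transform which serializes each row as a JSON line
var toJSONLines = new stream.Transform({
    objectMode: true,
    transform: function (row, encoding, callback) {
        callback(null, JSON.stringify(row) + '\n');
    }
});

connection.query('SELECT * FROM departments WHERE manager_id > :id', [110], {
    streamResults: true
}, function onResults(error, rowsStream) {
    if (error) {
        //handle error...
    } else {
        rowsStream.pipe(toJSONLines).pipe(process.stdout);
    }
});
```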
<a name="usage-insert"></a> | ||
@@ -373,2 +395,3 @@ ## 'connection.insert(sql, bindVariables, options, callback)' | ||
| ----------- | ------- | ----------- | | ||
| 2015-12-22 | v0.1.2 | Added streaming of query results with new option streamResults=true | | ||
| 2015-12-21 | v0.1.1 | Rename streamResults to splitResults | | ||
@@ -375,0 +398,0 @@ | 2015-12-21 | v0.0.36 | Maintenance | |
@@ -259,3 +259,3 @@ 'use strict'; | ||
it('resultset - split', function (done) { | ||
var table = 'TEST_ORA5'; | ||
var table = 'TEST_ORA6'; | ||
initDB(table, [ | ||
@@ -311,4 +311,55 @@ { | ||
it('resultset - stream', function (done) { | ||
var table = 'TEST_ORA7'; | ||
var dbData = [ | ||
{ | ||
COL1: 'PK1', | ||
COL2: 2, | ||
COL3: 30, | ||
COL4: '123' | ||
}, | ||
{ | ||
COL1: 'PK2', | ||
COL2: 200, | ||
COL3: 30, | ||
COL4: 'SOME TEST HERE' | ||
}, | ||
{ | ||
COL1: 'PK3', | ||
COL2: 5000, | ||
COL3: 1, | ||
COL4: 'MORE DATA HERE!!!', | ||
LOB1: 'THIS IS SOME CLOB TEST TEXT', | ||
LOB2: new Buffer('BLOB - 123456') | ||
} | ||
]; | ||
initDB(table, dbData, function (pool) { | ||
pool.getConnection(function (err, connection) { | ||
assert.isUndefined(err); | ||
connection.query('SELECT * FROM ' + table, [], { | ||
streamResults: true | ||
}, function (error, stream) { | ||
assert.isNull(error); | ||
var eventCounter = 0; | ||
stream.on('data', function (row) { | ||
assert.deepEqual(dbData[eventCounter], row); | ||
eventCounter++; | ||
}); | ||
stream.on('end', function () { | ||
assert.equal(eventCounter, dbData.length); | ||
end(done, connection); | ||
}); | ||
}); | ||
}); | ||
}); | ||
}); | ||
it('rows - lob data', function (done) { | ||
var table = 'TEST_ORA6'; | ||
var table = 'TEST_ORA8'; | ||
initDB(table, [ | ||
@@ -315,0 +366,0 @@ { |
@@ -782,2 +782,208 @@ 'use strict'; | ||
}); | ||
describe('stream tests', function () { | ||
it('empty', function (done) { | ||
ResultSetReader.stream(columnNames, { | ||
getRows: function (number, callback) { | ||
assert.equal(number, 1); | ||
callback(null, []); | ||
} | ||
}, function (error, stream) { | ||
assert.isNull(error); | ||
stream.on('data', function () { | ||
assert.fail(); | ||
}); | ||
stream.on('end', done); | ||
}); | ||
}); | ||
it('array - all types', function (done) { | ||
var date = new Date(); | ||
var lob1 = helper.createCLOB(); | ||
var lob2 = helper.createCLOB(); | ||
var dbData = [ | ||
[ | ||
['first row', 1, false, date] | ||
], | ||
[ | ||
[1, 'test', 50, lob1] | ||
], | ||
[ | ||
['a', date, undefined, null] | ||
], | ||
[ | ||
[10, true, lob2, 100] | ||
] | ||
]; | ||
var dbEvents = [null, function () { | ||
lob1.emit('data', 'test1'); | ||
lob1.emit('data', '\ntest2'); | ||
lob1.emit('end'); | ||
}, function () { | ||
lob2.emit('data', '123'); | ||
lob2.emit('data', '456'); | ||
lob2.emit('end'); | ||
}]; | ||
var resultData = [ | ||
{ | ||
COL1: 'first row', | ||
COL2: 1, | ||
COL3: false, | ||
COL4: date | ||
}, | ||
{ | ||
COL1: 1, | ||
COL2: 'test', | ||
COL3: 50, | ||
COL4: 'test1\ntest2' | ||
}, | ||
{ | ||
COL1: 'a', | ||
COL2: date, | ||
COL3: undefined, | ||
COL4: undefined | ||
}, | ||
{ | ||
COL1: 10, | ||
COL2: true, | ||
COL3: '123456', | ||
COL4: 100 | ||
} | ||
]; | ||
ResultSetReader.stream(columnNames, { | ||
getRows: function (number, callback) { | ||
assert.equal(number, 1); | ||
var events = dbEvents.shift(); | ||
if (events) { | ||
setTimeout(events, 10); | ||
} | ||
callback(null, dbData.shift()); | ||
} | ||
}, function (error, stream) { | ||
assert.isNull(error); | ||
var eventCounter = 0; | ||
stream.on('data', function (row) { | ||
assert.deepEqual(resultData[eventCounter], row); | ||
eventCounter++; | ||
}); | ||
stream.on('end', function () { | ||
assert.equal(eventCounter, resultData.length); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
it('array - error', function (done) { | ||
var date = new Date(); | ||
var lob1 = helper.createCLOB(); | ||
var lob2 = helper.createCLOB(); | ||
var dbData = [ | ||
[ | ||
['first row', 1, false, date] | ||
], | ||
[ | ||
[1, 'test', 50, lob1] | ||
], | ||
[ | ||
['a', date, undefined, null] | ||
], | ||
[ | ||
[10, true, lob2, 100] | ||
] | ||
]; | ||
var dbEvents = [null, function () { | ||
lob1.emit('data', 'test1'); | ||
lob1.emit('data', '\ntest2'); | ||
lob1.emit('end'); | ||
}, function () { | ||
lob2.emit('data', '123'); | ||
lob2.emit('data', '456'); | ||
lob2.emit('error', new Error('lob2 error')); | ||
}]; | ||
var resultData = [ | ||
{ | ||
COL1: 'first row', | ||
COL2: 1, | ||
COL3: false, | ||
COL4: date | ||
}, | ||
{ | ||
COL1: 1, | ||
COL2: 'test', | ||
COL3: 50, | ||
COL4: 'test1\ntest2' | ||
}, | ||
{ | ||
COL1: 'a', | ||
COL2: date, | ||
COL3: undefined, | ||
COL4: undefined | ||
} | ||
]; | ||
ResultSetReader.stream(columnNames, { | ||
getRows: function (number, callback) { | ||
assert.equal(number, 1); | ||
var events = dbEvents.shift(); | ||
if (events) { | ||
setTimeout(events, 10); | ||
} | ||
callback(null, dbData.shift()); | ||
} | ||
}, function (error, stream) { | ||
assert.isNull(error); | ||
var eventCounter = 0; | ||
stream.on('data', function (row) { | ||
assert.deepEqual(resultData[eventCounter], row); | ||
eventCounter++; | ||
}); | ||
stream.on('error', function (streamError) { | ||
assert.equal(eventCounter, resultData.length); | ||
assert.equal(streamError.message, 'lob2 error'); | ||
done(); | ||
}); | ||
}); | ||
}); | ||
it('error getRows', function (done) { | ||
ResultSetReader.stream(columnNames, { | ||
getRows: function (number, callback) { | ||
assert.equal(number, 1); | ||
process.nextTick(function () { | ||
callback(new Error('getrows')); | ||
}); | ||
} | ||
}, function (error, stream) { | ||
assert.isNull(error); | ||
stream.on('error', function (streamError) { | ||
assert.equal(streamError.message, 'getrows'); | ||
done(); | ||
}); | ||
stream.on('data', function () { | ||
assert.fail(); | ||
}); | ||
}); | ||
}); | ||
}); | ||
}); |
Sorry, the diff of this file is too big to display