Comparing version 0.3.7 with 0.4.0

lib/index.js (361 lines changed)
````diff
@@ -1,359 +1,18 @@
-// Generated by CoffeeScript 1.7.0
+// Generated by CoffeeScript 1.7.1
+var generate, parse, stringify, transform;
+
+generate = require('csv-generate');
+
+parse = require('csv-parse');
+
+transform = require('stream-transform');
+
+stringify = require('csv-stringify');
+
+module.exports.generate = generate;
+
+module.exports.parse = parse;
+
+module.exports.transform = transform;
+
+module.exports.stringify = stringify;
-
-/*
-Node CSV
-========
-
-This project provides CSV parsing and has been tested and used
-on large input files.
-
-* Follow the Node.js streaming API
-* Async and event based
-* Support delimiters, quotes, escape characters and comments
-* Line breaks discovery: detected in source and reported to destination
-* Data transformation
-* Support for large datasets
-* Complete test coverage as sample and inspiration
-* No external dependencies
-
-Important: this documentation covers the current version (0.2.x) of
-`node-csv-parser`. The documentation for the previous version (0.1.0) is
-available [here](https://github.com/wdavidw/node-csv-parser/tree/v0.1).
-
-Installation
-------------
-
-```bash
-npm install csv
-```
-
-Quick example
--------------
-
-This takes a string with a comment and converts it to an array:
-
-    // node samples/string.js
-    csv()
-    .from.string(
-      '#Welcome\n"1","2","3","4"\n"a","b","c","d"',
-      {comment: '#'} )
-    .to.array( function(data){
-      console.log(data)
-    } );
-    // [ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]
-
-Advanced example
-----------------
-
-The following example illustrates 4 usages of the library:
-
-1. Plug a readable stream from a file
-2. Direct output to a file path
-3. Transform each row
-4. Listen to events
-
-    // node samples/sample.js
-    var csv = require('csv');
-    var fs = require('fs');
-    csv()
-    .from.stream(fs.createReadStream(__dirname+'/sample.in'))
-    .to.path(__dirname+'/sample.out')
-    .transform( function(row){
-      row.unshift(row.pop());
-      return row;
-    })
-    .on('record', function(row,index){
-      console.log('#'+index+' '+JSON.stringify(row));
-    })
-    .on('end', function(count){
-      console.log('Number of lines: '+count);
-    })
-    .on('error', function(error){
-      console.log(error.message);
-    });
-    // #0 ["2000-01-01","20322051544","1979.0","8.8017226E7","ABC","45"]
-    // #1 ["2050-11-27","28392898392","1974.0","8.8392926E7","DEF","23"]
-    // Number of lines: 2
-
-Pipe example
-------------
-
-The module follows a Stream architecture. At its core, the parser and
-the stringifier utilities provide a [Stream Writer][writable_stream]
-and a [Stream Reader][readable_stream] implementation available in the CSV API.
-
-    +--------+      +----------+----------+       +--------+
-    |        |      |          |          |       |        |
-    |        |      |         CSV         |       |        |
-    |        |      |          |          |       |        |
-    | Stream |      |  Writer  |  Reader  |       | Stream |
-    | Reader |.pipe(|   API    |   API    |).pipe(| Writer |)
-    |        |      |          |          |       |        |
-    |        |      |          |          |       |        |
-    +--------+      +----------+----------+       +--------+
-
-Here's a quick example:
-
-    in = fs.createReadStream('./in')
-    out = fs.createWriteStream('./out')
-    in.pipe(csv()).pipe(out)
-
-Installing
-----------
-
-Via [npm](http://github.com/isaacs/npm):
-
-```bash
-npm install csv
-```
-
-Via git (or downloaded tarball):
-
-```bash
-git clone http://github.com/wdavidw/node-csv-parser.git
-```
-
-Events
-------
-
-The library extends the Node [EventEmitter][event] class and emits all
-the events of the Writable and Readable [Stream API][stream]. Additionally,
-the useful "record" event is emitted.
-
-*   *record*
-    Emitted by the stringifier when a new row is parsed and transformed. The
-    data is the value returned by the user `transform` callback if any. Note
-    however that the event won't be called if `transform` returns `null` since
-    the record is skipped. The callback provides two arguments: `row` is the
-    CSV line being processed (an array or an object) and `index` is the index
-    number of the line, starting at zero.
-*   *data*
-    Emitted by the stringifier on each line once the data has been transformed
-    and stringified.
-*   *drain*
-*   *end*
-    Emitted when the CSV content has been parsed.
-*   *finish*
-    Emitted when all data has been flushed to the underlying system. For
-    example, when writing to a file with `csv().to.path()`, the event will be
-    called once the writing process is complete and the file closed.
-*   *error*
-    Emitted whenever an error occurred.
-
-Architecture
-------------
-
-The code is organized around 5 main components. The "from" properties provide
-convenient functions to write CSV text or to plug a Stream Reader. The
-"parser" takes this CSV content and transforms it into an array or an object
-for each line. The "transformer" provides the ability to work on each line in
-a synchronous or asynchronous mode. The "stringifier" takes an array or an
-object and serializes it into CSV text. Finally, the "to" properties provide
-convenient functions to retrieve the text or to plug a Stream Writer.
-
-    +-------+--------+--------------+--------------+-----+
-    |       |        |              |              |     |
-    | from -> parser -> transformer -> stringifier -> to |
-    |       |        |              |              |     |
-    +-------+--------+--------------+--------------+-----+
-
-Note, even though the "parser", "transformer" and "stringifier" are available
-as properties, you won't have to interact with those.
-*/
-
-var CSV, from, options, parser, state, stream, stringifier, to, transformer, utils;
-
-stream = require('stream');
-
-state = require('./state');
-
-options = require('./options');
-
-from = require('./from');
-
-to = require('./to');
-
-stringifier = require('./stringifier');
-
-parser = require('./parser');
-
-transformer = require('./transformer');
-
-utils = require('./utils');
-
-CSV = function() {
-  var self;
-  self = this;
-  this.paused = false;
-  this.readable = true;
-  this.writable = true;
-  this.state = state();
-  this.options = options();
-  this.from = from(this);
-  this.to = to(this);
-  this.parser = parser(this);
-  this.parser.on('row', function(row) {
-    return self.transformer.write(row);
-  });
-  this.parser.on('end', function() {
-    if (self.state.count === 0) {
-      self.transformer.headers();
-    }
-    return self.transformer.end();
-  });
-  this.parser.on('error', function(e) {
-    return self.error(e);
-  });
-  this.stringifier = stringifier(this);
-  this.transformer = transformer(this);
-  this.transformer.on('end', function() {
-    var eof;
-    eof = self.options.to.eof;
-    if (eof) {
-      if (eof === true) {
-        eof = '\n';
-      }
-      self.stringifier.write(eof);
-    }
-    return self.emit('end', self.state.count);
-  });
-  return this;
-};
-
-CSV.prototype.__proto__ = stream.prototype;
-
-/*
-`pause()`
----------
-Implementation of the Readable Stream API, requesting that no further data
-be sent until resume() is called.
-*/
-
-CSV.prototype.pause = function() {
-  this.paused = true;
-  return this;
-};
-
-/*
-`resume()`
-----------
-Implementation of the Readable Stream API, resuming the incoming 'data'
-events after a pause().
-*/
-
-CSV.prototype.resume = function() {
-  this.paused = false;
-  this.emit('drain');
-  return this;
-};
-
-/*
-`write(data, [preserve])`
--------------------------
-Implementation of the Writable Stream API with a larger signature. Data
-may be a string, a buffer, an array or an object.
-If data is a string or a buffer, it could span multiple lines. If data
-is an object or an array, it must represent a single line.
-Preserve is for lines which are not considered as CSV data.
-*/
-
-CSV.prototype.write = function(chunk, preserve) {
-  var csv;
-  if (!this.writable) {
-    return this.emit('error', new Error('CSV no longer writable'));
-  }
-  if (chunk instanceof Buffer) {
-    chunk = chunk.toString();
-  }
-  if (typeof chunk === 'string' && !preserve) {
-    this.parser.write(chunk);
-  } else if (Array.isArray(chunk) && !this.state.transforming) {
-    csv = this;
-    this.transformer.write(chunk);
-  } else {
-    if (preserve || this.state.transforming) {
-      this.stringifier.write(chunk);
-    } else {
-      this.transformer.write(chunk);
-    }
-  }
-  return !this.paused;
-};
-
-/*
-`end()`
--------
-Terminate the parsing. Call this method when no more CSV data is to be
-parsed. It implements the StreamWriter API by setting the `writable`
-property to "false" and emitting the `end` event.
-*/
-
-CSV.prototype.end = function() {
-  if (!this.writable) {
-    return;
-  }
-  this.readable = false;
-  this.writable = false;
-  this.parser.end();
-  return this;
-};
-
-/*
-`transform(callback, [options])`
---------------------------------
-Register the transformer callback. The callback is a user provided function
-called on each line to filter, enrich or modify the dataset. More
-information in the "transforming data" section.
-*/
-
-CSV.prototype.transform = function(callback, options) {
-  this.transformer.callback = callback;
-  if (options) {
-    utils.merge(this.transformer.options, options);
-  }
-  return this;
-};
-
-/*
-`error(error)`
---------------
-Unified mechanism to handle errors: emits the error and marks the stream
-as non readable and non writable.
-*/
-
-CSV.prototype.error = function(e) {
-  this.readable = false;
-  this.writable = false;
-  this.emit('error', e);
-  if (this.readStream) {
-    this.readStream.destroy();
-  }
-  return this;
-};
-
-module.exports = function() {
-  return new CSV;
-};
-
-/*
-[event]: http://nodejs.org/api/events.html
-[stream]: http://nodejs.org/api/stream.html
-[writable_stream]: http://nodejs.org/api/stream.html#stream_writable_stream
-[readable_stream]: http://nodejs.org/api/stream.html#stream_readable_stream
-*/
````
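In other words, `lib/index.js` shrinks from a full stream implementation to a thin façade over the four sub-packages. As a minimal sketch of what the resulting 0.4 surface looks like, based only on the callback signatures shown in the README diff below (the input string is illustrative):

```javascript
var csv = require('csv');

// Parse a CSV string, then serialize it back. Both helpers follow the
// (input, callback) convention used by the 0.4 sub-packages.
csv.parse('a,b,c\n1,2,3', function(err, records){
  if (err) throw err;
  // records is an array of rows, e.g. [['a','b','c'], ['1','2','3']]
  csv.stringify(records, function(err, output){
    if (err) throw err;
    process.stdout.write(output);
  });
});
```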
package.json

```diff
 {
   "name": "csv",
-  "version": "0.3.7",
+  "version": "0.4.0",
   "description": "CSV parser with simple api, full of options and tested against large datasets.",
@@ -9,16 +9,3 @@ "homepage": "http://www.adaltas.com/projects/node-csv/",
   "contributors": [
-    "David Worms <david@adaltas.com> (http://www.adaltas.com)",
-    "Will White (https://github.com/willwhite)",
-    "Justin Latimer (https://github.com/justinlatimer)",
-    "jonseymour (https://github.com/jonseymour)",
-    "pascalopitz (https://github.com/pascalopitz)",
-    "Josh Pschorr (https://github.com/jpschorr)",
-    "Elad Ben-Israel (https://github.com/eladb)",
-    "Philippe Plantier (https://github.com/phipla)",
-    "Tim Oxley (https://github.com/timoxley)",
-    "Damon Oehlman (https://github.com/DamonOehlman)",
-    "Alexandru Topliceanu (https://github.com/topliceanu)",
-    "Visup (https://github.com/visup)",
-    "Edmund von der Burg (https://github.com/evdb)",
-    "Douglas Christopher Wilson (https://github.com/dougwilson)"
+    "David Worms <david@adaltas.com> (http://www.adaltas.com)"
   ],
@@ -32,2 +19,3 @@ "licenses": [{
   },
+  "main": "./lib",
   "keywords": [
@@ -42,15 +30,11 @@ "node",
   },
-  "devDependencies": {
-    "coffee-script": "latest",
-    "mocha": "latest",
-    "should": "latest",
-    "each": "latest",
-    "mecano": "latest",
-    "iconv": "latest"
+  "dependencies": {
+    "csv-generate": "*",
+    "csv-parse": "*",
+    "stream-transform": "*",
+    "csv-stringify": "*"
   },
-  "dependencies": {},
+  "devDependencies": {},
   "optionalDependencies": {},
-  "scripts": {
-    "test": "make test"
-  }
+  "scripts": {}
 }
```
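The four `"*"` ranges added above are the "wildcard dependency" instances flagged in the quality report at the bottom of this page. A consumer who wants reproducible installs could depend on the sub-packages directly with bounded ranges instead; a sketch, with version numbers that are purely illustrative and not taken from this diff:

```json
{
  "dependencies": {
    "csv-generate": "~0.0.1",
    "csv-parse": "~0.0.1",
    "stream-transform": "~0.0.1",
    "csv-stringify": "~0.0.1"
  }
}
```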
README.md (172 lines changed)

````diff
@@ -1,2 +1,2 @@
-[](http://travis-ci.org/wdavidw/node-csv-parser)
+[](http://travis-ci.org/wdavidw/node-csv)
@@ -13,62 +13,110 @@ <pre>
-This project provides CSV parsing and has been tested and used
-on large input files. It provides every option you would expect from an
-advanced CSV parser and stringifier.
+This project provides CSV generation, parsing, transformation and serialization.
+It has been tested and used by a large community over the years and should be
+considered reliable. It provides every option you would expect from an advanced
+CSV parser and stringifier.
 
-[The full documentation of the CSV parser is available here](http://www.adaltas.com/projects/node-csv/).
+[](https://nodei.co/npm/csv/) [](https://nodei.co/npm/csv/)
 
-Usage
------
+The project is split into 4 packages:
+
+* [`csv-generate`](https://github.com/wdavidw/node-csv-generate), a flexible generator of CSV strings and Javascript objects.
+* [`csv-parse`](https://github.com/wdavidw/node-csv-parse), a parser converting CSV text into arrays or objects.
+* [`stream-transform`](https://github.com/wdavidw/node-stream-transform), a transformation framework.
+* [`csv-stringify`](https://github.com/wdavidw/node-csv-stringify), a stringifier converting records into a CSV text.
+
+The full documentation for the current version 0.4 isn't yet available other
+than the links to the READMEs provided just above. The
+[official documentation][website] still covers version 0.3.
+
+## Call for feedback
+
+The redesign is an important step forward for this package. A lot of sugar has
+been removed in favor of straightforward implementations of the Stream API in
+the 4 sub-packages.
+
+We now need your input. Help us with the documentation, write about your
+impressions, discuss additional APIs.
+
+## Usage
+
+Installation command is `npm install csv`.
+
+Each module is fully compatible with the stream 2 and 3 specifications. Also,
+a simple callback-based API is always provided for convenience.
 
-### Quick example
+### Callback example
+
+Execute this script with the command `node samples/callback.js`.
 
 ```javascript
-// node samples/string.js
-csv()
-.from.string(
-  '#Welcome\n"1","2","3","4"\n"a","b","c","d"',
-  {comment: '#'} )
-.to.array( function(data){
-  console.log(data)
-} );
-// [ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]
+var csv = require('csv');
+
+csv.generate({seed: 1, columns: 2, length: 20}, function(err, data){
+  csv.parse(data, function(err, data){
+    csv.transform(data, function(data){
+      return data.map(function(value){return value.toUpperCase()});
+    }, function(err, data){
+      csv.stringify(data, function(err, data){
+        process.stdout.write(data);
+      });
+    });
+  });
+});
 ```
 
-### Advanced example
+### Stream example
+
+Execute this script with the command `node samples/stream.js`.
 
 ```javascript
-// node samples/sample.js
-var fs = require('fs');
 var csv = require('csv');
-csv()
-.from.path(__dirname+'/sample.in', { delimiter: ',', escape: '"' })
-.to.stream(fs.createWriteStream(__dirname+'/sample.out'))
-.transform( function(row){
-  row.unshift(row.pop());
-  return row;
-})
-.on('record', function(row,index){
-  console.log('#'+index+' '+JSON.stringify(row));
-})
-.on('close', function(count){
-  // when writing to a file, use the 'close' event
-  // the 'end' event may fire before the file has been written
-  console.log('Number of lines: '+count);
-})
-.on('error', function(error){
-  console.log(error.message);
-});
-// Output:
-// #0 ["2000-01-01","20322051544","1979.0","8.8017226E7","ABC","45"]
-// #1 ["2050-11-27","28392898392","1974.0","8.8392926E7","DEF","23"]
-// Number of lines: 2
+
+var generator = csv.generate({seed: 1, columns: 2, length: 20});
+var parser = csv.parse();
+var transformer = csv.transform(function(data){
+  return data.map(function(value){return value.toUpperCase()});
+});
+var stringifier = csv.stringify();
+
+generator.on('readable', function(){
+  while(data = generator.read()){
+    parser.write(data);
+  }
+});
+parser.on('readable', function(){
+  while(data = parser.read()){
+    transformer.write(data);
+  }
+});
+transformer.on('readable', function(){
+  while(data = transformer.read()){
+    stringifier.write(data);
+  }
+});
+stringifier.on('readable', function(){
+  while(data = stringifier.read()){
+    process.stdout.write(data);
+  }
+});
 ```
+
+### Pipe example
+
+Execute this script with the command `node samples/pipe.js`.
+
+```javascript
+var csv = require('csv');
+
+csv.generate({seed: 1, columns: 2, length: 20}).pipe(
+  csv.parse()).pipe(
+  csv.transform(function(record){
+    return record.map(function(value){return value.toUpperCase()});
+  })).pipe(
+  csv.stringify()).pipe(process.stdout);
+```
@@ -79,13 +127,18 @@ Migration
 ---------
 csv parser. The documentation for the previous version (0.1.0) is
-available [here](https://github.com/wdavidw/node-csv-parser/tree/v0.1).
+available [here](https://github.com/wdavidw/node-csv/tree/v0.1). The
+documentation for the incoming 0.3.x version is not yet released.
 The functions 'from*' and 'to*' are now rewritten as 'from.*' and 'to.*'. The 'data'
-event is now the 'record' event. The 'data' now receives a stringified version of
-the 'record' event.
+event is now the 'record' event. The 'data' now receives a stringified version
+of the 'record' event.
+
+The documentation for older versions is available on GitHub:
+[0.1.x](https://github.com/wdavidw/node-csv/tree/v0.1),
+[0.2.x](https://github.com/wdavidw/node-csv/tree/v0.2).
 
 Development
 -----------
-Tests are executed with mocha. To install it, simply run `npm install`, it will install
-mocha and its dependencies in your project "node_modules" directory.
+Tests are executed with mocha. To install it, simply run `npm install`, it will
+install mocha and its dependencies in your project "node_modules" directory.
@@ -109,17 +162,3 @@ To run the tests:
-* David Worms: <https://github.com/wdavidw>
-* Will White: <https://github.com/willwhite>
-* Justin Latimer: <https://github.com/justinlatimer>
-* jonseymour: <https://github.com/jonseymour>
-* pascalopitz: <https://github.com/pascalopitz>
-* Josh Pschorr: <https://github.com/jpschorr>
-* Elad Ben-Israel: <https://github.com/eladb>
-* Philippe Plantier: <https://github.com/phipla>
-* Tim Oxley: <https://github.com/timoxley>
-* Damon Oehlman: <https://github.com/DamonOehlman>
-* Alexandru Topliceanu: <https://github.com/topliceanu>
-* Visup: <https://github.com/visup>
-* Edmund von der Burg: <https://github.com/evdb>
-* Douglas Christopher Wilson: <https://github.com/dougwilson>
-* Chris Khoo: <https://github.com/khoomeister>
+* David Worms: <https://github.com/wdavidw>
@@ -132,3 +171,4 @@ Related projects
-[travis]: https://travis-ci.org/#!/wdavidw/node-csv-parser
+[travis]: https://travis-ci.org/#!/wdavidw/node-csv
+[website]: http://www.adaltas.com/projects/node-csv/
````
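As a rough before-and-after companion to the migration notes above, based solely on the examples quoted in this diff (file names are illustrative, and the 0.3 form is shown commented out since it no longer runs against 0.4):

```javascript
var fs = require('fs');
var csv = require('csv');

// 0.3 style (removed): the fluent from/to API.
// csv()
//   .from.path('data.in')
//   .to.path('data.out')
//   .transform(function(row){ return row; });

// 0.4 style: plain Node streams piped through the standalone
// parser, transformer and stringifier.
fs.createReadStream('data.in')
  .pipe(csv.parse())
  .pipe(csv.transform(function(record){ return record; }))
  .pipe(csv.stringify())
  .pipe(fs.createWriteStream('data.out'));
```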
Wildcard dependency (quality)
Package has a dependency with a floating version range. This can cause issues if the dependency publishes a new major version.
Found 4 instances in 1 package.

Major refactor (supply chain risk)
Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package.

Filesystem access (supply chain risk)
Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package.

URL strings (supply chain risk)
Package contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package.
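For the wildcard-dependency finding in particular, one period-appropriate mitigation is to freeze the resolved tree after installing, so the `"*"` ranges in csv's dependencies cannot drift between deploys. A sketch using `npm shrinkwrap`, the locking mechanism that predates `package-lock.json`:

```bash
# Install the package, then record every transitively resolved
# version in npm-shrinkwrap.json for reproducible installs.
npm install csv
npm shrinkwrap
```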
+ Added