csv-parse - npm Package Compare versions

Comparing version 0.0.3 to 0.0.4

LICENSE


lib/index.js
// Generated by CoffeeScript 1.7.1
var Parser, stream, util;
stream = require('stream');
util = require('util');
module.exports = function() {

@@ -16,5 +20,13 @@ var callback, called, chunks, data, options, parser;

}
-callback = arguments[1];
+if (typeof arguments[1] === 'function') {
+callback = arguments[1];
+} else {
+options = arguments[1];
+}
} else if (arguments.length === 1) {
-options = arguments[0];
+if (typeof arguments[0] === 'function') {
+callback = arguments[0];
+} else {
+options = arguments[0];
+}
}
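Taken together, the two argument-handling hunks above make every parameter optional. A minimal sketch of the call shapes the new `typeof` checks route, inferred from the diff rather than from the package's own documentation:

```javascript
// Sketch of the call signatures enabled by the argument checks above
// (inferred from the 0.0.4 diff; not from the package docs).
var parse = require('csv-parse');

parse('a,b\nc,d', function(err, records){});          // data + callback
parse('a,b\nc,d', {quote: '"'}, function(e, r){});    // data + options + callback
var p1 = parse({delimiter: ':'});                     // options only: returns a stream
var p2 = parse(function(err, records){});             // callback only, new in 0.0.4
```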

@@ -25,11 +37,11 @@ if (options == null) {

parser = new Parser(options);
if (data || callback) {
+if (data) {
+process.nextTick(function() {
+parser.write(data);
+return parser.end();
+});
+}
if (callback) {
called = false;
chunks = options.objname ? {} : [];
-if (data) {
-process.nextTick(function() {
-parser.write(data);
-return parser.end();
-});
-}
parser.on('readable', function() {

@@ -60,6 +72,2 @@ var chunk, _results;

-stream = require('stream');
-util = require('util');
Parser = function(options) {

@@ -189,3 +197,3 @@ var _base, _base1, _base10, _base2, _base3, _base4, _base5, _base6, _base7, _base8, _base9;

Parser.prototype.__write = function(chars, end, callback) {
-var areNextCharsRowDelimiters, char, delimLength, escapeIsQuote, i, isDelimiter, isEscape, isQuote, isRowDelimiter, l, ltrim, nextNextCharPas, nextNextCharPos, rowDelimiter, rtrim, _results;
+var areNextCharsRowDelimiters, char, delimLength, escapeIsQuote, i, isDelimiter, isEscape, isQuote, isRowDelimiter, l, ltrim, nextCharPos, rowDelimiter, rtrim, _results;
ltrim = this.options.trim || this.options.ltrim;

@@ -213,12 +221,12 @@ rtrim = this.options.trim || this.options.rtrim;

rowDelimiter = char;
-nextNextCharPos = i + 1;
+nextCharPos = i + 1;
} else if (this.nextChar === '\n' || this.nextChar === '\r') {
rowDelimiter = this.nextChar;
-nextNextCharPas = i + 2;
+nextCharPos = i + 2;
}
if (rowDelimiter) {
+if (rowDelimiter === '\r' && chars.charAt(nextCharPos) === '\n') {
+rowDelimiter += '\n';
+}
this.options.rowDelimiter = rowDelimiter;
-if (rowDelimiter === '\r' && chars.charAt(nextNextCharPas) === '\n') {
-this.options.rowDelimiter += '\n';
-}
delimLength = this.options.rowDelimiter.length;
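The substance of this hunk is a variable-name fix: the old code assigned `nextNextCharPos` in one branch but read the misspelled `nextNextCharPas`, which is `undefined` there. A standalone sketch of the corrected lookahead, reusing the names from the diff:

```javascript
// Why the rename matters: 'str'.charAt(undefined) coerces to charAt(0),
// so the old misspelling could mis-detect the row delimiter.
var chars = 'a,b\r\nc,d';
var i = 3;                               // index of the '\r'
var rowDelimiter = chars.charAt(i);      // '\r'
var nextCharPos = i + 1;
if (rowDelimiter === '\r' && chars.charAt(nextCharPos) === '\n') {
  rowDelimiter += '\n';                  // recognize the Windows '\r\n' delimiter
}
console.log(rowDelimiter);               // '\r\n'
```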

@@ -225,0 +233,0 @@ }

{
"version": "0.0.3",
"version": "0.0.4",
"name": "csv-parse",
"description": "CSV parsing implementing the Node.js `stream.Transform` API",
"keywords": [ "csv", "parse", "parser" ],
"contributors": [
"David Worms <david@adaltas.com> (http://www.adaltas.com)",
"Will White (https://github.com/willwhite)",
"Justin Latimer (https://github.com/justinlatimer)",
"jonseymour (https://github.com/jonseymour)",
"pascalopitz (https://github.com/pascalopitz)",
"Josh Pschorr (https://github.com/jpschorr)",
"Elad Ben-Israel (https://github.com/eladb)",
"Philippe Plantier (https://github.com/phipla)",
"Tim Oxley (https://github.com/timoxley)",
"Damon Oehlman (https://github.com/DamonOehlman)",
"Alexandru Topliceanu (https://github.com/topliceanu)",
"Visup (https://github.com/visup)",
"Edmund von der Burg (https://github.com/evdb)",
"Douglas Christopher Wilson (https://github.com/dougwilson)",
"Joe Eaves (https://github.com/Joeasaurus)",
"Mark Stosberg (https://github.com/markstos)"
],
"licenses": [{
"type": "BSD",
"url": "https://github.com/wdavidw/node-csv-parse/blob/master/LICENSE"
}],
"repository": {

@@ -11,4 +34,6 @@ "type": "git",

"devDependencies": {
"each": "latest",
"coffee-script": "latest",
"csv-generate": "latest",
"csv-spectrum": "latest",
"mocha": "latest",

@@ -15,0 +40,0 @@ "should": "latest"

@@ -5,20 +5,21 @@ [![Build Status](https://secure.travis-ci.org/wdavidw/node-csv-parse.png)](http://travis-ci.org/wdavidw/node-csv-parse)

parser converting CSV text input into arrays or objects. It implements the
-Node.js `stream.Transform` API. It also provides a simple callback-base API for
-converniency. It is both extremely easy to use and powerfull. It was first
-released in 2010 and is used against big datasets by a large community.
+Node.js [`stream.Transform` API](http://nodejs.org/api/stream.html#stream_class_stream_transform). It also provides a simple callback-base API for convenience. It is both extremely easy to use and powerful. It was first
+released in 2010 and is used against big data sets by a large community.
[The full documentation of the CSV parser is available here](http://www.adaltas.com/projects/node-csv/).
-Note
-----
+## Features
-This module is to be considered in beta stage. It is part of an ongoing effort
-to split the current CSV module into complementary modules with a cleaner design
-and the latest stream implementation. However, the code has been imported with
-very little changes and you should feel confident to use it in your code.
+* Follow the Node.js streaming API
+* Support delimiters, quotes, escape characters and comments
+* Line breaks discovery
+* Support big datasets
+* Complete test coverage and samples for inspiration
+* no external dependencies
+* to be used conjointly with `csv-generate`, `stream-transform` and `csv-stringify`
-Usage
------
-Run `npm install csv` to install the full csv module or run
+## Usage
+Run `npm install csv` to install the full CSV package or run
`npm install csv-parse` if you are only interested by the CSV parser.

@@ -34,9 +35,12 @@

-The parser receive a string and return an array inside a user-provided
+The parser receive a string and returns an array inside a user-provided
callback. This example is available with the command `node samples/callback.js`.
See the full list of supported parsing options below.
```javascript
var parse = require('csv-parse');
require('should');
-input = '#Welcome\n"1","2","3","4"\n"a","b","c","d"';
+var input = '#Welcome\n"1","2","3","4"\n"a","b","c","d"';
parse(input, {comment: '#'}, function(err, output){

@@ -48,17 +52,29 @@ output.should.eql([ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]);

### Using the stream API
The CSV parser implements the [`stream.Transform` API][stream_transform].
CSV data is send through the `write` function and the resulted data is obtained
within the "readable" event by calling the `read` function. This example is
available with the command `node samples/stream.js`.
See the full list of supported parser options below.
```javascript
+// node samples/stream.js
var parse = require('csv-parse');
require('should');
-output = [];
-parser = parse({delimiter: ':'})
+var output = [];
+// Create the parser
+var parser = parse({delimiter: ':'});
+// Use the writable stream api
parser.on('readable', function(){
-while(row = parser.read()){
-output.push(row)
+while(record = parser.read()){
+output.push(record);
}
});
+// Catch any error
parser.on('error', function(err){
-consol.log(err.message);
+console.log(err.message);
});
+// When we are done, test that the parsed output matched what expected
parser.on('finish', function(){

@@ -70,5 +86,7 @@ output.should.eql([

});
+// Now that setup is done, write data to the stream
parser.write("root:x:0:0:root:/root:/bin/bash\n");
parser.write("someone:x:1022:1022:a funny cat:/home/someone:/bin/bash\n");
-parser.end()
+// Close the readable stream
+parser.end();
```

@@ -78,14 +96,18 @@

-One usefull function part of the Stream API is `pipe` to interact between
+One useful function part of the Stream API is `pipe` to interact between
multiple streams. You may use this function to pipe a `stream.Readable` string
-source to a `stream.Writable` object destination. The next example available as
+source to a `stream.Writable` object destination. This example available as
`node samples/pipe.js` read the file, parse its content and transform it.
```javascript
-output = [];
-parser = parse({delimiter: ':'})
-input = fs.createReadStream('/etc/passwd');
-transformer = transform(function(row, callback){
+var fs = require('fs');
+var parse = require('csv-parse');
+var transform = require('stream-transform');
+var output = [];
+var parser = parse({delimiter: ':'})
+var input = fs.createReadStream('/etc/passwd');
+var transformer = transform(function(record, callback){
setTimeout(function(){
-callback(null, row.join(' ')+'\n');
+callback(null, record.join(' ')+'\n');
}, 500);

@@ -96,5 +118,18 @@ }, {parallel: 10});

-Migration
----------
+## Parser options
+* `delimiter` Set the field delimiter. One character only, defaults to comma.
+* `rowDelimiter` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
+* `quote` Optionnal character surrounding a field, one character only, defaults to double quotes.
+* `escape` Set the escape character, one character only, defaults to double quotes.
+* `columns` List of fields as an array, a user defined callback accepting the first line and returning the column names or true if autodiscovered in the first CSV line, default to null, affect the result data set in the sense that records will be objects instead of arrays.
+* `comment` Treat all the characteres after this one as a comment, default to '#'.
+* `objname` Name of header-record title to name objects by.
+* `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
+* `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
+* `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
+* `auto_parse` If true, the parser will attempt to convert read data types to native types.
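A sketch combining several of the options documented above; the expected output is inferred from the option descriptions, not taken from the package's test suite:

```javascript
var parse = require('csv-parse');
var input = '# a comment line\nname;age\n Alice ; 30 \n';
parse(input, {
  delimiter: ';',    // non-default field delimiter
  comment: '#',      // ignore everything after '#'
  columns: true,     // take column names from the first record
  trim: true,        // drop whitespace around each field
  auto_parse: true   // convert '30' to the number 30
}, function(err, output){
  if (err) return console.log(err.message);
  console.log(output); // [ { name: 'Alice', age: 30 } ]
});
```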
+## Migration
Most of the generator is imported from its parent project [CSV][csv] in a effort

@@ -104,5 +139,7 @@ to split it between the generator, the parser, the transformer and the

-Development
------------
The "record" has disappeared, you are encouraged to use the "readable" event conjointly
with the "read" function as documented above and in the [Stream API][stream_transform].
+## Development
Tests are executed with mocha. To install it, simple run `npm install`

@@ -116,3 +153,3 @@ followed by `npm test`. It will install mocha and its dependencies in your

The test suite is run online with [Travis][travis] against the versions
-0.9, 0.10 and 0.11 of Node.js.
+0.10 and 0.11 of Node.js.

@@ -138,13 +175,9 @@ Contributors

* Joeasaurus: <https://github.com/Joeasaurus>
* Mark Stosberg: <https://github.com/markstos>
-Related projects
-----------------
-* Pavel Kolesnikov "ya-csv": <http://github.com/koles/ya-csv>
-* Chris Williams "node-csv": <http://github.com/voodootikigod/node-csv>
[csv]: https://github.com/wdavidw/node-csv
[csv-samples]: https://github.com/wdavidw/node-csv-parse/tree/master/samples
[csv-test]: https://github.com/wdavidw/node-csv-parse/tree/master/test
[stream_transform]: http://nodejs.org/api/stream.html#stream_class_stream_transform
[travis]: https://travis-ci.org/#!/wdavidw/node-csv-parse
-should = require('should');
-parse = require('../lib');
+var parse = require('../lib');
+require('should');
-input = '#Welcome\n"1","2","3","4"\n"a","b","c","d"';
+var input = '#Welcome\n"1","2","3","4"\n"a","b","c","d"';
parse(input, {comment: '#'}, function(err, output){
output.should.eql([ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]);
});
var fs = require('fs');
-var parse = require('../lib');
+var parse = require('..');
var transform = require('../../stream-transform');
-output = [];
-parser = parse({delimiter: ':'})
-input = fs.createReadStream('/etc/passwd');
-transformer = transform(function(row, callback){
+var output = [];
+var parser = parse({delimiter: ':'})
+var input = fs.createReadStream('/etc/passwd');
+var transformer = transform(function(record, callback){
setTimeout(function(){
-callback(null, row.join(' ')+'\n');
+callback(null, record.join(' ')+'\n');
}, 500);

@@ -13,0 +13,0 @@ }, {parallel: 10});

-var parse = require('../lib');
+require('should');
+var parse = require('../lib');
-output = [];
-parser = parse({delimiter: ':'})
+var output = [];
+// Create the parser
+var parser = parse({delimiter: ':'});
+// Use the writable stream api
parser.on('readable', function(){
-while(row = parser.read()){
-output.push(row)
+while(record = parser.read()){
+output.push(record);
}
});
+// Catch any error
parser.on('error', function(err){
-consol.log(err.message);
+console.log(err.message);
});
+// When we are done, test that the parsed output matched what expected
parser.on('finish', function(){

@@ -21,5 +25,7 @@ output.should.eql([

});
+// Now that setup is done, write data to the stream
parser.write("root:x:0:0:root:/root:/bin/bash\n");
parser.write("someone:x:1022:1022:a funny cat:/home/someone:/bin/bash\n");
-parser.end()
+// Close the readable stream
+parser.end();
-# `Parser([options])`
+# CSV Parser

@@ -16,26 +16,18 @@ This module provides a CSV parser tested and used against large datasets. Over

-## Legacy
-Important: this documentation covers the latest version (0.3.x) of
-`node-csv-parser`. Older version are available on GitHub:
-[0.1.x](https://github.com/wdavidw/node-csv/tree/v0.1),
-[0.2.x](https://github.com/wdavidw/node-csv/tree/v0.2).
-## Installation
-```bash
-npm install csv
-```
-## Usage
-Callback approach, for ease of use:
-`parse(data, [options], callback)`
-Stream API, for maximum of power:
-`parse([options], [callback])`
+Please look at the [README], the [samples] and the [tests] for additional
+information.
+stream = require 'stream'
+util = require 'util'
+## Usage
+There are two ways to use the parser:
+* `parse(data, [options], callback)`
+Callback approach, for ease of use.
+* `parse([options], [callback])`
+Stream API, for maximum of power.
+Look at the examples below to discover the usage.
module.exports = ->

@@ -48,17 +40,20 @@ if arguments.length is 3

if typeof arguments[0] is 'string'
-data = arguments[0]
-else
-options = arguments[0]
-callback = arguments[1]
+then data = arguments[0]
+else options = arguments[0]
+if typeof arguments[1] is 'function'
+then callback = arguments[1]
+else options = arguments[1]
else if arguments.length is 1
-options = arguments[0]
+if typeof arguments[0] is 'function'
+then callback = arguments[0]
+else options = arguments[0]
options ?= {}
parser = new Parser options
if data or callback
+if data
+process.nextTick ->
+parser.write data
+parser.end()
if callback
called = false
chunks = if options.objname then {} else []
-if data
-process.nextTick ->
-parser.write data
-parser.end()
parser.on 'readable', ->

@@ -77,4 +72,3 @@ while chunk = parser.read()

-stream = require 'stream'
-util = require 'util'
+## `Parser([options])`

@@ -222,9 +216,9 @@ * `delimiter` Set the field delimiter. One character only, defaults to comma.

rowDelimiter = char
-nextNextCharPos = i+1
+nextCharPos = i+1
else if @nextChar is '\n' or @nextChar is '\r'
rowDelimiter = @nextChar
-nextNextCharPas = i+2
+nextCharPos = i+2
if rowDelimiter
+rowDelimiter += '\n' if rowDelimiter is '\r' and chars.charAt(nextCharPos) is '\n'
@options.rowDelimiter = rowDelimiter
-@options.rowDelimiter += '\n' if rowDelimiter is '\r' and chars.charAt(nextNextCharPas) is '\n'
delimLength = @options.rowDelimiter.length

@@ -288,3 +282,2 @@ # Parse that damn char

-@field = @field.trimRight()
if (@options.auto_parse and @floatRegexp.test(@field))
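The `auto_parse` logic referenced here tests each field against a float pattern before converting. A hypothetical sketch of the idea; the actual `floatRegexp` in csv-parse 0.0.4 may differ:

```javascript
// Illustrative only: pattern and flow assumed, not copied from the source.
var floatRegexp = /^(\-|\+)?[0-9]+(\.[0-9]+)?([eE][0-9]+)?$/;
var field = '3.14';
var value = floatRegexp.test(field) ? parseFloat(field) : field;
console.log(typeof value, value); // 'number' 3.14
```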

@@ -317,2 +310,5 @@ @line.push parseFloat(@field)

+[readme]: https://github.com/wdavidw/node-csv-parse
+[samples]: https://github.com/wdavidw/node-csv-parse/tree/master/samples
+[tests]: https://github.com/wdavidw/node-csv-parse/tree/master/test
[transform]: (http://nodejs.org/api/stream.html#stream_class_stream_transform_1)

