csv-parse - npm Package Compare versions

Comparing version 0.0.6 to 0.0.8


lib/index.js

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.7.1
// Generated by CoffeeScript 1.8.0
var Parser, stream, util;

@@ -119,3 +119,2 @@

this.field = '';
this.lastC = '';
this.nextChar = null;

@@ -125,3 +124,4 @@ this.closingQuote = 0;

this.chunks = [];
this.floatRegexp = /^(\-|\+)?([0-9]+(\.[0-9]+)?|Infinity)$/;
this.intRegexp = /^(\-|\+)?([0-9]+)$/;
this.floatRegexp = /^(\-|\+)?([0-9]+(\.[0-9]+)?([eE][0-9]+)?|Infinity)$/;
return this;
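
As an aside on the hunk above: 0.0.8 splits numeric detection for the `auto_parse` option into a dedicated integer regexp plus a float regexp that now accepts an exponent. A minimal sketch of what the two patterns match, assuming they feed `parseInt` and `parseFloat` as the later `auto_parse` hunk suggests:

```javascript
// Regexps copied from the 0.0.8 side of the hunk above; the parseInt/parseFloat
// mapping is an assumption based on the auto_parse hunk further down.
var intRegexp = /^(\-|\+)?([0-9]+)$/;
var floatRegexp = /^(\-|\+)?([0-9]+(\.[0-9]+)?([eE][0-9]+)?|Infinity)$/;

console.log(intRegexp.test('42'));      // true  -> parseInt('42')
console.log(floatRegexp.test('3.14'));  // true  -> parseFloat('3.14')
console.log(floatRegexp.test('1.5e3')); // true in 0.0.8 only; the exponent part is new
```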

@@ -156,9 +156,2 @@ };

}
if (this.field || this.lastC === this.options.delimiter || this.lastC === this.options.quote) {
if (this.options.trim || this.options.rtrim) {
this.field = this.field.trimRight();
}
this.line.push(this.field);
this.field = '';
}
if (this.line.length > 0) {

@@ -200,3 +193,3 @@ this.__push(this.line);

Parser.prototype.__write = function(chars, end, callback) {
var areNextCharsRowDelimiters, char, delimLength, escapeIsQuote, i, isDelimiter, isEscape, isQuote, isRowDelimiter, l, ltrim, nextCharPos, rowDelimiter, rtrim, wasCommenting, _results;
var acceptedLength, areNextCharsRowDelimiters, char, escapeIsQuote, i, isDelimiter, isEscape, isNextCharAComment, isNextCharADelimiter, isQuote, isRowDelimiter, l, ltrim, nextCharPos, rowDelimiter, rowDelimiterLength, rtrim, wasCommenting, _results;
ltrim = this.options.trim || this.options.ltrim;

@@ -206,3 +199,3 @@ rtrim = this.options.trim || this.options.rtrim;

l = chars.length;
delimLength = this.options.rowDelimiter ? this.options.rowDelimiter.length : 0;
rowDelimiterLength = this.options.rowDelimiter ? this.options.rowDelimiter.length : 0;
i = 0;

@@ -213,10 +206,10 @@ if (this.lines === 0 && 0xFEFF === chars.charCodeAt(0)) {

while (i < l) {
if ((i + delimLength >= l && chars.substr(i, this.options.rowDelimiter.length) !== this.options.rowDelimiter) && !end) {
break;
acceptedLength = rowDelimiterLength + this.options.comment.length + this.options.escape.length + this.options.delimiter.length;
if (this.quoting) {
acceptedLength += this.options.quote.length;
}
if ((i + this.options.escape.length >= l && chars.substr(i, this.options.escape.length) === this.options.escape) && !end) {
if (!end && (i + acceptedLength >= l)) {
break;
}
char = this.nextChar ? this.nextChar : chars.charAt(i);
this.lastC = char;
this.nextChar = chars.charAt(i + 1);

@@ -236,6 +229,6 @@ if (this.options.rowDelimiter == null) {

this.options.rowDelimiter = rowDelimiter;
delimLength = this.options.rowDelimiter.length;
rowDelimiterLength = this.options.rowDelimiter.length;
}
}
if (char === this.options.escape) {
if (!this.commenting && char === this.options.escape) {
escapeIsQuote = this.options.escape === this.options.quote;

@@ -253,12 +246,23 @@ isEscape = this.nextChar === this.options.escape;

}
if (char === this.options.quote) {
if (!this.commenting && char === this.options.quote) {
if (this.quoting) {
areNextCharsRowDelimiters = this.options.rowDelimiter && chars.substr(i + 1, this.options.rowDelimiter.length) === this.options.rowDelimiter;
if (!this.options.relax && this.nextChar && !areNextCharsRowDelimiters && this.nextChar !== this.options.delimiter && this.nextChar !== this.options.comment) {
throw new Error("Invalid closing quote at line " + (this.lines + 1) + "; found " + (JSON.stringify(this.nextChar)) + " instead of delimiter " + (JSON.stringify(this.options.delimiter)));
isNextCharADelimiter = chars.substr(i + 1, this.options.delimiter.length) === this.options.delimiter;
isNextCharAComment = this.nextChar === this.options.comment;
if (this.nextChar && !areNextCharsRowDelimiters && !isNextCharADelimiter && !isNextCharAComment) {
if (this.options.relax) {
this.quoting = false;
this.field = "" + this.options.quote + this.field;
} else {
throw new Error("Invalid closing quote at line " + (this.lines + 1) + "; found " + (JSON.stringify(this.nextChar)) + " instead of delimiter " + (JSON.stringify(this.options.delimiter)));
}
} else {
this.quoting = false;
this.closingQuote = this.options.quote.length;
i++;
if (end && i === l) {
this.line.push(this.field);
}
continue;
}
this.quoting = false;
this.closingQuote = i;
i++;
continue;
} else if (!this.field) {

@@ -272,5 +276,5 @@ this.quoting = true;

}
isDelimiter = char === this.options.delimiter;
isRowDelimiter = this.options.rowDelimiter && chars.substr(i, this.options.rowDelimiter.length) === this.options.rowDelimiter;
if (!this.commenting && !this.quoting && char === this.options.comment) {
wasCommenting = false;
if (!this.commenting && !this.quoting && this.options.comment && chars.substr(i, this.options.comment.length) === this.options.comment) {
this.commenting = true;

@@ -281,2 +285,3 @@ } else if (this.commenting && isRowDelimiter) {

}
isDelimiter = chars.substr(i, this.options.delimiter.length) === this.options.delimiter;
if (!this.commenting && !this.quoting && (isDelimiter || isRowDelimiter)) {

@@ -291,9 +296,9 @@ if (isRowDelimiter && this.line.length === 0 && this.field === '') {

if (rtrim) {
if (this.closingQuote) {
this.field = this.field.substr(0, this.closingQuote);
} else {
if (!this.closingQuote) {
this.field = this.field.trimRight();
}
}
if (this.options.auto_parse && this.floatRegexp.test(this.field)) {
if (this.options.auto_parse && this.intRegexp.test(this.field)) {
this.line.push(parseInt(this.field));
} else if (this.options.auto_parse && this.floatRegexp.test(this.field)) {
this.line.push(parseFloat(this.field));

@@ -305,2 +310,10 @@ } else {

this.field = '';
if (isDelimiter) {
i += this.options.delimiter.length;
this.nextChar = chars.charAt(i);
if (end && !this.nextChar) {
isRowDelimiter = true;
this.line.push('');
}
}
if (isRowDelimiter) {

@@ -318,6 +331,18 @@ this.__push(this.line);

}
if (end && i + 1 === l) {
if (this.options.trim || this.options.rtrim) {
this.field = this.field.trimRight();
}
this.line.push(this.field);
}
i++;
} else if (!this.commenting) {
this.field += char;
i++;
if (end && i === l) {
this.line.push(this.field);
}
} else {
i++;
}
i++;
}

@@ -324,0 +349,0 @@ this.buf = '';

package.json

{
"version": "0.0.6",
"version": "0.0.8",
"name": "csv-parse",

@@ -32,10 +32,11 @@ "description": "CSV parsing implementing the Node.js `stream.Transform` API",

},
"homepage": "http://csv.adaltas.com/parse/",
"dependencies": {},
"devDependencies": {
"each": "latest",
"coffee-script": "latest",
"csv-generate": "latest",
"csv-spectrum": "latest",
"mocha": "latest",
"should": "latest"
"each": "0.5.0",
"coffee-script": "1.9.1",
"csv-generate": "0.0.4",
"csv-spectrum": "0.2.0",
"mocha": "2.1.0",
"should": "5.0.0"
},

@@ -42,0 +43,0 @@ "optionalDependencies": {},

README.md

@@ -1,9 +0,11 @@

[![Build Status](https://secure.travis-ci.org/wdavidw/node-csv-parse.png)](http://travis-ci.org/wdavidw/node-csv-parse)
[![Build Status](https://secure.travis-ci.org/wdavidw/node-csv-parse.png)][travis]
Part of the [CSV module](https://github.com/wdavidw/node-csv), this project is a
parser converting CSV text input into arrays or objects. It implements the
Node.js [stream.Transform`API](http://nodejs.org/api/stream.html#stream_class_stream_transform). It also provides a simple callback-base API for convenience. It is both extremely easy to use and powerful. It was first
released in 2010 and is used against big data sets by a large community.
Part of the [CSV module][csv_home], this project is a parser converting CSV text
input into arrays or objects. It implements the Node.js
[`stream.Transform` API][stream_transform]. It also provides a simple
callback-based API for convenience. It is both extremely easy to use and
powerful. It was first released in 2010 and is used against big data sets by a
large community.
[The full documentation of the CSV parser is available here](http://www.adaltas.com/projects/node-csv/).
[Documentation for the "csv-parse" package is available here][home].

@@ -13,2 +15,3 @@ ## Features

* Follow the Node.js streaming API
* Simplicity with the optional callback API
* Support delimiters, quotes, escape characters and comments

@@ -20,161 +23,8 @@ * Line breaks discovery

* to be used conjointly with `csv-generate`, `stream-transform` and `csv-stringify`
* BSD License
## Usage
Run `npm install csv` to install the full CSV package or run
`npm install csv-parse` if you are only interested in the CSV parser.
Use the callback style API for simplicity or the stream-based API for
scalability. You may also mix the two styles. For example, the
[fs_read.js example][fs_read] pipes a file stream reader and gets the results
inside a callback.
For examples, refer to [the "samples" folder][csv-samples],
the documentation or [the "test" folder][csv-test].
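A hedged sketch of the mixed style mentioned above (the file path and delimiter are illustrative assumptions, not a copy of fs_read.js):

```javascript
// Hypothetical mix of the two styles: pipe a file reader into the parser and
// still receive all parsed records inside a callback once parsing completes.
var fs = require('fs');
var parse = require('csv-parse');

var parser = parse({delimiter: ','}, function(err, records){
  if (err) return console.error(err.message);
  console.log(records); // array of rows for the whole file
});
fs.createReadStream('/tmp/data.csv').pipe(parser);
```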
### Using the callback API
The parser receives a string and returns an array inside a user-provided
callback. This example is available with the command `node samples/callback.js`.
See the full list of supported parsing options below.
```javascript
var parse = require('csv-parse');
require('should');
var input = '#Welcome\n"1","2","3","4"\n"a","b","c","d"';
parse(input, {comment: '#'}, function(err, output){
  output.should.eql([ [ '1', '2', '3', '4' ], [ 'a', 'b', 'c', 'd' ] ]);
});
```
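A variation on the example above, assuming the `columns` option documented in the options list below; the input and expected output are illustrative:

```javascript
// Assumed variation: columns: true turns each record into an object keyed by
// the header line instead of an array.
var parse = require('csv-parse');

var input = 'name,age\n"Ada","36"\n"Grace","45"';
parse(input, {columns: true}, function(err, output){
  // Expected (assumption): [ { name: 'Ada', age: '36' }, { name: 'Grace', age: '45' } ]
  console.log(output);
});
```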
### Using the stream API
The CSV parser implements the [`stream.Transform` API][stream_transform].
CSV data is sent through the `write` function and the resulting data is obtained
within the "readable" event by calling the `read` function. This example is
available with the command `node samples/stream.js`.
See the full list of supported parser options below.
```javascript
var parse = require('csv-parse');
require('should');
var output = [];
// Create the parser
var parser = parse({delimiter: ':'});
// Use the readable stream api
parser.on('readable', function(){
  var record;
  while(record = parser.read()){
    output.push(record);
  }
});
// Catch any error
parser.on('error', function(err){
  console.log(err.message);
});
// When we are done, test that the parsed output matches what is expected
parser.on('finish', function(){
  output.should.eql([
    [ 'root','x','0','0','root','/root','/bin/bash' ],
    [ 'someone','x','1022','1022','a funny cat','/home/someone','/bin/bash' ]
  ]);
});
// Now that setup is done, write data to the stream
parser.write("root:x:0:0:root:/root:/bin/bash\n");
parser.write("someone:x:1022:1022:a funny cat:/home/someone:/bin/bash\n");
// Close the writable stream
parser.end();
```
### Using the pipe function
One useful part of the Stream API is the `pipe` function, used to connect
multiple streams. You may use this function to pipe a `stream.Readable` string
source to a `stream.Writable` object destination. This example, available as
`node samples/pipe.js`, reads the file, parses its content and transforms it.
```javascript
var fs = require('fs');
var parse = require('csv-parse');
var transform = require('stream-transform');
var output = [];
var parser = parse({delimiter: ':'});
var input = fs.createReadStream('/etc/passwd');
var transformer = transform(function(record, callback){
  setTimeout(function(){
    callback(null, record.join(' ')+'\n');
  }, 500);
}, {parallel: 10});
input.pipe(parser).pipe(transformer).pipe(process.stdout);
```
## Parser options
* `delimiter` Set the field delimiter. One character only, defaults to comma.
* `rowDelimiter` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
* `quote` Optional character surrounding a field, one character only, defaults to double quotes.
* `escape` Set the escape character, one character only, defaults to double quotes.
* `columns` List of fields as an array, a user-defined callback accepting the first line and returning the column names, or `true` if the columns are autodiscovered from the first CSV line; defaults to null. Affects the result data set in that records will be objects instead of arrays.
* `comment` Treat all the characters after this one as a comment; defaults to '#'.
* `objname` Name of header-record title to name objects by.
* `skip_empty_lines` Don't generate empty values for empty lines.
* `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
* `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
* `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
* `auto_parse` If true, the parser will attempt to convert read data types to native types.
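A combined sketch of several of these options; the input string and the expected output are illustrative assumptions, not taken from the samples folder:

```javascript
// Illustrative combination of the options listed above (values are assumptions).
var parse = require('csv-parse');

var input = '# inventory\nwidget;3;2.5\ngadget;10;1e2';
parse(input, {
  delimiter: ';',         // field delimiter, defaults to comma
  comment: '#',           // characters after '#' are treated as a comment
  skip_empty_lines: true, // don't generate records for empty lines
  auto_parse: true        // cast '3' -> 3, '2.5' -> 2.5, '1e2' -> 100
}, function(err, output){
  if (err) return console.error(err.message);
  console.log(output);    // expected (assumption): [ [ 'widget', 3, 2.5 ], [ 'gadget', 10, 100 ] ]
});
```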
## Migration
Most of the parser is imported from its parent project [CSV][csv] in an effort
to split it between the generator, the parser, the transformer and the
stringifier.
The "record" event has disappeared; you are encouraged to use the "readable" event conjointly
with the "read" function as documented above and in the [Stream API][stream_transform].
## Development
Tests are executed with mocha. To install it, simply run `npm install`
followed by `npm test`. It will install mocha and its dependencies in your
project "node_modules" directory and run the test suite. The tests run
against the CoffeeScript source files.
To generate the JavaScript files, run `make build`.
The test suite is run online with [Travis][travis] against Node.js versions
0.10 and 0.11.
Contributors
------------
* David Worms: <https://github.com/wdavidw>
* Will White: <https://github.com/willwhite>
* Justin Latimer: <https://github.com/justinlatimer>
* jonseymour: <https://github.com/jonseymour>
* pascalopitz: <https://github.com/pascalopitz>
* Josh Pschorr: <https://github.com/jpschorr>
* Elad Ben-Israel: <https://github.com/eladb>
* Philippe Plantier: <https://github.com/phipla>
* Tim Oxley: <https://github.com/timoxley>
* Damon Oehlman: <https://github.com/DamonOehlman>
* Alexandru Topliceanu: <https://github.com/topliceanu>
* Visup: <https://github.com/visup>
* Edmund von der Burg: <https://github.com/evdb>
* Douglas Christopher Wilson: <https://github.com/dougwilson>
* Chris Khoo: <https://github.com/khoomeister>
* Joeasaurus: <https://github.com/Joeasaurus>
* Mark Stosberg: <https://github.com/markstos>
[csv]: https://github.com/wdavidw/node-csv
[csv-samples]: https://github.com/wdavidw/node-csv-parse/tree/master/samples
[fs_read]: https://github.com/wdavidw/node-csv-parse/tree/master/samples/fs_read.js
[csv-test]: https://github.com/wdavidw/node-csv-parse/tree/master/test
[home]: http://csv.adaltas.com/parse/
[csv_home]: https://github.com/wdavidw/node-csv
[stream_transform]: http://nodejs.org/api/stream.html#stream_class_stream_transform
[travis]: https://travis-ci.org/#!/wdavidw/node-csv-parse
[travis]: http://travis-ci.org/wdavidw/node-csv-parse
// The package "should" must be installed:
// `npm install should`
var parse = require('../lib');

@@ -3,0 +6,0 @@ require('should');

@@ -0,1 +1,5 @@

// The package "should" must be installed:
// `npm install should`
fs = require('fs');

@@ -2,0 +6,0 @@ parse = require('..');

// The package "should" must be installed:
// `npm install should`
var parse = require('..');

@@ -3,0 +6,0 @@ should = require('should')

@@ -0,1 +1,2 @@

var fs = require('fs');

@@ -2,0 +3,0 @@ var parse = require('..');

// The package "should" must be installed:
// `npm install should`
var parse = require('..');

@@ -3,0 +6,0 @@ require('should');

// The package "stream-transform" must be installed:
// `npm install stream-transform`
var fs = require('fs');
var parse = require('..');
var transform = require('../../stream-transform');
var transform = require('stream-transform');

@@ -6,0 +9,0 @@ var parser = parse({delimiter: ':'})

// The package "should" must be installed:
// `npm install should`
var parse = require('../lib');

@@ -3,0 +6,0 @@ require('should');

// The package "should" must be installed:
// `npm install should`
parse = require('..');

@@ -3,0 +6,0 @@ should = require('should');

