csv - npm Package Compare versions

Comparing version 0.2.9 to 0.3.0


doc/from.md

@@ -5,3 +5,3 @@ ---

title: "Reading data from a source"
date: 2013-01-05T06:10:44.660Z
date: 2013-03-31T21:12:03.752Z
comments: false

@@ -70,11 +70,12 @@ sharing: false

* `delimiter` Set the field delimiter, one character only, defaults to comma.
* `quote` Set the field delimiter, one character only, defaults to double quotes.
* `escape` Set the field delimiter, one character only, defaults to double quotes.
* `columns` List of fields or true if autodiscovered in the first CSV line, default to null. Impact the `transform` argument and the `data` event by providing an object instead of an array, order matters, see the transform and the columns sections for more details.
* `flags` Used to read a file stream, default to the r charactere.
* `encoding` Encoding of the read stream, defaults to 'utf8', applied when a readable stream is created.
* `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
* `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
* `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
* `delimiter` Set the field delimiter, one character only, defaults to comma.
* `rowDelimiter` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
* `quote` Optionnal character surrounding a field, one character only, defaults to double quotes.
* `escape` Set the escape character, one character only, defaults to double quotes.
* `columns` List of fields or true if autodiscovered in the first CSV line, default to null. Impact the `transform` argument and the `data` event by providing an object instead of an array, order matters, see the transform and the columns sections for more details.
* `flags` Used to read a file stream, default to the r charactere.
* `encoding` Encoding of the read stream, defaults to 'utf8', applied when a readable stream is created.
* `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
* `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
* `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
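
A minimal usage sketch combining several of these read options; the file path, delimiter and column handling below are hypothetical, not taken from the package's own examples:

```javascript
// Hypothetical: parse a semicolon-delimited file, auto-detect the row
// delimiter, take column names from the first line and trim whitespace.
var csv = require('csv');

csv()
  .from.path('./data/records.csv', {
    delimiter: ';',       // one character only, defaults to ','
    quote: '"',           // optional character surrounding a field
    escape: '"',          // escape character, defaults to '"'
    columns: true,        // autodiscover column names in the first CSV line
    trim: true,           // ignore whitespace around the delimiter
    rowDelimiter: 'auto'  // 'auto', 'unix', 'mac', 'windows' or 'unicode'
  })
  .on('record', function(record, index){ console.log(index, record); })
  .on('end', function(){ console.log('done'); })
  .on('error', function(err){ console.error(err.message); });
```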

@@ -81,0 +82,0 @@ Additionnaly, in case you are working with stream, you can pass all

@@ -5,3 +5,3 @@ ---

title: "Node CSV"
date: 2013-01-05T06:10:44.661Z
date: 2013-03-31T21:12:03.751Z
comments: false

@@ -99,11 +99,11 @@ sharing: false

|-----------| |---------|---------| |---------|
| | | | | | |
| | | CSV | | |
| | | | | | |
| Stream | | Writer | Reader | | Stream |
| Reader |.pipe(| API | API |).pipe(| Writer |)
| | | | | | |
| | | | | | |
|-----------| |---------|---------| |---------|
+--------+ +----------+----------+ +--------+
| | | | | | |
| | | CSV | | |
| | | | | | |
| Stream | | Writer | Reader | | Stream |
| Reader |.pipe(| API | API |).pipe(| Writer |)
| | | | | | |
| | | | | | |
+--------+ +----------+----------+ +--------+

@@ -157,2 +157,28 @@ ```

Thrown whenever an error occurred.
Architecture
------------
The code is organised around five main components.
The "from" properties provide convenient functions
to write CSV text or to plug in a Stream Reader. The "parser"
takes this CSV content and transforms it into an array or
an object for each line. The "transformer" provides the ability
to work on each line in synchronous or asynchronous mode.
The "stringifier" takes an array or an object and serializes it into
CSV text. Finally, the "to" properties provide convenient
functions to retrieve the text or to plug in a Stream Writer.
```javascript
+-------+--------+--------------+--------------+-----+
| | | | | |
| from -> parser -> transformer -> stringifier -> to |
| | | | | |
+-------+--------+--------------+--------------+-----+
```
Note that even though the "parser", "transformer" and "stringifier" are available
as properties, you won't have to interact with them directly.
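
A short sketch of that pipeline wired end to end; the file names and the transform logic are hypothetical:

```javascript
// Hypothetical: read a file ("from"), rework each record ("transformer"),
// then serialize and write it back out ("to").
var csv = require('csv');
var fs = require('fs');

csv()
  .from.stream(fs.createReadStream('./input.csv'))
  .transform(function(record){ return record.reverse(); })
  .to.path('./output.csv')
  .on('close', function(){ console.log('done'); })
  .on('error', function(err){ console.error(err.message); });
```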

@@ -198,3 +224,3 @@

<a name="transform"></a>
`transform(callback)`
`transform(callback, [options])`
---------------------
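
The callback form works the same in both versions; a minimal sketch, leaving the new `options` argument out since this hunk does not document its keys:

```javascript
// Hypothetical synchronous transform: uppercase the first field of each record.
var csv = require('csv');

csv()
  .from.string('"a","b"\n"c","d"')
  .transform(function(record, index){
    record[0] = record[0].toUpperCase();
    return record;
  })
  .to.string(function(data){ console.log(data); });
```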

@@ -201,0 +227,0 @@

@@ -5,3 +5,3 @@ ---

title: "Parsing"
date: 2013-01-05T06:10:44.660Z
date: 2013-03-31T21:12:03.753Z
comments: false

@@ -24,4 +24,4 @@ sharing: false

<a name="parse"></a>
`parse(chars)`
<a name="write"></a>
`write(chars)`
--------------

@@ -28,0 +28,0 @@

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
var convert_anchor, convert_code, date, docs, each, fs, getindent, mecano, unindent;

@@ -12,2 +12,3 @@

var d;
return d = (new Date).toISOString();

@@ -23,2 +24,3 @@ };

var line, _i, _len;
if (!Array.isArray(text)) {

@@ -43,2 +45,3 @@ text = text.split('\n');

var indent;
lines = lines.split('\n');

@@ -59,2 +62,3 @@ indent = getindent(lines);

var re_anchor;
re_anchor = /`([\w.]+)\(/g;

@@ -68,2 +72,3 @@ return text.replace(re_anchor, function(str, code) {

var re_code;
re_code = /\n(\s{4}\s*?\S[\s\S]*?)\n(?!\s)/g;

@@ -82,2 +87,3 @@ return text.replace(re_code, function(str, code) {

var destination, source;
source = "" + __dirname + "/" + file + ".coffee";

@@ -87,2 +93,3 @@ destination = "" + __dirname + "/../doc/" + file + ".md";

var content, match, re, re_title, title;
if (err) {

@@ -112,2 +119,3 @@ return console.error(err);

var destination;
if (err) {

@@ -114,0 +122,0 @@ return console.error(err);

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
var Stream, fs, path, utils, _ref;

@@ -36,34 +36,36 @@

`from(mixed)`
-------------
`from(mixed)`
-------------
Read from any sort of source. It should be considered a convenience function which
will discover the nature of the data source to parse.
Read from any sort of source. It should be considered a convenience function which
will discover the nature of the data source to parse.
If it is a string, it checks whether it matches an existing file path and reads the file content;
otherwise, it treats the string as CSV data. If it is an instance of Stream, the object is
considered an input stream. If it is an array, each element should correspond to a record.
If it is a string, it checks whether it matches an existing file path and reads the file content;
otherwise, it treats the string as CSV data. If it is an instance of Stream, the object is
considered an input stream. If it is an array, each element should correspond to a record.
Here's some examples on how to use this function:
Here's some examples on how to use this function:
csv()
.from('"1","2","3","4"\n"a","b","c","d"')
.on('end', function(){ console.log('done') })
csv()
.from('"1","2","3","4"\n"a","b","c","d"')
.on('end', function(){ console.log('done') })
csv()
.from('./path/to/file.csv')
.on('end', function(){ console.log('done') })
csv()
.from('./path/to/file.csv')
.on('end', function(){ console.log('done') })
csv()
.from(fs.createReadStream('./path/to/file.csv'))
.on('end', function(){ console.log('done') })
csv()
.from(fs.createReadStream('./path/to/file.csv'))
.on('end', function(){ console.log('done') })
csv()
.from(['"1","2","3","4","5"',['1','2','3','4','5']])
.on('end', function(){ console.log('done') })
csv()
.from(['"1","2","3","4","5"',['1','2','3','4','5']])
.on('end', function(){ console.log('done') })
*/
var from;
from = function(mixed, options) {
var error;
error = false;

@@ -101,21 +103,21 @@ switch (typeof mixed) {

`from.options([options])`
-------------------------
`from.options([options])`
-------------------------
Update and retrieve options relative to the input source. Return
the options as an object if no argument is provided.
Update and retrieve options relative to the input source. Return
the options as an object if no argument is provided.
* `delimiter` Set the field delimiter, one character only, defaults to comma.
* `rowDelimiter` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
* `quote` Optionnal character surrounding a field, one character only, defaults to double quotes.
* `escape` Set the escape character, one character only, defaults to double quotes.
* `columns` List of fields or true if autodiscovered in the first CSV line, default to null. Impact the `transform` argument and the `data` event by providing an object instead of an array, order matters, see the transform and the columns sections for more details.
* `flags` Used to read a file stream, default to the r charactere.
* `encoding` Encoding of the read stream, defaults to 'utf8', applied when a readable stream is created.
* `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
* `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
* `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
Additionnaly, in case you are working with stream, you can pass all
the options accepted by the `stream.pipe` function.
* `delimiter` Set the field delimiter, one character only, defaults to comma.
* `rowDelimiter` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
* `quote` Optionnal character surrounding a field, one character only, defaults to double quotes.
* `escape` Set the escape character, one character only, defaults to double quotes.
* `columns` List of fields or true if autodiscovered in the first CSV line, default to null. Impact the `transform` argument and the `data` event by providing an object instead of an array, order matters, see the transform and the columns sections for more details.
* `flags` Used to read a file stream, default to the r charactere.
* `encoding` Encoding of the read stream, defaults to 'utf8', applied when a readable stream is created.
* `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
* `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
* `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
Additionnaly, in case you are working with stream, you can pass all
the options accepted by the `stream.pipe` function.
*/
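
A small sketch of the getter/setter behaviour described above; the option values are examples only:

```javascript
// Set input options on an existing csv instance, then read them back.
var csv = require('csv');

var parser = csv();
parser.from.options({ delimiter: '\t', trim: true });
console.log(parser.from.options().delimiter); // '\t'
```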

@@ -133,9 +135,9 @@

`from.array(data, [options])`
------------------------------
Read from an array. Take an array as first argument and optionally
some options as a second argument. Each element of the array
represents a csv record. Those elements may be a string, a buffer, an
array or an object.
`from.array(data, [options])`
------------------------------
Read from an array. Take an array as first argument and optionally
some options as a second argument. Each element of the array
represents a csv record. Those elements may be a string, a buffer, an
array or an object.
*/
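
A brief sketch; the record values are arbitrary:

```javascript
// Each array element becomes one CSV record.
var csv = require('csv');

csv()
  .from.array([ ['1', '2', '3'], ['a', 'b', 'c'] ])
  .to.string(function(data, count){ console.log(data, count); });
```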

@@ -147,2 +149,3 @@

var record, _i, _len;
for (_i = 0, _len = data.length; _i < _len; _i++) {

@@ -157,14 +160,14 @@ record = data[_i];

/*
`from.string(data, [options])`
-------------------------------
Read from a string or a buffer. Take a string as first argument and
optionally an object of options as a second argument. The string
must be the complete csv data, look at the streaming alternative if your
CSV is large.
csv()
.from( '"1","2","3","4"\n"a","b","c","d"' )
.to( function(data){} )
`from.string(data, [options])`
-------------------------------
Read from a string or a buffer. Take a string as first argument and
optionally an object of options as a second argument. The string
must be the complete csv data, look at the streaming alternative if your
CSV is large.
csv()
.from( '"1","2","3","4"\n"a","b","c","d"' )
.to( function(data){} )
*/

@@ -181,8 +184,8 @@

/*
`from.path(path, [options])`
----------------------------
Read from a file path. Take a file path as first argument and optionally an object
of options as a second argument.
`from.path(path, [options])`
----------------------------
Read from a file path. Take a file path as first argument and optionally an object
of options as a second argument.
*/
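
For example, assuming a hypothetical path and an explicit encoding:

```javascript
var csv = require('csv');

csv()
  .from.path('./path/to/file.csv', { encoding: 'utf8' })
  .on('end', function(){ console.log('done'); });
```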

@@ -192,2 +195,3 @@

var stream;
this.options(options);

@@ -198,8 +202,8 @@ stream = fs.createReadStream(path, csv.from.options());

/*
`from.stream(stream, [options])`
--------------------------------
Read from a stream. Take a readable stream as first argument and optionally
an object of options as a second argument.
`from.stream(stream, [options])`
--------------------------------
Read from a stream. Take a readable stream as first argument and optionally
an object of options as a second argument.
*/
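
For example, with an fs read stream as the source (any readable stream works):

```javascript
var csv = require('csv');
var fs = require('fs');

csv()
  .from.stream(fs.createReadStream('./path/to/file.csv'))
  .on('end', function(){ console.log('done'); });
```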

@@ -206,0 +210,0 @@

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
var Generator, Stream, util;

@@ -36,2 +36,3 @@

var _base, _base1, _ref, _ref1;
this.options = options != null ? options : {};

@@ -58,2 +59,3 @@ if ((_ref = (_base = this.options).duration) == null) {

var char, column, line, nb_chars, nb_words, _i, _j, _ref, _ref1;
this.paused = false;

@@ -60,0 +62,0 @@ while (!this.paused && this.readable) {

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
/*

@@ -178,2 +178,3 @@

var self;
self = this;

@@ -204,2 +205,3 @@ this.paused = false;

var eof;
eof = self.options.to.eof;

@@ -264,2 +266,3 @@ if (eof) {

var csv;
if (!this.writable) {

@@ -266,0 +269,0 @@ return false;

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
/*

@@ -10,3 +10,2 @@ Input and output options

*/
module.exports = function() {

@@ -13,0 +12,0 @@ return {

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
var EventEmitter, Parser;

@@ -32,2 +32,3 @@

this.nextChar = null;
this.closingQuote = 0;
this.line = [];

@@ -51,4 +52,7 @@ return this;

Parser.prototype.write = function(chars, end) {
var areNextCharsRowDelimiters, char, csv, delimLength, escapeIsQuote, i, isEscape, isQuote, isReallyEscaped, l, _results;
var areNextCharsRowDelimiters, char, csv, delimLength, escapeIsQuote, i, isDelimiter, isEscape, isQuote, isReallyEscaped, isRowDelimiter, l, ltrim, rtrim, _results;
csv = this.csv;
ltrim = this.options.trim || this.options.ltrim;
rtrim = this.options.trim || this.options.rtrim;
chars = this.buf + chars;

@@ -65,5 +69,9 @@ l = chars.length;

}
if ((i + this.options.escape.length >= l && chars.substr(i, this.options.escape.length) === this.options.escape) && !end) {
break;
}
char = this.nextChar ? this.nextChar : chars.charAt(i);
this.lastC = char;
this.nextChar = chars.charAt(i + 1);
if (!(this.options.rowDelimiter != null) && (this.nextChar === '\n' || this.nextChar === '\r')) {
if ((this.options.rowDelimiter == null) && (this.nextChar === '\n' || this.nextChar === '\r')) {
this.options.rowDelimiter = this.nextChar;

@@ -75,52 +83,54 @@ if (this.nextChar === '\r' && chars.charAt(i + 2) === '\n') {

}
if (char === this.options.escape || char === this.options.quote) {
isReallyEscaped = false;
if (char === this.options.escape) {
escapeIsQuote = this.options.escape === this.options.quote;
isEscape = this.nextChar === this.options.escape;
isQuote = this.nextChar === this.options.quote;
if (!(escapeIsQuote && !this.field && !this.quoting) && (isEscape || isQuote)) {
i++;
isReallyEscaped = true;
char = this.nextChar;
this.nextChar = chars.charAt(i + 1);
this.field += char;
isReallyEscaped = false;
if (char === this.options.escape) {
escapeIsQuote = this.options.escape === this.options.quote;
isEscape = this.nextChar === this.options.escape;
isQuote = this.nextChar === this.options.quote;
if (!(escapeIsQuote && !this.field && !this.quoting) && (isEscape || isQuote)) {
i++;
char = this.nextChar;
this.nextChar = chars.charAt(i + 1);
this.field += char;
i++;
continue;
}
}
if (!isReallyEscaped && char === this.options.quote) {
if (this.quoting) {
areNextCharsRowDelimiters = this.options.rowDelimiter && chars.substr(i + 1, this.options.rowDelimiter.length) === this.options.rowDelimiter;
if (this.nextChar && !areNextCharsRowDelimiters && this.nextChar !== this.options.delimiter) {
return this.error(new Error("Invalid closing quote at line " + (this.lines + 1) + "; found " + (JSON.stringify(this.nextChar)) + " instead of delimiter " + (JSON.stringify(this.options.delimiter))));
}
this.quoting = false;
this.closingQuote = i;
i++;
continue;
} else if (!this.field) {
this.quoting = true;
i++;
continue;
}
if (!isReallyEscaped && char === this.options.quote) {
if (this.quoting) {
areNextCharsRowDelimiters = this.options.rowDelimiter && chars.substr(i + 1, this.options.rowDelimiter.length) === this.options.rowDelimiter;
if (this.nextChar && !areNextCharsRowDelimiters && this.nextChar !== this.options.delimiter) {
return this.error(new Error("Invalid closing quote at line " + (this.lines + 1) + "; found " + (JSON.stringify(this.nextChar)) + " instead of delimiter " + (JSON.stringify(this.options.delimiter))));
}
this.quoting = false;
} else if (this.field) {
this.field += char;
}
isDelimiter = char === this.options.delimiter;
isRowDelimiter = this.options.rowDelimiter && chars.substr(i, this.options.rowDelimiter.length) === this.options.rowDelimiter;
if (!this.quoting && (isDelimiter || isRowDelimiter)) {
if (rtrim) {
if (this.closingQuote) {
this.field = this.field.substr(0, this.closingQuote);
} else {
this.quoting = true;
this.field = this.field.trimRight();
}
}
} else if (this.quoting) {
this.field += char;
} else if (char === this.options.delimiter) {
if (this.options.trim || this.options.rtrim) {
this.field = this.field.trimRight();
}
this.line.push(this.field);
this.closingQuote = 0;
this.field = '';
} else if (this.options.rowDelimiter && chars.substr(i, this.options.rowDelimiter.length) === this.options.rowDelimiter) {
this.lines++;
if (this.options.trim || this.options.rtrim) {
this.field = this.field.trimRight();
if (isRowDelimiter) {
this.emit('row', this.line);
this.line = [];
i += this.options.rowDelimiter.length;
this.nextChar = chars.charAt(i);
continue;
}
this.line.push(this.field);
this.field = '';
this.emit('row', this.line);
this.line = [];
this.lastC = char;
i += this.options.rowDelimiter.length;
this.nextChar = chars.charAt(i);
continue;
} else if (char === ' ' || char === '\t') {
if (!this.options.trim && !this.options.ltrim) {
} else if (!this.quoting && (char === ' ' || char === '\t')) {
if (!(ltrim && !this.field)) {
this.field += char;

@@ -131,3 +141,2 @@ }

}
this.lastC = char;
i++;

@@ -138,4 +147,2 @@ }

while (i < l) {
this.nextChar = chars.charAt(i);
this.nextChar = null;
this.buf += chars.charAt(i);

@@ -142,0 +149,0 @@ _results.push(i++);

@@ -1,3 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
module.exports = function() {

@@ -4,0 +3,0 @@ return {

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
/*

@@ -28,2 +28,4 @@

Stringifier.prototype.write = function(line, preserve) {
var e;
if (line == null) {

@@ -35,3 +37,4 @@ return;

this.csv.emit('record', line, this.csv.state.count - 1);
} catch (e) {
} catch (_error) {
e = _error;
return this.csv.error(e);

@@ -62,2 +65,3 @@ }

var column, columns, containsLinebreak, containsQuote, containsdelimiter, delimiter, escape, field, i, newLine, quote, regexp, _i, _j, _line, _ref, _ref1;
if (typeof line !== 'object') {

@@ -64,0 +68,0 @@ return line;

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
var Stream, fs, utils;

@@ -30,31 +30,33 @@

`to(mixed)`
-----------
`to(mixed)`
-----------
Write to any sort of destination. It should be considered a convenience function
which will discover the nature of the destination where the CSV data is written.
Write to any sort of destination. It should be considered a convenience function
which will discover the nature of the destination where the CSV data is written.
If it is a function, the CSV output will be provided as the first argument
of the callback. If it is a string, it is expected to be a
file path. If it is an instance of Stream, the object is considered an
output stream.
If it is a function, the CSV output will be provided as the first argument
of the callback. If it is a string, it is expected to be a
file path. If it is an instance of Stream, the object is considered an
output stream.
Here's some examples on how to use this function:
Here's some examples on how to use this function:
csv()
.from('"1","2","3","4","5"')
.to(function(data){ console.log(data) })
csv()
.from('"1","2","3","4","5"')
.to(function(data){ console.log(data) })
csv()
.from('"1","2","3","4","5"')
.to('./path/to/file.csv')
csv()
.from('"1","2","3","4","5"')
.to('./path/to/file.csv')
csv()
.from('"1","2","3","4","5"')
.to(fs.createWriteStream('./path/to/file.csv'))
csv()
.from('"1","2","3","4","5"')
.to(fs.createWriteStream('./path/to/file.csv'))
*/
var to;
to = function(mixed, options) {
var error;
error = false;

@@ -85,22 +87,22 @@ switch (typeof mixed) {

`to.options([options])`
-----------------------
`to.options([options])`
-----------------------
Update and retrieve options relative to the output. Return the options
as an object if no argument is provided.
Update and retrieve options relative to the output. Return the options
as an object if no argument is provided.
* `delimiter` Set the field delimiter, one character only, defaults to `options.from.delimiter` which is a comma.
* `quote` Defaults to the quote read option.
* `quoted` Boolean, default to false, quote all the fields even if not required.
* `escape` Defaults to the escape read option.
* `columns` List of fields, applied when `transform` returns an object, order matters, see the transform and the columns sections below.
* `header` Display the column names on the first line if the columns option is provided.
* `lineBreaks` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
* `flags` Defaults to 'w', 'w' to create or overwrite an file, 'a' to append to a file. Applied when using the `toPath` method.
* `newColumns` If the `columns` option is not specified (which means columns will be taken from the reader options, will automatically append new columns if they are added during `transform()`.
* `end` Prevent calling `end` on the destination, so that destination is no longer writable.
* `eof` Add a linebreak on the last line, default to false, expect a charactere or use '\n' if value is set to "true"
* `delimiter` Set the field delimiter, one character only, defaults to `options.from.delimiter` which is a comma.
* `quote` Defaults to the quote read option.
* `quoted` Boolean, default to false, quote all the fields even if not required.
* `escape` Defaults to the escape read option.
* `columns` List of fields, applied when `transform` returns an object, order matters, see the transform and the columns sections below.
* `header` Display the column names on the first line if the columns option is provided.
* `lineBreaks` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
* `flags` Defaults to 'w', 'w' to create or overwrite an file, 'a' to append to a file. Applied when using the `toPath` method.
* `newColumns` If the `columns` option is not specified (which means columns will be taken from the reader options, will automatically append new columns if they are added during `transform()`.
* `end` Prevent calling `end` on the destination, so that destination is no longer writable.
* `eof` Add a linebreak on the last line, default to false, expect a charactere or use '\n' if value is set to "true"
The end options is similar to passing `{end: false}` option in `stream.pipe()`. According to the Node.js documentation:
> By default end() is called on the destination when the source stream emits end, so that destination is no longer writable. Pass { end: false } as options to keep the destination stream open.
The end options is similar to passing `{end: false}` option in `stream.pipe()`. According to the Node.js documentation:
> By default end() is called on the destination when the source stream emits end, so that destination is no longer writable. Pass { end: false } as options to keep the destination stream open.
*/
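
A sketch combining a few of these write options; the column names and output path are hypothetical:

```javascript
var csv = require('csv');

csv()
  .from.array([ ['2013', 'Node CSV'] ])
  .to.path('./out.csv', {
    columns: ['year', 'title'], // hypothetical column names for the header line
    header: true,               // print the column names on the first line
    quoted: true,               // quote every field even when not required
    eof: true                   // terminate the last record with a line break
  });
```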

@@ -140,14 +142,14 @@

`to.string(callback, [options])`
------------------------------
`to.string(callback, [options])`
------------------------------
Provide the output string to a callback.
Provide the output string to a callback.
csv()
.from( '"1","2","3"\n"a","b","c"' )
.to.string( function(data, count){} )
csv()
.from( '"1","2","3"\n"a","b","c"' )
.to.string( function(data, count){} )
Callback is called with 2 arguments:
* data Entire CSV as a string
* count Number of stringified records
Callback is called with 2 arguments:
* data Entire CSV as a string
* count Number of stringified records
*/

@@ -157,2 +159,3 @@

var data, stream;
this.options(options);

@@ -174,7 +177,7 @@ data = '';

`to.stream(stream, [options])`
------------------------------
`to.stream(stream, [options])`
------------------------------
Write to a stream. Take a writable stream as first argument and
optionally an object of options as a second argument.
Write to a stream. Take a writable stream as first argument and
optionally an object of options as a second argument.
*/
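
For instance, writing to `process.stdout` and keeping it open with the `end` option:

```javascript
var csv = require('csv');

csv()
  .from.string('"1","2"\n"3","4"')
  .to.stream(process.stdout, { end: false }); // do not call end() on stdout
```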

@@ -195,9 +198,9 @@

`to.path(path, [options])`
--------------------------
`to.path(path, [options])`
--------------------------
Write to a path. Take a file path as first argument and optionally an object of
options as a second argument. The `close` event is sent after the file is written.
Relying on the `end` event is incorrect because it is sent when parsing is done
but before the file is written.
Write to a path. Take a file path as first argument and optionally an object of
options as a second argument. The `close` event is sent after the file is written.
Relying on the `end` event is incorrect because it is sent when parsing is done
but before the file is written.
*/
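
For example, waiting on `close` rather than `end` before touching the written file:

```javascript
var csv = require('csv');

csv()
  .from.string('"1","2"\n"3","4"')
  .to.path('./path/to/out.csv', { flags: 'w' })
  .on('close', function(){ console.log('file fully written'); });
```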

@@ -207,2 +210,3 @@

var stream;
this.options(options);

@@ -217,14 +221,14 @@ options = utils.merge({}, csv.options.to);

`to.array(path, [options])`
--------------------------
`to.array(path, [options])`
--------------------------
Provide the output string to a callback.
Provide the output string to a callback.
csv()
.from( '"1","2","3"\n"a","b","c"' )
.to.array( function(data, count){} )
csv()
.from( '"1","2","3"\n"a","b","c"' )
.to.array( function(data, count){} )
Callback is called with 2 arguments:
* data Entire CSV as an array of records
* count Number of stringified records
Callback is called with 2 arguments:
* data Entire CSV as an array of records
* count Number of stringified records
*/

@@ -234,2 +238,3 @@

var records;
this.options(options);

@@ -236,0 +241,0 @@ records = [];

@@ -1,2 +0,2 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
var Transformer, stream;

@@ -104,2 +104,3 @@

var k, label, labels;
labels = this.csv.options.to.columns || this.csv.options.from.columns;

@@ -109,2 +110,3 @@ if (typeof labels === 'object') {

var _results;
_results = [];

@@ -133,5 +135,6 @@ for (k in labels) {

var column, columns, csv, done, finish, i, lineAsObject, run, self, sync, _i, _j, _len, _len1;
self = this;
csv = this.csv;
if (!(this.columns != null)) {
if (this.columns == null) {
columns = csv.options.from.columns;

@@ -188,2 +191,3 @@ if (typeof columns === 'object' && columns !== null && !Array.isArray(columns)) {

var isObject;
self.running--;

@@ -205,2 +209,4 @@ if (err) {

run = function(line) {
var err;
self.running++;

@@ -213,3 +219,4 @@ try {

}
} catch (err) {
} catch (_error) {
err = _error;
return done(err);

@@ -216,0 +223,0 @@ }

@@ -1,6 +0,6 @@

// Generated by CoffeeScript 1.4.0
// Generated by CoffeeScript 1.6.2
module.exports = {
merge: function(obj1, obj2) {
var key, r;
r = obj1 || {};

@@ -7,0 +7,0 @@ for (key in obj2) {

{
"name": "csv",
"version": "0.2.9",
"version": "0.3.0",
"description": "CSV parser with simple api, full of options and tested against large datasets.",

@@ -5,0 +5,0 @@ "homepage": "http://www.adaltas.com/projects/node-csv/",

@@ -41,2 +41,3 @@ [![Build Status](https://secure.travis-ci.org/wdavidw/node-csv-parser.png)](http://travis-ci.org/wdavidw/node-csv-parser)

// node samples/sample.js
var fs = require('fs');
var csv = require('csv');

@@ -43,0 +44,0 @@ csv()
