Fast-csv
This is a library that provides CSV parsing and formatting.
NOTE As of v0.2.0 fast-csv
supports multi-line values.
Installation
npm install fast-csv
Usage
Parsing
All methods accept the following options
headers=false
: Set to true if you expect the first line of your CSV
to contain headers, alternately you can specify an array of headers to use.ignoreEmpty=false
: If you wish to ignore empty rows.delimiter=','
: If your data uses an alternate delimiter such as ;
or \t
.
- NOTE When specifying an alternate
delimiter
you may only pass in a single character delimiter
quote='"'
: The character to use to escape values that contain a delimiter.escape='"'
: The character to use when escaping a value that is quoted
and contains a quote
character.
i.e
: 'First,"Name"' => '"First,""name"""'
- The following are options for parsing only.
trim=false
: If you want to trim all values parsed set to true.rtrim=false
: If you want to right trim all values parsed set to true.ltrim=false
: If you want to left trim all values parsed set to true.
events
parse-error
: Emitted if there was an error parsing a row.
record
: Emitted when a record is parsed.
data-invalid
: Emitted if an invalid row was encountered; only emitted if the validate
function is used.
data
: Emitted with the stringified
version of a record.
csv([options])
If you use fast-csv
as a function it returns a transform stream that can be piped into.
var stream = fs.createReadStream("my.csv");
var csvStream = csv()
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
stream.pipe(csvStream);
.fromPath(path[, options])
This method parses a file from the specified path.
var csv = require("fast-csv");
csv
.fromPath("my.csv")
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
.fromString(string[, options])
This method parses a string
var csv = require("fast-csv");
var CSV_STRING = 'a,b\n' +
'a1,b1\n' +
'a2,b2\n';
csv
.fromString(CSV_STRING, {headers: true})
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
.fromStream(stream[, options])
This accepts a readable stream to parse data from.
var stream = fs.createReadStream("my.csv");
csv()
.fromStream(stream)
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
If you expect the first line of your csv to be headers you may pass a headers option in. Setting the headers option will
cause each row to be an object rather than an array.
var stream = fs.createReadStream("my.csv");
csv()
.fromStream(stream, {headers : true})
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
You may alternatively pass an array of header names which must match the order of each column in the csv, otherwise
the data columns will not match.
var stream = fs.createReadStream("my.csv");
csv
.fromStream(stream, {headers : ["firstName", "lastName", "address"]})
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
If your data may include empty rows, the sort that Excel might include at the end of the file for instance, you can ignore
these by including the ignoreEmpty
option.
Any rows consisting of nothing but empty strings and/or commas will be skipped, without emitting a 'data' or 'error' event.
var stream = fs.createReadStream("my.csv");
csv
.fromStream(stream, {ignoreEmpty: true})
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
Validating
You can validate each row in the csv by providing a validate handler. If a row is invalid then a data-invalid
event
will be emitted with the row and the index.
var stream = fs.createReadStream("my.csv");
csv()
.fromStream(stream, {headers : true})
.validate(function(data){
return data.age < 50;
})
.on("data-invalid", function(data){
})
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
Transforming
You can transform data by providing a transform function. What is returned from the transform function will
be provided to validate and emitted as a row.
var stream = fs.createReadStream("my.csv");
csv
.fromStream(stream)
.transform(function(data){
return data.reverse();
})
.on("record", function(data){
console.log(data);
})
.on("end", function(){
console.log("done");
});
Formatting
fast-csv
also allows you to create a CSV
from data.
Formatting accepts the same options as parsing.
createWriteStream(options)
This is the lowest level of the write methods, it creates a stream that can be used to create a csv of unknown size and pipe to an output csv.
var csvStream = csv.createWriteStream({headers: true}),
writableStream = fs.createWriteStream("my.csv");
writableStream.on("finish", function(){
console.log("DONE!");
});
csvStream.pipe(writableStream);
csvStream.write({a: "a0", b: "b0"});
csvStream.write({a: "a1", b: "b1"});
csvStream.write({a: "a2", b: "b2"});
csvStream.write({a: "a3", b: "b4"});
csvStream.write({a: "a3", b: "b4"});
csvStream.write(null);
Writing Data
Each of the following methods accept an array of values to be written, however each value must be an
array of arrays or objects.
write(arr[, options])
Create a readable stream to read data from.
var ws = fs.createWriteStream("my.csv");
csv
.write([
["a", "b"],
["a1", "b1"],
["a2", "b2"]
], {headers: true})
.pipe(ws);
var ws = fs.createWriteStream("my.csv");
csv
.write([
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {headers: true})
.pipe(ws);
writeToStream(stream,arr[, options])
Write an array of values to a WritableStream
csv
.writeToStream(fs.createWriteStream("my.csv"), [
["a", "b"],
["a1", "b1"],
["a2", "b2"]
], {headers: true});
csv
.writeToStream(fs.createWriteStream("my.csv"), [
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {headers: true});
writeToPath(arr[, options])
Write an array of values to the specified path
csv
.writeToPath("my.csv", [
["a", "b"],
["a1", "b1"],
["a2", "b2"]
], {headers: true})
.on("finish", function(){
console.log("done!");
});
csv
.writeToPath("my.csv", [
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {headers: true})
.on("finish", function(){
console.log("done!");
});
writeToString(arr[, options])
csv.writeToString([
["a", "b"],
["a1", "b1"],
["a2", "b2"]
], {headers: true});
csv.writeToString([
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {headers: true});
Benchmarks
Parsing 20000 records AVG over 3 runs
fast-csv: 198.67ms
csv: 525.33ms
Parsing 50000 records AVG over 3 runs
fast-csv: 441.33ms
csv: 1291ms
Parsing 100000 records AVG over 3 runs
fast-csv: 866ms
csv: 2773.33ms
Parsing 1000000 records AVG over 3 runs
fast-csv: 8562.67ms
csv: 30030.67ms
License
MIT https://github.com/C2FO/fast-csv/raw/master/LICENSE
## Meta
## Namespaces
## Classes