Comparing version 0.1.1 to 0.1.2
@@ -1,1 +0,1 @@ | ||
module.exports=require("./libs"); | ||
module.exports=require("./libs/csv2json.js"); |
@@ -9,3 +9,6 @@ module.exports=csvAdv; | ||
//it is a bridge from csv component to our parsers | ||
function csvAdv(){ | ||
function csvAdv(constructResult){ | ||
if (typeof constructResult !== false){ | ||
constructResult=true; | ||
} | ||
var instance= csv.apply(this); | ||
@@ -23,3 +26,5 @@ | ||
that._rowProcess(row,index,resultRow); | ||
that.resultObject.csvRows.push(resultRow); | ||
if (constructResult){ | ||
that.resultObject.csvRows.push(resultRow); | ||
} | ||
instance.emit("record_parsed",resultRow,row,index); | ||
@@ -26,0 +31,0 @@ } |
@@ -22,6 +22,18 @@ /** | ||
var csvConverter=new Converter(); | ||
csvConverter.on("end_parsed",function(json){ | ||
process.stdout.write(JSON.stringify(json)); | ||
process.exit(0); | ||
var started=false; | ||
var writeStream=process.stdout; | ||
csvConverter.on("record_parsed",function(rowJSON){ | ||
if (started){ | ||
writeStream.write(",\n"); | ||
} | ||
writeStream.write(JSON.stringify(rowJSON)); //write parsed JSON object one by one. | ||
if (started==false){ | ||
started=true; | ||
} | ||
}); | ||
writeStream.write("[\n"); //write array symbol | ||
csvConverter.on("end_parsed",function(){ | ||
writeStream.write("\n]"); //end array symbol | ||
}); | ||
csvConverter.on("error",function(err){ | ||
@@ -28,0 +40,0 @@ console.error(err); |
@@ -11,3 +11,3 @@ { | ||
], | ||
"version":"0.1.1", | ||
"version":"0.1.2", | ||
"dependencies":{ | ||
@@ -14,0 +14,0 @@ "express":"3.2.6", |
@@ -229,1 +229,32 @@ #CSV2JSON | ||
#### Big CSV File | ||
The csvtojson library was designed to handle converting large CSV files. To reduce memory consumption, it is recommended to use a read stream together with a write stream. | ||
var Converter=require("csvtojson").core.Converter; | ||
var csvConverter=new Converter(false); // Passing false turns off final result construction, avoiding huge memory consumption while parsing. The trade-off is that the final result will not be populated to the end_parsed event. | ||
var readStream=require("fs").createReadStream("inputData.csv"); | ||
var writeStream=require("fs").createWriteStream("outputData.json"); | ||
var started=false; | ||
csvConverter.on("record_parsed",function(rowJSON){ | ||
if (started){ | ||
writeStream.write(",\n"); | ||
} | ||
writeStream.write(JSON.stringify(rowJSON)); //write parsed JSON object one by one. | ||
if (started==false){ | ||
started=true; | ||
} | ||
}); | ||
writeStream.write("[\n"); //write array symbol | ||
csvConverter.on("end_parsed",function(){ | ||
writeStream.write("\n]"); //end array symbol | ||
}); | ||
csvConverter.from(readStream); | ||
The Converter constructor was passed a "false" parameter, which tells the constructor not to assemble the final result — doing so would consume memory similar in size to the file itself. Instead, the output is constructed line by line through the writable stream object. |
24123
387
259