csvtojson - npm Package Compare versions

Comparing version 0.3.1 to 0.3.2

test/data/columnArray

libs/core/csvConverter.js (35 changes)

@@ -11,4 +11,5 @@ module.exports=csvAdv;

var _param={
"constructResult":true,
"delimiter":","
"constructResult":true, //set to false to not construct result in memory. suitable for big csv data
"delimiter":",", // change the delimiter of csv columns
"quote":"\"" //quote for a column containing delimiter.
}

@@ -35,3 +36,29 @@ if (params && typeof params =="object"){

self.on("record",function(rowStr,index,lastLine){
-var row=rowStr.split(self.param.delimiter);
+var quote=self.param.quote;
+var delimiter=self.param.delimiter;
+var rowArr=rowStr.split(delimiter);
+var row=[];
+var inquote=false;
+var quoteBuff="";
+for (var i=0;i<rowArr.length;i++){
+  var ele=rowArr[i];
+  if (inquote){
+    quoteBuff+=delimiter;
+    if (ele.indexOf(quote)===ele.length-1){
+      quoteBuff+=ele.substr(0,ele.length-1);
+      row.push(quoteBuff);
+      inquote=false;
+      quoteBuff="";
+    }else{
+      quoteBuff+=ele;
+    }
+  }else{
+    if (ele.indexOf(quote)===0){
+      inquote=true;
+      quoteBuff+=ele.substr(1,ele.length-1);
+    }else{
+      row.push(ele);
+    }
+  }
+}
if (index ==0){

@@ -43,3 +70,3 @@ self._headRowProcess(row);

self._rowProcess(row,index,resultRow);
self.emit("record_parsed",resultRow,row,index);
self.emit("record_parsed",resultRow,row,index-1);
if (started===true ){

@@ -46,0 +73,0 @@ self.push(",\n");

package.json (2 changes)

@@ -17,3 +17,3 @@ {

],
"version":"0.3.1",
"version":"0.3.2",
"keywords":[

@@ -20,0 +20,0 @@ "csv",

@@ -27,2 +27,6 @@ #CSV2JSON

+##version 0.3.2
+* Added quote parameter to support quoted column content containing delimiters
+* Changed row index to start from 0 instead of 1 when populated through the record_parsed event
##version 0.3

@@ -50,2 +54,3 @@ * Removed all dependencies

* [Process Big CSV File in CLI](#convert-big-csv-file-with-command-line-tool)
+* [Column Array](#column-array)

@@ -131,2 +136,3 @@ GitHub: https://github.com/Keyang/node-csvtojson

* delimiter: delimiter used for separating columns. default: ","
+* quote: If a column contains the delimiter, a quote character can be used to surround the column content, e.g. "hello, world" won't be split into two columns while parsing. default: " (double quote)
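
A minimal sketch of the new quote option described in the bullet above, assuming the converter is required from libs/core/csvConverter.js as in the package's own tests; the input file name is hypothetical:

```js
var fs = require("fs");
var CSVAdv = require("./libs/core/csvConverter.js"); // path assumed, mirroring the test suite

var converter = new CSVAdv({
  "delimiter": ",",
  "quote": "\"" // default; a column written as "hello, world" stays as one column
});
converter.on("record_parsed", function(resultRow, rawRow, rowIndex) {
  console.log(rowIndex, resultRow);
});
converter.on("end_parsed", function(jsonObj) {
  console.log(jsonObj);
});
fs.createReadStream("./quoted.csv").pipe(converter); // hypothetical input file
```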

@@ -208,3 +214,3 @@ #### Parser

* end_parsed: Emitted when parsing has finished. The callback function receives the JSON object if constructResult is set to true.
-* record_parsed: Emitted each time a row has been parsed. The callback function has the following parameters: result row JSON object reference, original row array object reference, row index
+* record_parsed: Emitted each time a row has been parsed. The callback function has the following parameters: result row JSON object reference, original row array object reference, row index of the current row in the csv (the header row does not count; the first content row starts from index 0)
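
A small sketch of the re-indexed record_parsed event, assuming the same require path as the test suite and assuming the event still fires per row when constructResult is false (which the parameter comment above recommends for big CSV data):

```js
var fs = require("fs");
var CSVAdv = require("./libs/core/csvConverter.js"); // path assumed, mirroring the test suite

var converter = new CSVAdv({
  "constructResult": false // do not hold the whole result in memory
});
converter.on("record_parsed", function(resultRow, rawRow, rowIndex) {
  // as of 0.3.2 the first content row has rowIndex 0; the header row is not counted
  console.log("row " + rowIndex + ":", JSON.stringify(resultRow));
});
converter.on("end_parsed", function(jsonObj) {
  // jsonObj carries the aggregated result only when constructResult is true
  console.log("parsing finished");
});
fs.createReadStream("./big.csv").pipe(converter); // hypothetical input file
```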

@@ -321,1 +327,51 @@ To subscribe the event:

They will do the same job.
+#### Column Array
+To convert csv data to a column array, you have to construct the result in memory. See the example below.
+```js
+var columArrData=__dirname+"/data/columnArray";
+var rs=fs.createReadStream(columArrData);
+var result = {}
+var csvConverter=new CSVAdv();
+//end_parsed will be emitted once parsing is finished
+csvConverter.on("end_parsed", function(jsonObj) {
+  console.log(result);
+  console.log("Finished parsing");
+  done();
+});
+//record_parsed will be emitted each time a row has been parsed.
+csvConverter.on("record_parsed", function(resultRow, rawRow, rowIndex) {
+  for (var key in resultRow) {
+    if (!result[key] || !result[key] instanceof Array) {
+      result[key] = [];
+    }
+    result[key][rowIndex] = resultRow[key];
+  }
+});
+rs.pipe(csvConverter);
+```
+Here is an example input:
+TIMESTAMP,UPDATE,UID,BYTES SENT,BYTES RCVED
+1395426422,n,10028,1213,5461
+1395426422,n,10013,9954,13560
+1395426422,n,10109,221391500,141836
+1395426422,n,10007,53448,308549
+1395426422,n,10022,15506,72125
+It will be converted to:
+```json
+{
+  "TIMESTAMP": ["1395426422", "1395426422", "1395426422", "1395426422", "1395426422"],
+  "UPDATE": ["n", "n", "n", "n", "n"],
+  "UID": ["10028", "10013", "10109", "10007", "10022"],
+  "BYTES SENT": ["1213", "9954", "221391500", "53448", "15506"],
+  "BYTES RCVED": ["5461", "13560", "141836", "308549", "72125"]
+}
+```

@@ -1,18 +1,18 @@

-var CSVAdv=require("../libs/core/csvConverter.js");
-var assert=require("assert");
-var fs=require("fs");
-var file=__dirname+"/data/testData";
-var trailCommaData=__dirname+"/data/trailingComma";
-describe("CSV Converter",function(){
-it ("should create new instance of csv",function(){
-var obj=new CSVAdv();
+var CSVAdv = require("../libs/core/csvConverter.js");
+var assert = require("assert");
+var fs = require("fs");
+var file = __dirname + "/data/testData";
+var trailCommaData = __dirname + "/data/trailingComma";
+describe("CSV Converter", function() {
+it("should create new instance of csv", function() {
+var obj = new CSVAdv();
assert(obj);
});
-it ("should read from a stream",function(done){
-var obj=new CSVAdv();
-var stream=fs.createReadStream(file);
-obj.on("end_parsed",function(obj){
-console.log(obj);
-assert(obj.length===2);
+it("should read from a stream", function(done) {
+var obj = new CSVAdv();
+var stream = fs.createReadStream(file);
+obj.on("end_parsed", function(obj) {
+// console.log(obj);
+assert(obj.length === 2);
done();

@@ -23,10 +23,10 @@ });

it ("should emit record_parsed message once a row is parsed.",function(done){
var obj=new CSVAdv();
var stream=fs.createReadStream(file);
obj.on("record_parsed",function(resultRow,row,index){
it("should emit record_parsed message once a row is parsed.", function(done) {
var obj = new CSVAdv();
var stream = fs.createReadStream(file);
obj.on("record_parsed", function(resultRow, row, index) {
assert(resultRow);
//console.log(resultRow);
});
obj.on("end",function(){
obj.on("end", function() {
done();

@@ -37,8 +37,8 @@ });

it ("should emit end_parsed message once it is finished.",function(done){
var obj=new CSVAdv();
var stream=fs.createReadStream(file);
obj.on("end_parsed",function(result){
it("should emit end_parsed message once it is finished.", function(done) {
var obj = new CSVAdv();
var stream = fs.createReadStream(file);
obj.on("end_parsed", function(result) {
assert(result);
assert(result.length==2);
assert(result.length == 2);
assert(result[0].date);

@@ -49,4 +49,4 @@ assert(result[0].employee);

assert(result[0].employee.number);
-assert(result[0].employee.key.length===2);
-assert(result[0].address.length===2);
+assert(result[0].employee.key.length === 2);
+assert(result[0].address.length === 2);
//console.log(JSON.stringify(result));

@@ -58,8 +58,8 @@ done();

it ("should handle traling comma gracefully",function(done){
var stream=fs.createReadStream(trailCommaData);
var obj=new CSVAdv();
obj.on("end_parsed",function(result){
it("should handle traling comma gracefully", function(done) {
var stream = fs.createReadStream(trailCommaData);
var obj = new CSVAdv();
obj.on("end_parsed", function(result) {
assert(result);
assert(result.length>0);
assert(result.length > 0);
//console.log(JSON.stringify(result));

@@ -70,2 +70,42 @@ done();

});
it("should handle comma in column which is surrounded by qoutes", function(done) {
var testData = __dirname + "/data/dataWithComma";
var rs = fs.createReadStream(testData);
var obj = new CSVAdv({
"quote": "#"
});
obj.on("end_parsed", function(result) {
assert(result[0].col1 == "Mini. Sectt,hisar S.O");
assert(result[3].col2 == "125001,fenvkdsf");
// console.log(result);
done();
});
rs.pipe(obj);
});
it("should be able to convert a csv to column array data", function(done) {
var columArrData=__dirname+"/data/columnArray";
var rs=fs.createReadStream(columArrData);
var result = {}
var csvConverter=new CSVAdv();
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed", function(jsonObj) {
assert(result.TIMESTAMP.length===5);
done();
});
//record_parsed will be emitted each time a row has been parsed.
csvConverter.on("record_parsed", function(resultRow, rawRow, rowIndex) {
for (var key in resultRow) {
if (!result[key] || !result[key] instanceof Array) {
result[key] = [];
}
result[key][rowIndex] = resultRow[key];
}
});
rs.pipe(csvConverter);
});
});