csvtojson - npm package version comparison

Comparing version 0.3.21 to 0.4.0

bin/csvtojson.bat


bin/options.json

@@ -27,2 +27,6 @@ {

"type": "boolean"
},
"--workerNum": {
"desc": "Number of worker processes. The worker process will use multi-cores to help process CSV data. Set to number of Core to improve the performance of processing large csv file. Keep 1 for small csv files. Default 1.",
"type": "number"
}

@@ -29,0 +33,0 @@ },


index.js

@@ -1,1 +0,1 @@

module.exports=require("./libs/csv2json.js");
module.exports = require("./libs/csv2json.js");

@@ -1,3 +0,3 @@

module.exports.Converter=require("./csvConverter.js");
module.exports.Parser=require("./parser.js");
module.exports.parserMgr=require("./parserMgr.js");
module.exports.Converter = require("./Converter.js");
module.exports.Parser = require("./parser.js");
module.exports.parserMgr = require("./parserMgr.js");

@@ -1,46 +0,38 @@

module.exports=Parser;
function Parser(name,regExp,parser){
this.name=typeof name == "undefined"?"Default":name;
this.regExp=null;
this.type="";
if (typeof regExp !="undefined"){
if (typeof regExp =="string"){
this.regExp=new RegExp(regExp);
}else{
this.regExp=regExp;
}
function Parser(name, regExp, parser, processSafe) {
this.name = typeof name === "undefined" ? "Default" : name;
this.regExp = null;
this.type = "";
this.processSafe = processSafe;
if (typeof regExp !== "undefined") {
if (typeof regExp === "string") {
this.regExp = new RegExp(regExp);
} else {
this.regExp = regExp;
}
if (typeof parser!="undefined"){
this.parse=parser;
}
}
if (typeof parser !== "undefined") {
this.parse = parser;
}
}
Parser.prototype.test = function(str) {
return this.regExp && this.regExp.test(str);
};
Parser.prototype.test=function(str){
if (this.regExp==null){
return true;
}else{
return this.regExp.test(str);
}
}
// Parser.prototype.newProcess=function(mixedColumnTitle){
// var title=this.getTitle(mixedColumnTitle);
// return {
// "title"
// }
// }
// Parser.prototype.getTitle=function(mixedTitle){
// return mixedTitle.replace(this.regExp,"");
// }
Parser.prototype.parse=function(params){
params.resultRow[params.head]=params.item;
}
Parser.prototype.getHead=function(){
Parser.prototype.parse = function(params) {
params.resultRow[params.head] = params.item;
};
Parser.prototype.getHead = function() {
return this.head;
}
Parser.prototype.clone=function(){
return new Parser(this.name,this.regExp,this.parse);
}
Parser.prototype.getName=function(){
};
Parser.prototype.clone = function() {
var obj=Object.create(this);
var newParser=new Parser();
for (var key in obj){
newParser[key]=obj[key];
}
return newParser;
//return new Parser(this.name, this.regExp, this.parse, this.processSafe);
};
Parser.prototype.getName = function() {
return this.name;
}
};
module.exports = Parser;
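
The most consequential change in this file is the rewritten clone(). A minimal editor's sketch of the difference, assuming the 0.4.0 package is installed and that the Parser export resolves the same way as Converter and parserMgr do in the README examples later in this diff:

```js
// Editor's sketch, not part of the diff: the new clone() copies every
// enumerable property onto a fresh Parser, so per-instance state set after
// construction (head, type and the new processSafe flag) survives cloning.
// The old clone() re-invoked the constructor and only carried name, regExp
// and parse.
var Parser = require("csvtojson").Parser;
var p = new Parser("myNumber", /^number#/, function (params) {
  params.resultRow[params.head] = Number(params.item);
}, true); // processSafe
p.head = "number#amount";
var copy = p.clone();
console.log(copy.head, copy.processSafe); // "number#amount" true
```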

@@ -1,83 +0,77 @@

//module interfaces
module.exports.addParser = addParser;
module.exports.initParsers = initParsers;
module.exports.getParser = getParser;
//implementation
var registeredParsers = [];
var Parser = require("./parser.js");
var _ = require('underscore');
var defaultParser = require("./defaultParsers");
function registerParser(parser) {
if (parser instanceof Parser) {
if (registeredParsers.indexOf(parser) == -1) {
registeredParsers.push(parser);
}
function registerParser (parser) {
if (parser instanceof Parser && registeredParsers.indexOf(parser) === -1) {
registeredParsers.push(parser); // TODO indexOf doesn't work with object references
}
}
function addParser(name, regExp, parseFunc) {
var parser = new Parser(name, regExp, parseFunc);
registerParser(parser);
}
function initParsers(row, checkType) {
var parsers = [];
for (var i = 0; i < row.length; i++) {
var columnTitle = row[i];
parsers.push(getParser(columnTitle, checkType));
function splitTitle (columnTitle){
var splitArr = columnTitle.split("#");
var rtn;
if (splitArr.length === 1){
splitArr.unshift("");
return splitArr;
} else if (splitArr.length > 2) {
rtn = [];
rtn.push(splitArr.shift());
rtn.push(splitArr.join("#"));
return rtn;
}
return parsers;
return splitArr;
}
function getParser(columnTitle, checkType) {
if (!columnTitle){
columnTitle=""
}
var type="";
if (checkType){
var split=splitTitle(columnTitle);
type=split[0];
columnTitle=split[1];
}
for (var i = 0; i < registeredParsers.length; i++) {
var parser = registeredParsers[i];
if (parser.test(columnTitle)) {
function getParser (columnTitle, checkType) {
var inst, parser;
var type = "";
function getParserByName (parserName, columnTitle) {
var parser = _.find(registeredParsers, function (parser){
return parser.getName() === parserName;
});
if (parser) {
var inst = parser.clone();
inst.head = columnTitle;
inst.type=type;
return inst;
}
return new Parser(); //TODO remove new
}
var inst= getParserByName("json", columnTitle);
inst.type=type;
return inst;
}
function splitTitle(columnTitle){
var splitArr=columnTitle.split("#");
if (splitArr.length ===1){
splitArr.unshift("")
return splitArr
columnTitle = columnTitle ? columnTitle : '';
if (checkType){
var split = splitTitle(columnTitle);
type = split[0];
columnTitle = split[1];
}
if (splitArr.length>2){
var rtn=[];
rtn.push(splitArr.shift());
rtn.push(splitArr.join("#"));
return rtn
parser = _.find(registeredParsers, function (parser) {
return parser.test(columnTitle);
});
if (parser) {
inst = parser.clone();
inst.head = columnTitle;
} else {
inst = getParserByName("json", columnTitle);
}
return splitArr;
inst.type = type;
return inst;
}
function addParser (name, regExp, parseFunc,processSafe) {
var parser = new Parser(name, regExp, parseFunc,processSafe); //TODO remove new
registerParser(parser);
}
function getParserByName(parserName, columnTitle) {
for (var i = 0; i < registeredParsers.length; i++) {
var parser = registeredParsers[i];
if (parser.getName() == parserName) {
var inst = parser.clone();
inst.head = columnTitle;
return inst;
}
}
return new Parser();
function initParsers (row, checkType) {
var parsers = [];
row.forEach(function (columnTitle) {
parsers.push(getParser(columnTitle, checkType));
});
return parsers;
}
defaultParser.forEach(function (parserCfg){
addParser(parserCfg.name, parserCfg.regExp, parserCfg.parserFunc,parserCfg.processSafe);
});
require("./defaultParsers.js");
//module interfaces
module.exports.addParser = addParser;
module.exports.initParsers = initParsers;
module.exports.getParser = getParser;
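
From a caller's perspective, the visible parserMgr change is the extra processSafe argument that addParser now threads into the Parser constructor. A hedged usage sketch (the parser name and regular expression are borrowed from the README example further down this diff; the runtime effect of processSafe is not visible here):

```js
// Editor's sketch based on the new addParser signature above.
var parserMgr = require("csvtojson").parserMgr;
parserMgr.addParser("myParserName", /^\*parserRegExp\*/, function (params) {
  // same behaviour as the default Parser.prototype.parse shown earlier
  params.resultRow[params.head] = params.item;
}, true); // processSafe: new 4th argument, stored on the Parser instance
```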

@@ -1,2 +0,1 @@

module.exports = Result;
var Writable = require("stream").Writable;

@@ -8,4 +7,4 @@ var util = require("util");

this.parser = csvParser;
this.param=csvParser.param;
this.buffer =this.param.toArrayString?"":"["+csvParser.getEol();
this.param = csvParser.param;
this.buffer = this.param.toArrayString?"":"["+csvParser.getEol();
this.started = false;

@@ -21,3 +20,3 @@ var self = this;

Result.prototype._write = function(data, encoding, cb) {
if (encoding == "buffer") {
if (encoding === "buffer") {
encoding = "utf8";

@@ -36,7 +35,7 @@ }

cb();
}
};
Result.prototype.getBuffer = function() {
return JSON.parse(this.buffer);
}
};

@@ -46,3 +45,5 @@ Result.prototype.disableConstruct = function() {

cb(); //do nothing just dropit
}
}
};
};
module.exports = Result;

@@ -0,2 +1,6 @@

//deprecated but leave it for backword compatibility
module.exports.core=require("./core");
module.exports.interfaces=require("./interfaces");
//live apis
module.exports=require("./core");
module.exports.interfaces = require("./interfaces");
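
The net effect of this index change on the public API, sketched for clarity (the deprecated `.core` path appears to be kept for backward compatibility, per the comment above):

```js
// 0.4.0 style: classes hang directly off the module
var Converter = require("csvtojson").Converter;
var parserMgr = require("csvtojson").parserMgr;

// pre-0.4.0 style, marked deprecated in this diff but still resolvable
var LegacyConverter = require("csvtojson").core.Converter;
```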

@@ -1,1 +0,1 @@

module.exports=require("./main.js");
module.exports = require("./main.js");

@@ -5,22 +5,9 @@ /**

//module interfaces
module.exports.convertFile=convertFile;
module.exports.convertString=convertString;
//implementation
var Converter=require("../../core").Converter;
function convertFile(fileName){
var csvConverter=_initConverter();
csvConverter.from(fileName);
}
function convertString(csvString){
var csvConverter=_initConverter();
csvConverter.from(csvString);
}
var Converter = require("../../core/Converter.js");
function _initConverter(){
var csvConverter=new Converter();
var started=false;
var writeStream=process.stdout;
csvConverter.on("record_parsed",function(rowJSON){
var csvConverter = new Converter();
var started = false;
var writeStream = process.stdout;
csvConverter.on("record_parsed",function(rowJSON){
if (started){

@@ -30,4 +17,4 @@ writeStream.write(",\n");

writeStream.write(JSON.stringify(rowJSON)); //write parsed JSON object one by one.
if (started==false){
started=true;
if (started === false){
started = true;
}

@@ -45,2 +32,14 @@ });

return csvConverter;
}
}
function convertFile(fileName){
var csvConverter=_initConverter();
csvConverter.from(fileName);
}
function convertString(csvString){
var csvConverter=_initConverter();
csvConverter.from(csvString);
}
//module interfaces
module.exports.convertFile = convertFile;
module.exports.convertString = convertString;

@@ -1,1 +0,1 @@

module.exports=require("./webServer.js");
module.exports = require("./webServer.js");

@@ -1,55 +0,23 @@

//module interfaces
module.exports.startWebServer=startWebServer;
module.exports.applyWebServer=applyWebServer;
//implementation
// var express=require("express");
// var expressApp=express();
var http=require("http");
var CSVConverter=require("../../core").Converter;
var defaultArgs={
"port":"8801",
"urlpath":"/parseCSV"
}
var server=null;
function applyWebServer(app,url){
console.error("applyWebServer is deprecated. Use core you create your own handler.");
}
function startWebServer(args){
if (typeof args=="undefined"){
args={};
}
var serverArgs={};
for (var key in defaultArgs){
if (args[key]){
serverArgs[key]=args[key];
}else{
serverArgs[key]=defaultArgs[key];
var http = require("http");
var Converter = require("../../core/Converter.js");
function startWebServer (args) {
args = args || {};
var serverArgs = {
port: args.port || '8801',
urlpath: args.urlpath || '/parseCSV'
};
var server = http.createServer();
server.on("request", function(req, res){
if (req.url === serverArgs.urlpath && req.method === "POST"){
req.pipe(new Converter({constructResult:false})).pipe(res);
} else {
res.end("Please post data to: " + serverArgs.urlpath);
}
}
server=http.createServer();
server.on("request",function(req,res){
if (req.url==serverArgs.urlpath && req.method =="POST"){
_POSTData(req,res);
}else{
res.end("Please post data to: "+serverArgs.urlpath);
}
});
server.listen(serverArgs.port);
//expressApp.use(express.bodyParser());
// expressApp.post(serverArgs.urlpath,_POSTData);
// expressApp.get("/",function(req,res){
// res.end("POST to "+serverArgs.urlpath+" with CSV data to get parsed.");
// });
// expressApp.listen(serverArgs.port);
console.log("CSV Web Server Listen On:"+serverArgs.port);
console.log("POST to "+serverArgs.urlpath+" with CSV data to get parsed.");
console.log("CSV Web Server Listen On:" + serverArgs.port);
console.log("POST to " + serverArgs.urlpath + " with CSV data to get parsed.");
return server;
}
function _POSTData(req,res){
var converter=new CSVConverter({constructResult:false});
req.pipe(converter).pipe(res);
}
module.exports.startWebServer = startWebServer;
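
Functionally the rewritten server behaves as before: it answers POST requests on the configured path by piping the request body through a Converter and piping the JSON back out. A usage sketch matching the defaults above (the `interfaces.web` access path is the one used in the README section later in this diff):

```js
var web = require("csvtojson").interfaces.web;
// port and urlpath shown here are the defaults from serverArgs above
var server = web.startWebServer({ port: "8801", urlpath: "/parseCSV" });
// POST CSV data to http://localhost:8801/parseCSV to receive parsed JSON;
// any other request gets a plain-text hint in response.
```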

@@ -11,7 +11,13 @@ {

},
"contributors": [{
"name": "Keyang Xiang",
"email": "keyang.xiang@gmail.com"
}],
"version": "0.3.21",
"contributors": [
{
"name": "Keyang Xiang",
"email": "keyang.xiang@gmail.com"
},
{
"name": "Tom Dodson",
"email": "t3dodson@gmail.com"
}
],
"version": "0.4.0",
"keywords": [

@@ -28,9 +34,27 @@ "csv",

},
"license": [{
"type": "MIT",
"url": "https://github.com/Keyang/node-csvtojson/blob/master/LICENSE"
}],
"engines":{
"node":">=0.10"
"license": [
{
"type": "MIT",
"url": "https://github.com/Keyang/node-csvtojson/blob/master/LICENSE"
}
],
"engines": {
"node": ">=0.10"
},
"devDependencies": {
"grunt": "^0.4.5",
"grunt-contrib-jshint": "^0.11.2",
"grunt-contrib-uglify": "^0.9.1",
"grunt-contrib-watch": "^0.6.1",
"grunt-git": "^0.3.5",
"grunt-madge": "0.0.6",
"grunt-mocha-test": "^0.12.7",
"grunt-newer": "^1.1.0",
"imgur": "^0.1.5",
"mocha": "^2.2.5"
},
"dependencies": {
"async": "^1.2.1",
"underscore": "^1.8.3"
}
}

@@ -1,3 +0,11 @@

#CSV2JSON
All you need nodejs csv to json converter. Support big json data, CLI, web server, powerful nested JSON, customised parser, stream, pipe, and more!
#CSVTOJSON
All you need nodejs csv to json converter.
* Large CSV data
* Command Line Tool and Node.JS Lib
* Complex/nested JSON
* Easy Customised Parser
* Stream based
* multi CPU core support
* Easy Usage
* more!

@@ -8,17 +16,13 @@ #IMPORTANT!!

//Converter Class
var Converter=require("csvtojson").core.Converter;
var fs=require("fs");
var csvFileName="./myCSVFile";
var fileStream=fs.createReadStream(csvFileName);
var fs = require("fs");
var Converter = require("csvtojson").Converter;
var fileStream = fs.createReadStream("./file.csv");
//new converter instance
var csvConverter=new Converter({constructResult:true});
var converter = new Converter({constructResult:true});
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed",function(jsonObj){
converter.on("end_parsed", function (jsonObj) {
console.log(jsonObj); //here is your result json object
});
//read from file
fileStream.pipe(csvConverter);
fileStream.pipe(converter);
```

@@ -33,3 +37,3 @@

```js
csvConverter.fromString(csvString,callback);
csvConverter.fromString(csvString, callback);
```

@@ -61,2 +65,3 @@

* [Field Type](#field-type)
* [Multi-Core / Fork Process](#multi-cpu-(core))
* [Change Log](#change-log)

@@ -73,2 +78,3 @@

* Powerful library for you nodejs applications processing csv data.
* Multi cpu core support
* Extremly straight forward

@@ -118,4 +124,3 @@ * Multiple input support: CSV File, Readable Stream, CSV String etc.

```js
var server=require("csvtojson").interfaces.web;
var server = require("csvtojson").interfaces.web;
server.startWebServer({

@@ -144,13 +149,11 @@ "port":8801

//Converter Class
var Converter=require("csvtojson").core.Converter;
var fs=require("fs");
var csvFileName="./myCSVFile";
var fileStream=fs.createReadStream(csvFileName);
var fs = require("fs");
var Converter = require("csvtojson").Converter;
var fileStream = fs.createReadStream("./file.csv");
//new converter instance
var param={};
var csvConverter=new Converter(param);
var converter = new Converter(param);
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed",function(jsonObj){
converter.on("end_parsed", function (jsonObj) {
console.log(jsonObj); //here is your result json object

@@ -160,3 +163,3 @@ });

//read from file
fileStream.pipe(csvConverter);
fileStream.pipe(converter);
```

@@ -173,2 +176,4 @@ # Params

* ignoreEmpty: Ignore the empty value in CSV columns. If a column value is not giving, set this to true to skip them. Defalut: false.
* workerNum: Number of worker processes. The worker process will use multi-cores to help process CSV data. Set to number of Core to improve the performance of processing large csv file. Keep 1 for small csv files. Default 1.
* fork: Use another CPU core to convert the CSV stream
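
Taken together, the constructor now accepts the two new options alongside the existing ones. An illustrative editor's sketch only (values are arbitrary; workerNum and fork are described in their own sections further down this README diff):

```js
var Converter = require("csvtojson").Converter;
var converter = new Converter({
  constructResult: true, // keep the final JSON array in memory
  ignoreEmpty: true,     // skip empty CSV values (default: false)
  workerNum: 2,          // new in 0.4.0: worker processes for large files
  fork: false            // new in 0.4.0: run conversion in a child process
});
```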

@@ -183,3 +188,3 @@ # Parser

//Parser Manager
var parserMgr=require("csvtojson").core.parserMgr;
var parserMgr=require("csvtojson").parserMgr;

@@ -257,3 +262,3 @@ parserMgr.addParser("myParserName",/^\*parserRegExp\*/,function (params){

//Converter Class
var Converter=require("csvtojson").core.Converter;
var Converter=require("csvtojson").Converter;

@@ -337,3 +342,3 @@ //end_parsed will be emitted once parsing finished

```js
var Converter=require("csvtojson").core.Converter;
var Converter=require("csvtojson").Converter;
var csvConverter=new Converter({constructResult:false}); // The parameter false will turn off final result construction. It can avoid huge memory consumption while parsing. The trade off is final result will not be populated to end_parsed event.

@@ -528,2 +533,3 @@

Will be converted into:
```json

@@ -541,2 +547,3 @@ {

If checkType is turned **OFF**, it will be converted to:
```json

@@ -558,11 +565,13 @@ {

* number
* date
* date (Not supported since 0.3.21)
### Define Type
To define the field type, see following example
```csv
string#appNumber, string#finished, date#startDate
string#appNumber, string#finished, startDate
201401010002, true, 2014-01-01
```
The data will be converted to:
```json

@@ -572,24 +581,45 @@ {

"finished":"true",
"startDate":Wed Jan 01 2014 00:00:00 GMT+0000 (GMT)
"startDate":"2014-01-01"
}
```
### Invalid Value
If parser meets invalid value for a type while parsing a value, it will fallback to use string value.
For example:
```csv
number#order, date#shipDate
A00001, Unknown
## Multi-CPU (Core)
Since version 0.4.0, csvtojson supports multiple CPU cores to process large csv files.
The implementation and benchmark result can be found [here](http://keyangxiang.com/2015/06/11/node-js-multi-core-programming-pracitse/).
To enable multi-core, just pass the worker number as parameter of constructor:
```js
var Converter=require("csvtojson").Converter;
var converter=new Converter({
workerNum:2 //use two cores
});
```
The minimum worker number is 1. When worker number is larger than 1, the parser will balance the job load among workers.
It will be converted to:
```json
{
"order":"A00001",
"shipDate":"Unknown"
}
For command line, to use worker just use ```--workerNum``` argument:
```
csvtojson --workerNum=4 ./myfile.csv
```
It is worth to mention that for small size of CSV file it actually costs more time to create processes and keep the communication between them. Therefore, use less workers for small CSV files.
### Fork Process
Node.JS is running on single thread. You will not want to convert a large csv file on the same process where your node.js webserver is running. csvtojson gives an option to fork the whole conversion process to a new system process while the origin process will only pipe the input and result in and out. It very simple to enable this feature:
```js
var Converter=require("csvtojson").Converter;
var converter=new Converter({
fork:true //use child process to convert
});
```
Same as multi-workers, fork a new process will cause extra cost on process communication and life cycle management. Use it wisely.
#Change Log
##0.4.0
* Added Multi-core CPU support to increase performance
* Added "fork" option to delegate csv converting work to another process.
* Refactoring general flow
##0.3.21

@@ -626,1 +656,2 @@ * Refactored Command Line Tool.

* Converter Class now works as a proper stream object

@@ -1,2 +0,2 @@

var CSVAdv = require("../libs/core/csvConverter.js");
var Converter = require("../libs/core/Converter.js");
var assert = require("assert");

@@ -6,13 +6,12 @@ var fs = require("fs");

var trailCommaData = __dirname + "/data/trailingComma";
describe("CSV Converter", function() {
it("should create new instance of csv", function() {
var obj = new CSVAdv();
describe("CSV Converter", function () {
it("should create new instance of csv", function () {
var obj = new Converter();
assert(obj);
});
it("should read from a stream", function(done) {
var obj = new CSVAdv();
it("should read from a stream", function (done) {
var obj = new Converter();
var stream = fs.createReadStream(file);
obj.on("end_parsed", function(obj) {
// console.log(obj);
obj.on("end_parsed", function (obj) {
assert(obj.length === 2);

@@ -24,10 +23,9 @@ done();

it("should emit record_parsed message once a row is parsed.", function(done) {
var obj = new CSVAdv();
it("should emit record_parsed message once a row is parsed.", function (done) {
var obj = new Converter();
var stream = fs.createReadStream(file);
obj.on("record_parsed", function(resultRow, row, index) {
obj.on("record_parsed", function (resultRow) {
assert(resultRow);
//console.log(resultRow);
});
obj.on("end", function() {
obj.on("end", function () {
done();

@@ -38,8 +36,7 @@ });

it("should emit end_parsed message once it is finished.", function(done) {
var obj = new CSVAdv();
var stream = fs.createReadStream(file);
obj.on("end_parsed", function(result) {
it("should emit end_parsed message once it is finished.", function (done) {
var obj = new Converter();
obj.on("end_parsed", function (result) {
assert(result);
assert(result.length == 2);
assert(result.length === 2);
assert(result[0].date);

@@ -52,15 +49,13 @@ assert(result[0].employee);

assert(result[0].address.length === 2);
//console.log(JSON.stringify(result));
done();
});
stream.pipe(obj);
fs.createReadStream(file).pipe(obj);
});
it("should handle traling comma gracefully", function(done) {
it("should handle traling comma gracefully", function (done) {
var stream = fs.createReadStream(trailCommaData);
var obj = new CSVAdv();
obj.on("end_parsed", function(result) {
var obj = new Converter();
obj.on("end_parsed", function (result) {
assert(result);
assert(result.length > 0);
//console.log(JSON.stringify(result));
done();

@@ -70,11 +65,11 @@ });

});
it("should handle comma in column which is surrounded by qoutes", function(done) {
it("should handle comma in column which is surrounded by qoutes", function (done) {
var testData = __dirname + "/data/dataWithComma";
var rs = fs.createReadStream(testData);
var obj = new CSVAdv({
var obj = new Converter({
"quote": "#"
});
obj.on("end_parsed", function(result) {
assert(result[0].col1 == "\"Mini. Sectt");
assert(result[3].col2 == "125001,fenvkdsf");
obj.on("end_parsed", function (result) {
assert(result[0].col1 === "\"Mini. Sectt");
assert(result[3].col2 === "125001,fenvkdsf");
// console.log(result);

@@ -86,11 +81,10 @@ done();

it("should be able to convert a csv to column array data", function(done) {
it("should be able to convert a csv to column array data", function (done) {
var columArrData = __dirname + "/data/columnArray";
var rs = fs.createReadStream(columArrData);
var result = {}
var csvConverter = new CSVAdv();
var result = {};
var csvConverter = new Converter();
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed", function(jsonObj) {
csvConverter.on("end_parsed", function () {
assert(result.TIMESTAMP.length === 5);
done();

@@ -100,24 +94,23 @@ });

//record_parsed will be emitted each time a row has been parsed.
csvConverter.on("record_parsed", function(resultRow, rawRow, rowIndex) {
csvConverter.on("record_parsed", function (resultRow, rawRow, rowIndex) {
for (var key in resultRow) {
if (!result[key] || !result[key] instanceof Array) {
result[key] = [];
if (resultRow.hasOwnProperty(key)){
if (!result[key] || !(result[key] instanceof Array)) {
result[key] = [];
}
result[key][rowIndex] = resultRow[key];
}
result[key][rowIndex] = resultRow[key];
}
});
rs.pipe(csvConverter);
});
it("should be able to convert csv string directly", function(done) {
it("should be able to convert csv string directly", function (done) {
var testData = __dirname + "/data/testData";
var data = fs.readFileSync(testData).toString();
var result = {}
var csvConverter = new CSVAdv();
var csvConverter = new Converter();
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed", function(jsonObj) {
csvConverter.on("end_parsed", function (jsonObj) {
assert(jsonObj.length === 2);
});
csvConverter.fromString(data, function(err, jsonObj) {
csvConverter.fromString(data, function (err, jsonObj) {
assert(jsonObj.length === 2);

@@ -127,9 +120,8 @@ done();

});
it("should be able to convert csv string without callback provided", function(done) {
it("should be able to convert csv string without callback provided", function (done) {
var testData = __dirname + "/data/testData";
var data = fs.readFileSync(testData).toString();
var result = {}
var csvConverter = new CSVAdv();
var csvConverter = new Converter();
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed", function(jsonObj) {
csvConverter.on("end_parsed", function (jsonObj) {
assert(jsonObj.length === 2);

@@ -140,13 +132,11 @@ done();

});
it("should be able to handle columns with double quotes", function(done) {
it("should be able to handle columns with double quotes", function (done) {
var testData = __dirname + "/data/dataWithQoutes";
var data = fs.readFileSync(testData).toString();
var result = {}
var csvConverter = new CSVAdv();
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed", function(jsonObj) {});
csvConverter.fromString(data, function(err, jsonObj) {
var csvConverter = new Converter();
csvConverter.on("end_parsed", function () {});
csvConverter.fromString(data, function (err, jsonObj) {
//console.log(jsonObj);
assert(jsonObj[0].TIMESTAMP == '13954264"22', JSON.stringify(jsonObj[0].TIMESTAMP));
assert(jsonObj[1].TIMESTAMP == 'abc, def, ccc', JSON.stringify(jsonObj[1].TIMESTAMP));
assert(jsonObj[0].TIMESTAMP === '13954264"22', JSON.stringify(jsonObj[0].TIMESTAMP));
assert(jsonObj[1].TIMESTAMP === 'abc, def, ccc', JSON.stringify(jsonObj[1].TIMESTAMP));
done();

@@ -156,23 +146,20 @@ });

it("should be able to handle columns with two double quotes", function(done) {
it("should be able to handle columns with two double quotes", function (done) {
var testData = __dirname + "/data/twodoublequotes";
var data = fs.readFileSync(testData).toString();
var result = {}
var csvConverter = new CSVAdv();
//end_parsed will be emitted once parsing finished
csvConverter.on("end_parsed", function(jsonObj) {});
csvConverter.fromString(data, function(err, jsonObj) {
assert(jsonObj[0].data == "xyabcde", jsonObj);
assert(jsonObj[0].uuid == "fejal\"eifa", jsonObj);
assert(jsonObj[0].fieldA == "bnej\"\"falkfe", jsonObj);
var csvConverter = new Converter();
csvConverter.on("end_parsed", function () {});
csvConverter.fromString(data, function (err, jsonObj) {
assert(jsonObj[0].data === "xyabcde", jsonObj);
assert(jsonObj[0].uuid === "fejal\"eifa", jsonObj);
assert(jsonObj[0].fieldA === "bnej\"\"falkfe", jsonObj);
done();
});
});
it("should handle empty csv file", function(done) {
it("should handle empty csv file", function (done) {
var testData = __dirname + "/data/emptyFile";
var rs = fs.createReadStream(testData);
var result = {}
var csvConverter = new CSVAdv();
csvConverter.on("end_parsed", function(jsonObj) {
assert(jsonObj.length===0)
var csvConverter = new Converter();
csvConverter.on("end_parsed", function (jsonObj) {
assert(jsonObj.length === 0);
done();

@@ -182,14 +169,15 @@ });

});
it ("shoudl parse large csv file",function(done){
it ("should parse large csv file",function (done){
var testData=__dirname+"/data/large-csv-sample.csv";
var rs=fs.createReadStream(testData);
var csvConverter=new CSVAdv({
var rs = fs.createReadStream(testData);
var csvConverter = new Converter({
constructResult:false
});
var count=0;
csvConverter.on("record_parsed",function(d){
var count = 0;
csvConverter.on("record_parsed", function () {
//console.log(arguments);
count++;
});
csvConverter.on("end_parsed",function(){
assert(count===5290);
csvConverter.on("end_parsed", function () {
assert(count === 5290);
done();

@@ -199,18 +187,17 @@ });

});
it ("should parse data and covert to specific types",function(done){
var testData=__dirname+"/data/dataWithType";
var rs=fs.createReadStream(testData);
var csvConverter=new CSVAdv();
csvConverter.on("record_parsed",function(d){
it ("should parse data and covert to specific types",function (done){
var testData = __dirname + "/data/dataWithType";
var rs = fs.createReadStream(testData);
var csvConverter=new Converter();
csvConverter.on("record_parsed",function (d){
assert(typeof d.column1 === "number");
assert(typeof d.column2 === "string");
assert( d.column3 instanceof Date == true);
assert(d.colume4==="someinvaliddate");
assert(d.column5.hello==="world");
assert(d.column6==='{"hello":"world"}');
assert(d.column7==="1234");
assert(d.column8==="abcd");
assert(d.column9===true);
assert(d.colume4 === "someinvaliddate");
assert(d.column5.hello === "world");
assert(d.column6 === '{"hello":"world"}');
assert(d.column7 === "1234");
assert(d.column8 === "abcd");
assert(d.column9 === true);
});
csvConverter.on("end_parsed",function(){
csvConverter.on("end_parsed",function (){
done();

@@ -220,20 +207,20 @@ });

});
it ("should turn off field type check",function(done){
var testData=__dirname+"/data/dataWithType";
var rs=fs.createReadStream(testData);
var csvConverter=new CSVAdv({
it ("should turn off field type check",function (done){
var testData = __dirname + "/data/dataWithType";
var rs = fs.createReadStream(testData);
var csvConverter = new Converter({
checkType:false
});
csvConverter.on("record_parsed",function(d){
csvConverter.on("record_parsed",function (d){
assert(typeof d.column1 === "string");
assert(typeof d.column2 === "string");
assert( d["date#column3"] ==="2012-01-01");
assert(d["date#colume4"]==="someinvaliddate");
assert(d["column5"]==='{"hello":"world"}');
assert(d["string#column6"]==='{"hello":"world"}');
assert(d["string#column7"]==="1234");
assert(d["number#column8"]==="abcd");
assert(d["column9"]==="true");
assert( d["date#column3"] === "2012-01-01");
assert(d["date#colume4"] === "someinvaliddate");
assert(d.column5 === '{"hello":"world"}');
assert(d["string#column6"] === '{"hello":"world"}');
assert(d["string#column7"] === "1234");
assert(d["number#column8"] === "abcd");
assert(d.column9 === "true");
});
csvConverter.on("end_parsed",function(){
csvConverter.on("end_parsed",function (){
done();

@@ -243,14 +230,14 @@ });

});
it ("should emit data event correctly",function(done){
var testData=__dirname+"/data/large-csv-sample.csv";
var rs=fs.createReadStream(testData);
var csvConverter=new CSVAdv({
constructResult:false
it ("should emit data event correctly",function (done){
var testData = __dirname + "/data/large-csv-sample.csv";
var rs = fs.createReadStream(testData);
var csvConverter = new Converter({
constructResult: false
});
var count=0;
csvConverter.on("data",function(d){
var count = 0;
csvConverter.on("data", function (d) {
count++;
});
csvConverter.on("end",function(){
assert(count===5290);
csvConverter.on("end",function (){
assert(count === 5290);
done();

@@ -260,11 +247,11 @@ });

});
it ("should process column with linebreaks",function(done){
var testData=__dirname+"/data/lineBreak";
var rs=fs.createReadStream(testData);
var csvConverter=new CSVAdv({
constructResult:false
it ("should process column with linebreaks",function (done){
var testData = __dirname + "/data/lineBreak";
var rs = fs.createReadStream(testData);
var csvConverter = new Converter({
constructResult: false
});
csvConverter.on("record_parsed",function(d){
assert(d.Period===13);
assert(d["Apparent age"]=="Unknown");
csvConverter.on("record_parsed",function (d){
assert(d.Period === 13);
assert(d["Apparent age"] === "Unknown");
done();

@@ -274,15 +261,15 @@ });

});
it ("should stream to array string",function(done){
var testData=__dirname+"/data/dataDiffDelimiter";
var rs=fs.createReadStream(testData)
var data="";
var st=rs.pipe(new CSVAdv({ constructResult: false, delimiter: ';', trim: true, toArrayString:true}))
st.on("data",function(d){
data+=d.toString("utf8");
it ("should stream to array string",function (done){
var testData = __dirname + "/data/dataDiffDelimiter";
var rs = fs.createReadStream(testData);
var data = "";
var st = rs.pipe(new Converter({ constructResult: false, delimiter: ';', trim: true, toArrayString:true}));
st.on("data",function (d){
data += d.toString("utf8");
});
st.on("end",function(){
var obj=JSON.parse(data);
assert(obj.length===2);
assert(obj[0].annee==2015029);
assert(obj[1].annee==2015028);
st.on("end",function (){
var obj = JSON.parse(data);
assert(obj.length === 2);
assert(obj[0].annee === 2015029);
assert(obj[1].annee === 2015028);
done();

@@ -292,12 +279,12 @@ });

});
it ("be able to ignore empty columns",function(done){
var testData=__dirname+"/data/dataIgnoreEmpty";
var rs=fs.createReadStream(testData)
var st=rs.pipe(new CSVAdv({ignoreEmpty:true}))
st.on("end_parsed",function(res){
var j=res[0];
assert (j.col2.length===1);
assert(j.col2[0]==="d3");
assert(j.col4.col3===undefined);
assert(j.col4.col5==="world");
it ("be able to ignore empty columns",function (done){
var testData = __dirname + "/data/dataIgnoreEmpty";
var rs = fs.createReadStream(testData);
var st = rs.pipe(new Converter({ignoreEmpty:true}));
st.on("end_parsed",function (res){
var j = res[0];
assert (j.col2.length === 1);
assert(j.col2[0] === "d3");
assert(j.col4.col3 === undefined);
assert(j.col4.col5 === "world");
done();

@@ -304,0 +291,0 @@ });

var assert = require("assert");
var parserMgr = require("../libs/core/parserMgr.js");
var CSVAdv = require("../libs/core/csvConverter.js");
var Converter = require("../libs/core/Converter.js");
var fs = require("fs");
describe("ParserMgr", function() {

@@ -8,3 +9,2 @@ it("should add a correct parser", function() {

});
it("should add a parser if regular expression is a string", function() {

@@ -33,4 +33,4 @@ parserMgr.addParser("myparserName", "hello regexp", function() {});

});
assert(resultRow.myArray[0] == "item1");
assert(resultRow.myArray[1] == "item2");
assert(resultRow.myArray[0] === "item1");
assert(resultRow.myArray[1] === "item2");
});

@@ -56,4 +56,4 @@ });

});
assert(resultRow.myJSON.item1 == "item1");
assert(resultRow.myJSON.item2 == "item2");
assert(resultRow.myJSON.item1 === "item1");
assert(resultRow.myJSON.item2 === "item2");
});

@@ -94,3 +94,3 @@ it("should parse a json containing array", function() {

assert(resultRow.myJSON.item1.arr.length === 2);
assert(resultRow.myJSON.item1.title == "mytitle");
assert(resultRow.myJSON.item1.title === "mytitle");
});

@@ -116,3 +116,3 @@ it("should parse a json containing child json with array containing child json", function() {

assert(resultRow.myJSON.item1.arr.length === 3);
assert(resultRow.myJSON.item1.arr[0].title == "item1");
assert(resultRow.myJSON.item1.arr[0].title === "item1");
});

@@ -138,22 +138,21 @@ it("should parse a json containing child json with dynamic array containing child json", function() {

assert(resultRow.myJSON.item1.arr.length === 3);
assert(resultRow.myJSON.item1.arr[2].title == "item3");
assert(resultRow.myJSON.item1.arr[2].title === "item3");
});
it("should parse a complex JSON's original CSV file", function(done) {
var converter = new CSVAdv();
var fs = require("fs");
it("should parse a complex JSON's original CSV file", function (done) {
var converter = new Converter();
var r = fs.createReadStream(__dirname + "/data/complexJSONCSV");
converter.on("end_parsed", function(res) {
converter.on("end_parsed", function (res) {
assert(res);
assert(res.length === 2);
assert(res[0].fieldA.title == "Food Factory");
assert(res[0].fieldA.title === "Food Factory");
assert(res[0].fieldA.children.length === 2);
assert(res[0].fieldA.children[0].name == "Oscar");
assert(res[0].fieldA.children[0].id == "0023");
assert(res[0].fieldA.children[1].name == "Tikka");
assert(res[0].fieldA.children[0].name === "Oscar");
assert(res[0].fieldA.children[0].id === "0023");
assert(res[0].fieldA.children[1].name === "Tikka");
assert(res[0].fieldA.children[1].employee.length === 2);
assert(res[0].fieldA.children[1].employee[0].name == "Tim",JSON.stringify(res[0].fieldA.children[1].employee[0] ));
assert(res[0].fieldA.children[1].employee[0].name === "Tim", JSON.stringify(res[0].fieldA.children[1].employee[0]));
assert(res[0].fieldA.address.length === 2);
assert(res[0].fieldA.address[0] == "3 Lame Road");
assert(res[0].fieldA.address[1] == "Grantstown");
assert(res[0].description == "A fresh new food factory",res[0].description);
assert(res[0].fieldA.address[0] === "3 Lame Road");
assert(res[0].fieldA.address[1] === "Grantstown");
assert(res[0].description === "A fresh new food factory",res[0].description);
done();

@@ -164,4 +163,4 @@ });

});
describe("json array parser", function() {
it("should return an json array parser with specific column title", function() {
describe("json array parser", function () {
it("should return an json array parser with specific column title", function () {
var parser = parserMgr.getParser("*jsonarray*myJSON.item");

@@ -171,3 +170,3 @@ assert(parser.name === "jsonarray");

it("should parse as an json array with multiple columns", function() {
it("should parse as an json array with multiple columns", function () {
var parser1 = parserMgr.getParser("*jsonarray*myJSON.item");

@@ -186,4 +185,4 @@ var parser2 = parserMgr.getParser("*jsonarray*myJSON.item");

});
assert(resultRow.myJSON.item[0] == "item1");
assert(resultRow.myJSON.item[1] == "item2");
assert(resultRow.myJSON.item[0] === "item1");
assert(resultRow.myJSON.item[1] === "item2");
});

@@ -212,5 +211,5 @@ });

var parsers = parserMgr.initParsers(head);
assert(parsers[0].name == "array");
assert(parsers[1].name == "json");
assert(parsers[0].name === "array");
assert(parsers[1].name === "json");
});
});
