Comparing version 0.3.13 to 0.3.14
@@ -18,3 +18,4 @@ module.exports = csvAdv; | ||
"quote": "\"", //quote for a column containing delimiter. | ||
"trim": true //trim column's space charcters | ||
"trim": true, //trim column's space characters | ||
"checkType":true //whether to check column type | ||
} | ||
@@ -105,3 +106,3 @@ if (params && typeof params == "object") { | ||
csvAdv.prototype._flush = function(cb) { | ||
this._startInit(); | ||
this._startInit(); // this is called in case input is empty | ||
if (this._buffer.length != 0) { //emit last line | ||
@@ -118,3 +119,3 @@ this.emit("record", this._buffer, this.rowIndex++, true); | ||
this.headRow = headRow; | ||
this.parseRules = parserMgr.initParsers(headRow); | ||
this.parseRules = parserMgr.initParsers(headRow,this.param.checkType); | ||
}; | ||
@@ -133,3 +134,4 @@ csvAdv.prototype._rowProcess = function(row, index, resultRow) { | ||
rowIndex: index, | ||
resultObject: this.resultObject | ||
resultObject: this.resultObject, | ||
config:this.param || {} | ||
}); | ||
@@ -136,0 +138,0 @@ } |
@@ -33,2 +33,3 @@ var parserMgr = require("./parserMgr.js"); | ||
params.resultRow[fieldName].push(params.item); | ||
//console.log("array parser has been deprecated. See https://github.com/Keyang/node-csvtojson#default-parsers"); | ||
} | ||
@@ -52,3 +53,4 @@ | ||
pointer[arrFieldName].push(params.item); | ||
//console.log("json array parser has been deprecated. See https://github.com/Keyang/node-csvtojson#default-parsers"); | ||
} | ||
initDefaultParsers(); |
@@ -40,9 +40,38 @@ module.exports = function(params) { | ||
} | ||
if (index==""){ | ||
index=pointer[key].length; | ||
if (index == "") { | ||
index = pointer[key].length; | ||
} | ||
pointer[key][index]=params.item; | ||
pointer[key][index] = params.item; | ||
} else { //last element is normal | ||
pointer[key] = params.item; | ||
if (params.config && params.config.checkType) { | ||
try { | ||
switch (this.type) { | ||
case "date": | ||
var d = new Date(params.item); | ||
if (isNaN(d.getTime())) { | ||
d = params.item; | ||
} | ||
pointer[key] = d; | ||
break; | ||
case "number": | ||
if (!isNaN(params.item)) { | ||
pointer[key] = parseFloat(params.item); | ||
} else { | ||
pointer[key] = params.item; | ||
} | ||
break; | ||
case "": | ||
pointer[key] = JSON.parse(params.item); | ||
break; | ||
case "string": | ||
default: | ||
pointer[key] = params.item; | ||
} | ||
} catch (e) { | ||
pointer[key] = params.item; | ||
} | ||
} else { | ||
pointer[key] = params.item; | ||
} | ||
} | ||
} |
@@ -6,2 +6,3 @@ module.exports=Parser; | ||
this.regExp=null; | ||
this.type=""; | ||
if (typeof regExp !="undefined"){ | ||
@@ -8,0 +9,0 @@ if (typeof regExp =="string"){ |
//module interfaces | ||
module.exports.addParser=addParser; | ||
module.exports.initParsers=initParsers; | ||
module.exports.getParser=getParser; | ||
module.exports.addParser = addParser; | ||
module.exports.initParsers = initParsers; | ||
module.exports.getParser = getParser; | ||
//implementation | ||
var registeredParsers=[]; | ||
var Parser=require("./parser.js"); | ||
var registeredParsers = []; | ||
var Parser = require("./parser.js"); | ||
function registerParser(parser){ | ||
if (parser instanceof Parser){ | ||
if (registeredParsers.indexOf(parser)==-1){ | ||
registeredParsers.push(parser); | ||
} | ||
function registerParser(parser) { | ||
if (parser instanceof Parser) { | ||
if (registeredParsers.indexOf(parser) == -1) { | ||
registeredParsers.push(parser); | ||
} | ||
} | ||
} | ||
function addParser(name,regExp,parseFunc){ | ||
var parser=new Parser(name,regExp,parseFunc); | ||
registerParser(parser); | ||
function addParser(name, regExp, parseFunc) { | ||
var parser = new Parser(name, regExp, parseFunc); | ||
registerParser(parser); | ||
} | ||
function initParsers(row){ | ||
var parsers=[]; | ||
for (var i=0;i<row.length;i++){ | ||
var columnTitle=row[i]; | ||
parsers.push(getParser(columnTitle)); | ||
} | ||
return parsers; | ||
function initParsers(row, checkType) { | ||
var parsers = []; | ||
for (var i = 0; i < row.length; i++) { | ||
var columnTitle = row[i]; | ||
parsers.push(getParser(columnTitle, checkType)); | ||
} | ||
return parsers; | ||
} | ||
function getParser(columnTitle){ | ||
for (var i=0;i<registeredParsers.length;i++){ | ||
var parser=registeredParsers[i]; | ||
if (parser.test(columnTitle)){ | ||
var inst=parser.clone(); | ||
inst.head=columnTitle; | ||
return inst; | ||
} | ||
function getParser(columnTitle, checkType) { | ||
if (!columnTitle){ | ||
columnTitle="" | ||
} | ||
var type=""; | ||
if (checkType){ | ||
var split=splitTitle(columnTitle); | ||
type=split[0]; | ||
columnTitle=split[1]; | ||
} | ||
for (var i = 0; i < registeredParsers.length; i++) { | ||
var parser = registeredParsers[i]; | ||
if (parser.test(columnTitle)) { | ||
var inst = parser.clone(); | ||
inst.head = columnTitle; | ||
inst.type=type; | ||
return inst; | ||
} | ||
return getParserByName("json",columnTitle); | ||
} | ||
var inst= getParserByName("json", columnTitle); | ||
inst.type=type; | ||
return inst; | ||
} | ||
function getParserByName(parserName,columnTitle){ | ||
for (var i=0;i<registeredParsers.length;i++){ | ||
var parser=registeredParsers[i]; | ||
if (parser.getName()==parserName){ | ||
var inst=parser.clone(); | ||
inst.head=columnTitle; | ||
function splitTitle(columnTitle){ | ||
var splitArr=columnTitle.split("#"); | ||
if (splitArr.length ===1){ | ||
splitArr.unshift("") | ||
return splitArr | ||
} | ||
if (splitArr.length>2){ | ||
var rtn=[]; | ||
rtn.push(splitArr.shift()); | ||
rtn.push(splitArr.join("#")); | ||
return rtn | ||
} | ||
return splitArr; | ||
} | ||
function getParserByName(parserName, columnTitle) { | ||
for (var i = 0; i < registeredParsers.length; i++) { | ||
var parser = registeredParsers[i]; | ||
if (parser.getName() == parserName) { | ||
var inst = parser.clone(); | ||
inst.head = columnTitle; | ||
return inst; | ||
@@ -49,0 +78,0 @@ } |
@@ -15,3 +15,3 @@ { | ||
}], | ||
"version": "0.3.13", | ||
"version": "0.3.14", | ||
"keywords": [ | ||
@@ -18,0 +18,0 @@ "csv", |
@@ -37,2 +37,3 @@ #CSV2JSON | ||
After version 0.3, csvtojson requires node 0.10 and above. | ||
@@ -58,2 +59,3 @@ ##Menu | ||
* [Empowered JSON Parser](#empowered-json-parser) | ||
* [Field Type](#field-type) | ||
* [Change Log](#change-log) | ||
@@ -160,2 +162,3 @@ | ||
* trim: Indicate if parser trim off spaces surrounding column content. e.g. " content " will be trimmed to "content". Default: true | ||
* checkType: This parameter turns on and off whether to check and convert the field type. Default is true. See [Field Type](#field-type) | ||
@@ -259,11 +262,13 @@ # Parser | ||
**Array**: For columns head start with "\*array\*" e.g. "\*array\*fieldName", this parser will combine cells data with same fieldName to one Array. | ||
~~**Array**: For columns head start with "\*array\*" e.g. "\*array\*fieldName", this parser will combine cells data with same fieldName to one Array.~~ | ||
**Nested JSON**: For columns head start with "\*json\*" e.g. "\*json\*my.nested.json.structure", this parser will create nested nested JSON structure: my.nested.json | ||
~~**Nested JSON**: For columns head start with "\*json\*" e.g. "\*json\*my.nested.json.structure", this parser will create nested JSON structure: my.nested.json~~ | ||
**Nested JSON Array**: For columns head start with "\*jsonarray\*" e.g. "\*jsonarray\*my.items", this parser will create structure like my.items[]. | ||
~~**Nested JSON Array**: For columns head start with "\*jsonarray\*" e.g. "\*jsonarray\*my.items", this parser will create structure like my.items[].~~ | ||
**JSON**: Any valid JSON structure (array, nested json) are supported. see [Empowered JSON Parser](#empowered-json-parser) | ||
**Omitted column**: For columns head start with "\*omit\*" e.g. "\*omit\*id", the parser will omit the column's data. | ||
#Example: | ||
#~~Example:~~(This example is deprecated see [Empowered JSON Parser](#empowered-json-parser)) | ||
@@ -492,4 +497,88 @@ Original data: | ||
#Field Type | ||
From version 0.3.14, type of fields are supported by csvtojson. | ||
The parameter checkType is used to specify whether to check and convert the field type. | ||
See [here](#params) for the parameter usage. | ||
Thank all who have contributed to ticket [#20](https://github.com/Keyang/node-csvtojson/issues/20). | ||
##Implicit Type | ||
When checkType is turned on, parser will try to convert value to its implicit type if it is not explicitly specified. | ||
For example, csv data: | ||
```csv | ||
name, age, married, msg | ||
Tom, 12, false, {"hello":"world","total":23} | ||
``` | ||
Will be converted into: | ||
```json | ||
{ | ||
"name":"Tom", | ||
"age":12, | ||
"married":false, | ||
"msg":{ | ||
"hello":"world", | ||
"total":23 | ||
} | ||
} | ||
``` | ||
If checkType is turned **OFF**, it will be converted to: | ||
```json | ||
{ | ||
"name":"Tom", | ||
"age":"12", | ||
"married":"false", | ||
"msg":"{\"hello\":\"world\",\"total\":23}" | ||
} | ||
``` | ||
##Explicit Type | ||
CSV header column can explicitly define the type of the field. | ||
Simply add type before column name with a hash symbol (#). | ||
###Supported types: | ||
* string | ||
* number | ||
* date | ||
### Define Type | ||
To define the field type, see following example | ||
```csv | ||
string#appNumber, string#finished, date#startDate | ||
201401010002, true, 2014-01-01 | ||
``` | ||
The data will be converted to: | ||
```json | ||
{ | ||
"appNumber":"201401010002", | ||
"finished":"true", | ||
"startDate":Wed Jan 01 2014 00:00:00 GMT+0000 (GMT) | ||
} | ||
``` | ||
### Invalid Value | ||
If the parser meets an invalid value for a type while parsing, it will fall back to using the string value. | ||
For example: | ||
```csv | ||
number#order, date#shipDate | ||
A00001, Unknown | ||
``` | ||
It will be converted to: | ||
```json | ||
{ | ||
"order":"A00001", | ||
"shipDate":"Unknown" | ||
} | ||
``` | ||
#Change Log | ||
##0.3.14 | ||
* Added field type support | ||
* Fixed some minor bugs | ||
##0.3.8 | ||
@@ -496,0 +585,0 @@ * Empowered built-in JSON parser. |
@@ -185,2 +185,44 @@ var CSVAdv = require("../libs/core/csvConverter.js"); | ||
}); | ||
it ("should parse data and covert to specific types",function(done){ | ||
var testData=__dirname+"/data/dataWithType"; | ||
var rs=fs.createReadStream(testData); | ||
var csvConverter=new CSVAdv(); | ||
csvConverter.on("record_parsed",function(d){ | ||
assert(typeof d.column1 === "number"); | ||
assert(typeof d.column2 === "string"); | ||
assert( d.column3 instanceof Date == true); | ||
assert(d.colume4==="someinvaliddate"); | ||
assert(d.column5.hello==="world"); | ||
assert(d.column6==='{"hello":"world"}'); | ||
assert(d.column7==="1234"); | ||
assert(d.column8==="abcd"); | ||
assert(d.column9===true); | ||
}); | ||
csvConverter.on("end_parsed",function(){ | ||
done(); | ||
}); | ||
rs.pipe(csvConverter); | ||
}); | ||
it ("should turn off field type check",function(done){ | ||
var testData=__dirname+"/data/dataWithType"; | ||
var rs=fs.createReadStream(testData); | ||
var csvConverter=new CSVAdv({ | ||
checkType:false | ||
}); | ||
csvConverter.on("record_parsed",function(d){ | ||
assert(typeof d.column1 === "string"); | ||
assert(typeof d.column2 === "string"); | ||
assert( d["date#column3"] ==="2012-01-01"); | ||
assert(d["date#colume4"]==="someinvaliddate"); | ||
assert(d["column5"]==='{"hello":"world"}'); | ||
assert(d["string#column6"]==='{"hello":"world"}'); | ||
assert(d["string#column7"]==="1234"); | ||
assert(d["number#column8"]==="abcd"); | ||
assert(d["column9"]==="true"); | ||
}); | ||
csvConverter.on("end_parsed",function(){ | ||
done(); | ||
}); | ||
rs.pipe(csvConverter); | ||
}); | ||
}); |
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
137233
33
989
599