Comparing version 1.0.0 to 1.1.0
@@ -0,1 +1,8 @@
# v1.1.0
* Fix for when a chunk returned by a transform to the parser ends with a space
* Functionality to give a reason when data is invalid
* Fix problem with UTF-8 encoded streams that contain multi-byte characters
* Allow passing a sparse array of headers
# v1.0.0
@@ -2,0 +9,0 @@
@@ -11,2 +11,3 @@ var extended = require("../extended"),
    fs = require("fs"),
    StringDecoder = require('string_decoder').StringDecoder,
    hasIsPaused = !!stream.Transform.prototype.isPaused;
@@ -19,2 +20,3 @@
    this.lines = "";
    this.decoder = new StringDecoder();
    this._parsedHeaders = false;
@@ -68,3 +70,3 @@ this._rowCount = -1;
    } else {
        self.__validate(line, function (err, isValid) {
        self.__validate(line, function (err, isValid, reason) {
            if (err) {
@@ -75,3 +77,3 @@ next(err);
            } else {
                self.emit("data-invalid", line, index);
                self.emit("data-invalid", line, index, reason);
                next(null, null);
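The new third argument means listeners on `data-invalid` can now see why a row was rejected. A minimal usage sketch, assuming a stream read with `csv.fromStream` and a simple synchronous validator (the reason may be `undefined` when the validator does not supply one):

```javascript
var csv = require("fast-csv"),
    fs = require("fs");

csv
    .fromStream(fs.createReadStream("my.csv"), {headers: true})
    .validate(function (data) {
        return data.firstName !== ""; // reject rows missing a first name
    })
    .on("data-invalid", function (data, index, reason) {
        // "reason" is the third argument added in v1.1.0; it may be
        // undefined for a plain boolean validator like the one above
        console.log("invalid row", index, reason);
    })
    .on("data", function (data) {
        console.log(data);
    });
```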
@@ -146,2 +148,5 @@ }
    while (++i < headersLength) {
        if (isUndefined(headers[i])) {
            continue;
        }
        val = data[i];
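The new `isUndefined(headers[i])` guard skips any column whose header slot is empty, which is what makes a sparse header array drop unwanted columns. A standalone sketch of the same mapping logic (illustrative only, not the library's code):

```javascript
// Map a raw row onto header names, skipping sparse (undefined) header slots.
function mapRow(headers, data) {
    var ret = {},
        headersLength = headers.length,
        i = -1,
        val;
    while (++i < headersLength) {
        if (headers[i] === undefined) {
            continue; // column intentionally omitted via the sparse headers array
        }
        val = data[i];
        ret[headers[i]] = val === undefined ? "" : val;
    }
    return ret;
}

console.log(mapRow(["firstName", , "address"], ["bob", "yukon", "1 nowhere"]));
// => { firstName: 'bob', address: '1 nowhere' }
```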
@@ -199,3 +204,3 @@ ret[headers[i]] = isUndefined(val) ? '' : val;
    var lines = this.lines,
        lineData = (lines + data),
        lineData = (lines + this.decoder.write(data)),
        self = this;
@@ -202,0 +207,0 @@ if (lineData.length > 1) {
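Routing chunks through a `StringDecoder` before appending them to the line buffer is what fixes multi-byte UTF-8 characters that get split across chunk boundaries. The effect in isolation (a standalone sketch, not the parser code):

```javascript
var StringDecoder = require("string_decoder").StringDecoder;

// "é" is two bytes in UTF-8 (0xc3 0xa9); split it across two chunks.
var chunk1 = Buffer.from([0x63, 0x61, 0x66, 0xc3]), // "caf" plus the first byte of "é"
    chunk2 = Buffer.from([0xa9]);                    // the second byte of "é"

// Naive concatenation of stringified chunks mangles the split character.
console.log(chunk1.toString() + chunk2.toString()); // "caf??" (replacement characters)

// StringDecoder holds the dangling lead byte until the rest arrives.
var decoder = new StringDecoder("utf8");
console.log(decoder.write(chunk1) + decoder.write(chunk2)); // "café"
```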
@@ -150,2 +150,3 @@ var extended = require("./../extended"),
        i = lastLineI;
        cursor = null;
        break;
@@ -152,0 +153,0 @@ } else if (ROW_DELIMITER.test(token)) {
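Resetting `cursor` and breaking out of the loop rewinds the parser to the last complete row when a chunk ends mid-token (a trailing space, for example), so the leftover text is reparsed together with the next chunk. The general carry-over idea, reduced to a toy line splitter (illustrative only; names and logic are not fast-csv's):

```javascript
// Toy sketch: buffer an incomplete trailing row until more data arrives.
function makeLineParser(onRow) {
    var tail = "";
    return function write(chunk) {
        var buffered = tail + chunk,
            lastNewline = buffered.lastIndexOf("\n");
        if (lastNewline === -1) {
            tail = buffered; // no complete row yet; keep everything for the next chunk
            return;
        }
        buffered.slice(0, lastNewline).split("\n").forEach(onRow);
        tail = buffered.slice(lastNewline + 1); // possibly a partial row or trailing space
    };
}

var parse = makeLineParser(function (row) { console.log("row:", row); });
parse("a,b,c\nd,e");
parse(",f\n"); // completes the second row: "d,e,f"
```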
{
  "name": "fast-csv",
  "version": "1.0.0",
  "version": "1.1.0",
  "description": "CSV parser and writer",
@@ -5,0 +5,0 @@ "main": "index.js",
@@ -19,3 +19,3 @@ [![build status](https://secure.travis-ci.org/C2FO/fast-csv.png)](http://travis-ci.org/C2FO/fast-csv)
* `objectMode=true`: Ensure that `data` events emit an object rather than the stringified version; set to `false` to have a stringified buffer emitted instead.
* `headers=false`: Set to true if you expect the first line of your `CSV` to contain headers; alternately you can specify an array of headers to use.
* `headers=false`: Set to true if you expect the first line of your `CSV` to contain headers; alternately you can specify an array of headers to use. You can also specify a sparse array to omit some of the columns.
* `ignoreEmpty=false`: If you wish to ignore empty rows.
@@ -201,2 +201,18 @@ * `discardUnmappedColumns=false`: If you want to discard columns that do not map to a header.
To omit some of the data columns you may not need, pass a sparse array as `headers`.
```javascript
var stream = fs.createReadStream("my.csv");
csv
    .fromStream(stream, {headers : ["firstName" , , "address"]})
    .on("data", function(data){
        console.log(data);
    })
    .on("end", function(){
        console.log("done");
    });
```
If your data may include empty rows, the sort Excel might include at the end of the file for instance, you can ignore
@@ -203,0 +219,0 @@ these by including the `ignoreEmpty` option.
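A minimal sketch of the `ignoreEmpty` option mentioned here, following the same stream-based pattern as the example above (the file name is just a placeholder):

```javascript
var stream = fs.createReadStream("my.csv");

csv
    .fromStream(stream, {headers: true, ignoreEmpty: true})
    .on("data", function (data) {
        console.log(data); // empty rows, such as trailing blank lines, are skipped
    })
    .on("end", function () {
        console.log("done");
    });
```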
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package