
json2csv - npm Package Compare versions

Comparing version 4.3.5 to 4.4.0

lib/JSON2CSVAsyncParser.js


bin/json2csv.js

@@ -9,6 +9,6 @@ #!/usr/bin/env node

const program = require('commander');
const pkg = require('../package');
const json2csv = require('../lib/json2csv');
const parseNdJson = require('./utils/parseNdjson');
const TablePrinter = require('./utils/TablePrinter');
const pkg = require('../package');

@@ -20,22 +20,22 @@ const JSON2CSVParser = json2csv.Parser;

.version(pkg.version)
.option('-i, --input <input>', 'Path and name of the incoming json file. If not provided, will read from stdin.')
.option('-i, --input <input>', 'Path and name of the incoming json file. Defaults to stdin.')
.option('-o, --output [output]', 'Path and name of the resulting csv file. Defaults to stdout.')
.option('-n, --ndjson', 'Treat the input as NewLine-Delimited JSON.')
.option('-s, --no-streaming', 'Process the whole JSON array in memory instead of doing it line by line.')
.option('-f, --fields <fields>', 'Specify the fields to convert.')
.option('-c, --fields-config <path>', 'Specify a file with a fields configuration as a JSON array.')
.option('-f, --fields <fields>', 'List of fields to process. Defaults to field auto-detection.')
.option('-c, --fields-config <path>', 'File with a fields configuration as a JSON array.')
.option('-u, --unwind <paths>', 'Creates multiple rows from a single JSON document similar to MongoDB unwind.')
.option('-B, --unwind-blank', 'When unwinding, blank out instead of repeating data.')
.option('-F, --flatten', 'Flatten nested objects.')
.option('-S, --flatten-separator <separator>', 'Flattened keys separator.')
.option('-v, --default-value [defaultValue]', 'Specify a default value other than empty string.')
.option('-q, --quote [value]', 'Specify an alternate quote value.')
.option('-Q, --double-quote [value]', 'Specify a value to replace double quote in strings.')
.option('-d, --delimiter [delimiter]', 'Specify a delimiter other than the default comma to use.')
.option('-e, --eol [value]', 'Specify an End-of-Line value for separating rows.')
.option('-E, --excel-strings','Converts string data into normalized Excel style data.')
.option('-S, --flatten-separator <separator>', 'Flattened keys separator. Defaults to \'.\'.')
.option('-v, --default-value [defaultValue]', 'Default value to use for missing fields.')
.option('-q, --quote [quote]', 'Character(s) to use as a quote mark. Defaults to \'"\'.')
.option('-Q, --double-quote [doubleQuote]', 'Character(s) to use as an escaped quote. Defaults to a double `quote`, \'""\'.')
.option('-d, --delimiter [delimiter]', 'Character(s) to use as delimiter. Defaults to \',\'.')
.option('-e, --eol [eol]', 'Character(s) to use as End-of-Line for separating rows. Defaults to \'\\n\'.')
.option('-E, --excel-strings','Wraps string data to force Excel to interpret it as a string even if it contains a number.')
.option('-H, --no-header', 'Disable the column name header.')
.option('-a, --include-empty-rows', 'Includes empty rows in the resulting CSV output.')
.option('-b, --with-bom', 'Includes BOM character at the beginning of the csv.')
.option('-p, --pretty', 'Use only when printing to console. Logs output in pretty tables.')
.option('-b, --with-bom', 'Includes BOM character at the beginning of the CSV.')
.option('-p, --pretty', 'Print output as a pretty table. Use only when printing to console.')
.parse(process.argv);

@@ -231,7 +231,7 @@

.catch((err) => {
if (inputPath && err.message.indexOf(inputPath) !== -1) {
if (inputPath && err.message.includes(inputPath)) {
err = new Error('Invalid input file. (' + err.message + ')');
} else if (outputPath && err.message.indexOf(outputPath) !== -1) {
} else if (outputPath && err.message.includes(outputPath)) {
err = new Error('Invalid output file. (' + err.message + ')');
} else if (fieldsConfigPath && err.message.indexOf(fieldsConfigPath) !== -1) {
} else if (fieldsConfigPath && err.message.includes(fieldsConfigPath)) {
err = new Error('Invalid fields config file. (' + err.message + ')');

@@ -238,0 +238,0 @@ }

CHANGELOG.md

@@ -5,2 +5,20 @@ # Change Log

# [4.4.0](https://github.com/zemirco/json2csv/compare/v4.3.5...v4.4.0) (2019-03-25)
### Bug Fixes
* audit deps ([3b1ba07](https://github.com/zemirco/json2csv/commit/3b1ba07))
* backslash type ([#369](https://github.com/zemirco/json2csv/issues/369)) ([389891a](https://github.com/zemirco/json2csv/commit/389891a))
* objectMode example typo in README.md ([#364](https://github.com/zemirco/json2csv/issues/364)) ([28f8f5d](https://github.com/zemirco/json2csv/commit/28f8f5d))
* Simplify object mode test ([#370](https://github.com/zemirco/json2csv/issues/370)) ([29bbad1](https://github.com/zemirco/json2csv/commit/29bbad1))
* typo in fixture name ([#371](https://github.com/zemirco/json2csv/issues/371)) ([9eebf40](https://github.com/zemirco/json2csv/commit/9eebf40))
### Features
* Performance improvements and new async api ([#360](https://github.com/zemirco/json2csv/issues/360)) ([d59dea1](https://github.com/zemirco/json2csv/commit/d59dea1))
<a name="4.3.5"></a>

@@ -7,0 +25,0 @@ ## [4.3.5](https://github.com/zemirco/json2csv/compare/v4.3.4...v4.3.5) (2019-02-22)

lib/json2csv.js

'use strict';
const JSON2CSVParser = require('./JSON2CSVParser');
const JSON2CSVAsyncParser = require('./JSON2CSVAsyncParser');
const JSON2CSVTransform = require('./JSON2CSVTransform');
module.exports.Parser = JSON2CSVParser;
module.exports.AsyncParser = JSON2CSVAsyncParser;
module.exports.Transform = JSON2CSVTransform;

@@ -11,1 +13,10 @@

module.exports.parse = (data, opts) => new JSON2CSVParser(opts).parse(data);
module.exports.parseAsync = (data, opts, transformOpts) => {
const asyncParser = new JSON2CSVAsyncParser(opts, transformOpts);
const promise = asyncParser.promise();
data.forEach(item => asyncParser.input.push(item));
asyncParser.input.push(null);
return promise;
};

lib/JSON2CSVBase.js

@@ -5,13 +5,8 @@ 'use strict';

const lodashGet = require('lodash.get');
const { setProp, flattenReducer } = require('./utils');
const setProp = (obj, path, value) => {
const pathArray = Array.isArray(path) ? path : path.split('.');
const key = pathArray[0];
const newValue = pathArray.length > 1 ? setProp(obj[key] || {}, pathArray.slice(1), value) : value;
return Object.assign({}, obj, { [key]: newValue });
};
class JSON2CSVBase {
constructor(opts) {
this.opts = this.preprocessOpts(opts);
this.preprocessRow = this.memoizePreprocessRow();
}

@@ -107,19 +102,36 @@

memoizePreprocessRow() {
if (this.opts.unwind && this.opts.unwind.length) {
if (this.opts.flatten) {
return function (row) {
return this.unwindData(row, this.opts.unwind)
.map(row => this.flatten(row, this.opts.flattenSeparator));
};
}
return function (row) {
return this.unwindData(row, this.opts.unwind);
};
}
if (this.opts.flatten) {
return function (row) {
return [this.flatten(row, this.opts.flattenSeparator)];
};
}
return function (row) {
return [row];
};
}
/**
* Preprocess each object according to the given opts (unwind, flatten, etc.).
* The actual body of the function is dynamically set on the constructor by the
* `memoizePreprocessRow` method after parsing the options.
*
* @param {Object} row JSON object to be converted into a CSV row
*/
preprocessRow(row) {
const processedRow = (this.opts.unwind && this.opts.unwind.length)
? this.unwindData(row, this.opts.unwind)
: [row];
preprocessRow() {}
if (this.opts.flatten) {
return processedRow.map(row => this.flatten(row, this.opts.flattenSeparator));
}
return processedRow;
}
/**

@@ -180,3 +192,3 @@ * Create the content of a specific CSV row

if (typeof value === 'object' && !/^"(.*)"$/.test(stringifiedValue)) {
if (typeof value === 'object' && !/^".*"$/.test(stringifiedValue)) {
// Stringify objects that are not stringified to a

@@ -198,20 +210,18 @@ // JSON string (like Date) to escape commas, quotes, etc.

// Replace automatically escaped single quotes by doubleQuotes
stringifiedValue = stringifiedValue
.replace(/\\"(?!$)/g, this.opts.doubleQuote);
if (this.opts.quote === '"') {
// Replace automatically escaped single quotes by doubleQuotes
stringifiedValue = stringifiedValue
.replace(/(\\")(?!$)/g, this.opts.doubleQuote);
} else {
// Unescape automatically escaped double quote symbol
if (this.opts.quote !== '"') {
// Replace single quote with double quote
// Replace wrapping quotes
stringifiedValue = stringifiedValue
.replace(/(\\")(?!$)/g, '"')
.replace(new RegExp(this.opts.quote, 'g'), this.opts.doubleQuote)
.replace(/^"(.*)"$/, this.opts.quote + '$1' + this.opts.quote);
.replace(/^"/, this.opts.quote)
.replace(/"$/, this.opts.quote);
}
// Remove double backslashes
stringifiedValue = stringifiedValue
.replace(/\\\\/g, '\\');
// Remove double backslashes
stringifiedValue = stringifiedValue
.replace(/\\\\/g, '\\');

@@ -288,3 +298,3 @@ if (this.opts.excelStrings && typeof value === 'string') {

})
.reduce((a, e) => a.concat(e), []);
.reduce(flattenReducer, []);
};

@@ -291,0 +301,0 @@

lib/JSON2CSVParser.js

'use strict';
const JSON2CSVBase = require('./JSON2CSVBase');
const { flattenReducer } = require('./utils');
class JSON2CSVParser extends JSON2CSVBase {
constructor(opts) {
super(opts);
if (this.opts.fields) {
this.opts.fields = this.preprocessFieldsInfo(this.opts.fields);
}
}
/**

@@ -16,12 +23,16 @@ * Main function that converts json to csv.

if (!this.opts.fields) {
const dataFields = processedData
.map(item => Object.keys(item))
.reduce((tempData, rows) => tempData.concat(rows), []);
this.opts.fields = processedData
.reduce((fields, item) => {
Object.keys(item).forEach((field) => {
if (!fields.includes(field)) {
fields.push(field)
}
});
this.opts.fields = dataFields
.filter((field, pos, arr) => arr.indexOf(field) == pos);
return fields
}, []);
this.opts.fields = this.preprocessFieldsInfo(this.opts.fields);
}
this.opts.fields = this.preprocessFieldsInfo(this.opts.fields);
const header = this.opts.header ? this.getHeader() : '';

@@ -50,5 +61,9 @@ const rows = this.processData(processedData);

if ((!this.opts.unwind || !this.opts.unwind.length) && !this.opts.flatten) {
return processedData;
}
return processedData
.map(row => this.preprocessRow(row))
.reduce((tempData, rows) => tempData.concat(rows), []);
.reduce(flattenReducer, []);
}

@@ -55,0 +70,0 @@

lib/JSON2CSVTransform.js

'use strict';
const Transform = require('stream').Transform;
const { Transform } = require('stream');
const Parser = require('jsonparse');

@@ -16,2 +16,3 @@ const JSON2CSVBase = require('./JSON2CSVBase');

this.opts = this.preprocessOpts(opts);
this.preprocessRow = this.memoizePreprocessRow();

@@ -122,2 +123,3 @@ this._data = '';

}
if (this.stack.length === 1) {

@@ -141,3 +143,3 @@ if(this.depthToEmit === undefined) {

this.parser.onError = function (err) {
if(err.message.indexOf('Unexpected') > -1) {
if(err.message.includes('Unexpected')) {
err.message = 'Invalid JSON (' + err.message + ')';

@@ -198,6 +200,4 @@ }

if (line === undefined) return;
const eoledLine = (this._hasWritten ? this.opts.eol : '')
+ line;
this.emit('line', line);
this.push(eoledLine);
this.push(this._hasWritten ? this.opts.eol + line : line);
this._hasWritten = true;

@@ -204,0 +204,0 @@ });

package.json

{
"name": "json2csv",
"version": "4.3.5",
"version": "4.4.0",
"description": "Convert JSON to CSV",

@@ -47,12 +47,12 @@ "keywords": [

"devDependencies": {
"babel-core": "^6.26.3",
"babel-preset-es2015-rollup": "^3.0.0",
"coveralls": "^3.0.1",
"@babel/core": "^7.3.3",
"@babel/preset-env": "^7.3.1",
"coveralls": "^3.0.3",
"docpress": "^0.7.6",
"eslint": "^5.0.1",
"eslint": "^5.14.1",
"gh-pages": "^2.0.1",
"in-publish": "^2.0.0",
"nyc": "^12.0.2",
"rollup": "^0.62.0",
"rollup-plugin-babel": "^3.0.5",
"nyc": "^13.3.0",
"rollup": "^1.2.2",
"rollup-plugin-babel": "^4.3.2",
"rollup-plugin-commonjs": "^9.1.3",

@@ -62,6 +62,6 @@ "rollup-plugin-node-builtins": "^2.1.2",

"rollup-plugin-node-resolve": "^3.3.0",
"standard-version": "^4.4.0",
"standard-version": "^5.0.0",
"tap-spec": "^5.0.0",
"tape": "^4.9.1"
"tape": "^4.10.1"
}
}

README.md

@@ -15,15 +15,11 @@ # json2csv

- Uses proper line endings on various operating systems
- Handles double quotes
- Allows custom column selection
- Allows specifying nested properties
- Reads column selection from file
- Pretty writing to stdout
- Supports optional custom delimiters
- Supports optional custom eol value
- Supports optional custom quotation marks
- Optional header.
- If field doesn't exist in object the field value in CSV will be empty.
- Preserve new lines in values. Should be used with \r\n line endings for full compatibility with Excel.
- Add a BOM character at the beginning of the csv to make Excel displaying special characters correctly.
- Fast and lightweight
- Scalable to infinitely large datasets (using stream processing)
- Support for standard JSON as well as NDJSON
- Advanced data selection (automatic field discovery, underscore-like selectors, custom data getters, default values for missing fields, flattening nested objects, unwinding arrays, etc.)
- Highly customizable (supporting custom quotation marks, delimiters, eol values, etc.)
- Automatic escaping (preserving new lines, quotes, etc. within values)
- Optional headers
- Unicode encoding support
- Pretty printing in table format to stdout

@@ -34,3 +30,3 @@ ## How to install

```bash
```sh
# Global so it can be called from anywhere

@@ -58,30 +54,28 @@ $ npm install -g json2csv

```bash
Usage: json2csv [options]
```sh
Usage: json2csv [options]
Options:
-V, --version output the version number
-i, --input <input> Path and name of the incoming json file. If not provided, will read from stdin.
-o, --output [output] Path and name of the resulting csv file. Defaults to stdout.
-n, --ndjson Treat the input as NewLine-Delimited JSON.
-s, --no-streaming Process the whole JSON array in memory instead of doing it line by line.
-f, --fields <fields> Specify the fields to convert.
-c, --fields-config <path> Specify a file with a fields configuration as a JSON array.
-u, --unwind <paths> Creates multiple rows from a single JSON document similar to MongoDB unwind.
-B, --unwind-blank When unwinding, blank out instead of repeating data.
-F, --flatten Flatten nested objects.
-S, --flatten-separator <separator> Flattened keys separator.
-v, --default-value [defaultValue] Specify a default value other than empty string.
-q, --quote [value] Specify an alternate quote value.
-Q, --double-quote [value] Specify a value to replace double quote in strings.
-d, --delimiter [delimiter] Specify a delimiter other than the default comma to use.
-e, --eol [value] Specify an End-of-Line value for separating rows.
-E, --excel-strings Converts string data into normalized Excel style data.
-H, --no-header Disable the column name header.
-a, --include-empty-rows Includes empty rows in the resulting CSV output.
-b, --with-bom Includes BOM character at the beginning of the csv.
-p, --pretty Use only when printing to console. Logs output in pretty tables.
-h, --help output usage information
Options:
-V, --version output the version number
-i, --input <input> Path and name of the incoming json file. Defaults to stdin.
-o, --output [output] Path and name of the resulting csv file. Defaults to stdout.
-n, --ndjson Treat the input as NewLine-Delimited JSON.
-s, --no-streaming Process the whole JSON array in memory instead of doing it line by line.
-f, --fields <fields> List of fields to process. Defaults to field auto-detection.
-c, --fields-config <path> File with a fields configuration as a JSON array.
-u, --unwind <paths> Creates multiple rows from a single JSON document similar to MongoDB unwind.
-B, --unwind-blank When unwinding, blank out instead of repeating data.
-F, --flatten Flatten nested objects.
-S, --flatten-separator <separator> Flattened keys separator. Defaults to '.'.
-v, --default-value [defaultValue] Default value to use for missing fields.
-q, --quote [value] Character(s) to use as a quote mark. Defaults to '"'.
-Q, --double-quote [value] Character(s) to use as an escaped quote. Defaults to a double `quote`, '""'.
-d, --delimiter [delimiter] Character(s) to use as delimiter. Defaults to ','.
-e, --eol [value] Character(s) to use as End-of-Line for separating rows. Defaults to '\n'.
-E, --excel-strings Wraps string data to force Excel to interpret it as a string even if it contains a number.
-H, --no-header Disable the column name header.
-a, --include-empty-rows Includes empty rows in the resulting CSV output.
-b, --with-bom Includes BOM character at the beginning of the CSV.
-p, --pretty Print output as a pretty table. Use only when printing to console.
-h, --help output usage information
```

@@ -98,3 +92,3 @@

```bash
```sh
$ json2csv -i input.json -f carModel,price,color

@@ -110,3 +104,3 @@ carModel,price,color

```bash
```sh
$ json2csv -i input.json -f carModel,price,color -p

@@ -119,3 +113,3 @@ ```

```bash
```sh
$ json2csv -i input.json -f carModel,price,color -o out.csv

@@ -132,3 +126,3 @@ $ cat out.csv

```bash
```sh
$ json2csv -i input.json -c fieldsConfig.json -o out.csv

@@ -149,3 +143,3 @@ ```

```bash
```sh
$ json2csv -f price

@@ -168,3 +162,3 @@ [{"price":1000},{"price":2000}]

```bash
```sh
# Initial creation of csv with headings

@@ -203,4 +197,5 @@ $ json2csv -i test.json -f name,version > test.csv

`json2csv` can also be used programmatically as a synchronous converter using its `parse` method.
```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['field1', 'field2', 'field3'];

@@ -210,3 +205,3 @@ const opts = { fields };

try {
const parser = new Json2csvParser(opts);
const parser = new Parser(opts);
const csv = parser.parse(myData);

@@ -221,4 +216,5 @@ console.log(csv);

```javascript
const json2csv = require('json2csv').parse;
```js
const { parse } = require('json2csv');
const fields = ['field1', 'field2', 'field3'];

@@ -228,3 +224,3 @@ const opts = { fields };

try {
const csv = json2csv(myData, opts);
const csv = parse(myData, opts);
console.log(csv);

@@ -236,21 +232,94 @@ } catch (err) {

### json2csv transform (Streaming API)
Both of the methods above load the entire JSON in memory and do the whole processing in-memory while blocking Javascript event loop. For that reason is rarely a good reason to use it until your data is very small or your application doesn't do anything else.
The parse method is really good but has the downside of loading the entire JSON array in memory. This might not be optimal or even possible for large JSON files.
### json2csv async parser (Streaming API)
For such cases json2csv offers a stream transform so pipe your json content into it and it will output it.
The synchronous API has the downside of loading the entire JSON array in memory and blocking JavaScript's event loop while processing the data. This means that your server won't be able to process more requests, or your UI will become unresponsive, while data is being processed. For those reasons, it is rarely a good idea to use it unless your data is very small or your application doesn't do anything else.
One very important difference between the transform and the parser is that the json objects are processed one by one. In practice, this means that only the fields in the first object of the array are considered and fields in other other objects that were not present in the first one are just ignored. To avoid this. It's advisable to ensure that all the objects contain exactly the same fields or provide the list of fields using the `fields` option.
The async parser processes the data as a non-blocking stream. This approach ensures a consistent memory footprint and avoids blocking JavaScript's event loop. Thus, it's better suited for large datasets or systems with high concurrency.
```javascript
const fs = require('fs');
const Json2csvTransform = require('json2csv').Transform;
One very important difference between the asynchronous and the synchronous APIs is that, with the asynchronous API, JSON objects are processed one by one. In practice, this means that only the fields in the first object of the array are automatically detected and other fields are just ignored. To avoid this, it's advisable to ensure that all the objects contain exactly the same fields or provide the list of fields using the `fields` option.
The async API takes a second options argument that's directly passed to the underlying streams and accepts the same options as the standard [Node.js streams](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options).
Instances of `AsyncParser` expose three objects:
* *input:* Allows you to push more data into the parser.
* *processor:* A readable stream representing the whole data processing pipeline. You can listen to all the standard events of Node.js streams.
* *transform:* The json2csv transform. See below for more details.
```js
const { AsyncParser } = require('json2csv');
const fields = ['field1', 'field2', 'field3'];
const opts = { fields };
const transformOpts = { highWaterMark: 8192 };
const asyncParser = new AsyncParser(opts, transformOpts);
let csv = '';
asyncParser.processor
.on('data', chunk => (csv += chunk.toString()))
.on('end', () => console.log(csv))
.on('error', err => console.error(err));
// You can also listen for events on the conversion and see how the header or the lines are coming out.
asyncParser.transform
.on('header', header => console.log(header))
.on('line', line => console.log(line))
.on('error', err => console.log(err));
asyncParser.input.push(data); // This data might come from an HTTP request, etc.
asyncParser.input.push(null); // Sending `null` to a stream signals that no more data is expected and ends it.
```
`AsyncParser` also exposes some convenience methods:
* `fromInput` allows you to set the input stream.
* `throughTransform` allows you to add transforms to the stream.
* `toOutput` allows you to set the output stream.
* `promise` returns a promise that resolves when the stream ends or errors.
```js
const { createReadStream, createWriteStream } = require('fs');
const { AsyncParser } = require('json2csv');
const fields = ['field1', 'field2', 'field3'];
const opts = { fields };
const transformOpts = { highWaterMark: 8192 };
const input = createReadStream(inputPath, { encoding: 'utf8' });
const output = createWriteStream(outputPath, { encoding: 'utf8' });
const asyncParser = new AsyncParser(opts, transformOpts);
asyncParser.fromInput(input).toOutput(output).promise()
.then(csv => console.log(csv))
.catch(err => console.error(err));
```
You can also use the convenience method `parseAsync`, which returns a promise.
```js
const { parseAsync } = require('json2csv');
const fields = ['field1', 'field2', 'field3'];
const opts = { fields };
parseAsync(myData, opts)
.then(csv => console.log(csv))
.catch(err => console.error(err));
```
### json2csv transform (Streaming API)
json2csv also exposes the raw stream transform so you can pipe your json content into it. This is the same Transform that `AsyncParser` uses under the hood.
```js
const { createReadStream, createWriteStream } = require('fs');
const { Transform } = require('json2csv');
const fields = ['field1', 'field2', 'field3'];
const opts = { fields };
const transformOpts = { highWaterMark: 16384, encoding: 'utf-8' };
const input = fs.createReadStream(inputPath, { encoding: 'utf8' });
const output = fs.createWriteStream(outputPath, { encoding: 'utf8' });
const json2csv = new Json2csvTransform(opts, transformOpts);
const input = createReadStream(inputPath, { encoding: 'utf8' });
const output = createWriteStream(outputPath, { encoding: 'utf8' });
const json2csv = new Transform(opts, transformOpts);

@@ -268,3 +337,6 @@ const processor = input.pipe(json2csv).pipe(output);

```javascript
```js
const { Transform } = require("json2csv");
const { Readable } = require('stream');
const input = new Readable({ objectMode: true });

@@ -277,7 +349,9 @@ input._read = () => {};

const output = process.stdout;
const opts = {};
const transformOpts = { objectMode: true };
const json2csv = new Json2csvTransform(opts, transformOpts);
const processor = input.pipe(transform).pipe(output);
const json2csv = new Transform(opts, transformOpts);
const processor = input.pipe(json2csv).pipe(output);
```

@@ -288,3 +362,3 @@

#### Example `fields` option
``` javascript
```js
{

@@ -316,4 +390,5 @@ fields: [

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['car', 'price', 'color'];

@@ -336,3 +411,3 @@ const myCars = [

const json2csvParser = new Json2csvParser({ fields });
const json2csvParser = new Parser({ fields });
const csv = json2csvParser.parse(myCars);

@@ -356,7 +431,7 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['car', 'color'];
const json2csvParser = new Json2csvParser({ fields });
const json2csvParser = new Parser({ fields });
const csv = json2csvParser.parse(myCars);

@@ -380,4 +455,5 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = [{

@@ -391,3 +467,3 @@ label: 'Car Name',

const json2csvParser = new Json2csvParser({ fields });
const json2csvParser = new Parser({ fields });
const csv = json2csvParser.parse(myCars);

@@ -402,4 +478,5 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['car.make', 'car.model', 'price', 'color'];

@@ -422,3 +499,3 @@ const myCars = [

const json2csvParser = new Json2csvParser({ fields });
const json2csvParser = new Parser({ fields });
const csv = json2csvParser.parse(myCars);

@@ -442,7 +519,8 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['car', 'price', 'color'];
const json2csvParser = new Json2csvParser({ fields, delimiter: '\t' });
const json2csvParser = new Parser({ fields, delimiter: '\t' });
const tsv = json2csvParser.parse(myCars);

@@ -469,4 +547,5 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = [{

@@ -480,3 +559,3 @@ label: 'Car Name',

const json2csvParser = new Json2csvParser({ fields, quote: '' });
const json2csvParser = new Parser({ fields, quote: '' });
const csv = json2csvParser.parse(myCars);

@@ -500,4 +579,5 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['carModel', 'price', 'colors'];

@@ -524,3 +604,3 @@ const myCars = [

const json2csvParser = new Json2csvParser({ fields, unwind: 'colors' });
const json2csvParser = new Parser({ fields, unwind: 'colors' });
const csv = json2csvParser.parse(myCars);

@@ -550,4 +630,5 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color'];

@@ -598,3 +679,3 @@ const myCars = [

const json2csvParser = new Json2csvParser({ fields, unwind: ['items', 'items.items'] });
const json2csvParser = new Parser({ fields, unwind: ['items', 'items.items'] });
const csv = json2csvParser.parse(myCars);

@@ -621,4 +702,5 @@

```javascript
const Json2csvParser = require('json2csv').Parser;
```js
const { Parser } = require('json2csv');
const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color'];

@@ -669,3 +751,3 @@ const myCars = [

const json2csvParser = new Json2csvParser({ fields, unwind: ['items', 'items.items'], unwindBlank: true });
const json2csvParser = new Parser({ fields, unwind: ['items', 'items.items'], unwindBlank: true });
const csv = json2csvParser.parse(myCars);

@@ -691,3 +773,3 @@

What in 3.X used to be
```
```js
const json2csv = require('json2csv');

@@ -698,3 +780,3 @@ const csv = json2csv({ data: myData, fields: myFields, unwindPath: paths, ... });

can be replaced by
```
```js
const Json2csvParser = require('json2csv').Parser;

@@ -706,3 +788,3 @@ const json2csvParser = new Json2csvParser({ fields: myFields, unwind: paths, ... });

or the convenience method
```
```js
const json2csv = require('json2csv');

@@ -715,8 +797,44 @@ const csv = json2csv.parse(myData, { fields: myFields, unwind: paths, ... });

## Known Gotchas
### Excel support
#### Avoiding Excel autoformatting
Excel tries to automatically detect the format of every field (number, date, string, etc.) regardless of whether the field is quoted or not.
This might produce a few undesired effects with, for example, serial numbers:
- Large numbers are displayed using scientific notation
- Leading zeros are stripped.
The `excelStrings` option produces an Excel-specific CSV file that forces Excel to interpret string fields as strings. Please note that the CSV will look incorrect if viewed anywhere other than Excel.
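As a rough sketch of using the option programmatically (the `serial` field and data below are made up for illustration):

```js
const { Parser } = require('json2csv');

// Values Excel would otherwise mangle: long numbers turn into
// scientific notation and leading zeros are stripped.
const rows = [{ serial: '0012345678901234' }];

// excelStrings wraps string values so Excel keeps them as text.
const parser = new Parser({ fields: ['serial'], excelStrings: true });
console.log(parser.parse(rows));
```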
#### Preserving new lines
Excel only recognizes `\r\n` as a valid new line inside a cell.
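A minimal sketch, assuming a hypothetical `note` field that contains a line break:

```js
const { Parser } = require('json2csv');

// \r\n row endings let Excel distinguish rows from the \n
// preserved inside the cell value.
const parser = new Parser({ eol: '\r\n' });
console.log(parser.parse([{ note: 'first line\nsecond line' }]));
```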
#### Unicode Support
Excel can display Unicode correctly (just set the `withBOM` option to true). However, Excel can't save Unicode, so if you make changes to the CSV and save it from Excel, the Unicode characters will not be displayed correctly.
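For example (hypothetical data):

```js
const { Parser } = require('json2csv');

// The BOM lets Excel detect the file as UTF-8 so 'München' renders correctly.
const parser = new Parser({ withBOM: true });
console.log(parser.parse([{ city: 'München' }]));
```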
### PowerShell escaping
PowerShell does some strange double-quote escaping which results in each line of the CSV missing the first and last quote if the result is output directly to stdout. Instead, it's advisable to write the result directly to a file.
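For example, using the `-o` flag from the usage section instead of shell redirection:

```sh
# Writing straight to a file sidesteps PowerShell's stdout quoting
json2csv -i input.json -o out.csv
```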
## Building
When developing, it's necessary to run `webpack` to prepare the built script. This can be done easily with `npm run build`.
json2csv is packaged using `rollup`. You can generate the packages by running:
If `webpack` is not already available from the command line, use `npm install -g webpack`.
```sh
npm run build
```
which generates 3 files under the `dist` folder:
* `json2csv.umd.js` UMD module transpiled to ES5
* `json2csv.esm.js` ES5 module (import/export)
* `json2csv.cjs.js` CommonJS module
When you use packaging tools like webpack, they know which version to use depending on your configuration.
## Testing

@@ -726,3 +844,3 @@

```bash
```sh
$ npm run lint

@@ -733,3 +851,3 @@ ```

```bash
```sh
$ npm run test-with-coverage

@@ -742,3 +860,3 @@ ```

```bash
```sh
$ npm install

@@ -749,6 +867,2 @@ ```

## Similar Projects
* [Papa Parse](http://papaparse.com/)
## License

@@ -755,0 +869,0 @@

rollup.config.js

@@ -25,3 +25,4 @@ import resolve from 'rollup-plugin-node-resolve';

exclude: ['node_modules/**'],
presets: ['es2015-rollup']
babelrc: false,
presets: [['@babel/env', { modules: false }]],
})

@@ -42,3 +43,4 @@ ]

exclude: ['node_modules/**'],
presets: ['es2015-rollup']
babelrc: false,
presets: [['@babel/env', { modules: false }]],
})

@@ -45,0 +47,0 @@ ]

