Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

json2csv

Package Overview
Dependencies
Maintainers
4
Versions
104
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

json2csv - npm Package Compare versions

Comparing version 4.5.4 to 5.0.0

lib/transforms/flatten.js

268

bin/json2csv.js

@@ -5,5 +5,6 @@ #!/usr/bin/env node

const fs = require('fs');
const { promisify } = require('util');
const { createReadStream, createWriteStream, readFile: readFileOrig, writeFile: writeFileOrig } = require('fs');
const os = require('os');
const path = require('path');
const { isAbsolute, join } = require('path');
const program = require('commander');

@@ -15,2 +16,8 @@ const pkg = require('../package');

const readFile = promisify(readFileOrig);
const writeFile = promisify(writeFileOrig);
const isAbsolutePath = promisify(isAbsolute);
const joinPath = promisify(join);
const { unwind, flatten } = json2csv.transforms;
const JSON2CSVParser = json2csv.Parser;

@@ -22,16 +29,12 @@ const Json2csvTransform = json2csv.Transform;

.option('-i, --input <input>', 'Path and name of the incoming json file. Defaults to stdin.')
.option('-o, --output [output]', 'Path and name of the resulting csv file. Defaults to stdout.')
.option('-o, --output <output>', 'Path and name of the resulting csv file. Defaults to stdout.')
.option('-c, --config <path>', 'Specify a file with a valid JSON configuration.')
.option('-n, --ndjson', 'Treat the input as NewLine-Delimited JSON.')
.option('-s, --no-streaming', 'Process the whole JSON array in memory instead of doing it line by line.')
.option('-f, --fields <fields>', 'List of fields to process. Defaults to field auto-detection.')
.option('-c, --fields-config <path>', 'File with a fields configuration as a JSON array.')
.option('-u, --unwind <paths>', 'Creates multiple rows from a single JSON document similar to MongoDB unwind.')
.option('-B, --unwind-blank', 'When unwinding, blank out instead of repeating data.')
.option('-F, --flatten', 'Flatten nested objects.')
.option('-S, --flatten-separator <separator>', 'Flattened keys separator. Defaults to \'.\'.')
.option('-v, --default-value [defaultValue]', 'Default value to use for missing fields.')
.option('-q, --quote [quote]', 'Character(s) to use as quote mark. Defaults to \'"\'.')
.option('-Q, --double-quote [doubleQuote]', 'Character(s) to use as a escaped quote. Defaults to a double `quote`, \'""\'.')
.option('-d, --delimiter [delimiter]', 'Character(s) to use as delimiter. Defaults to \',\'.')
.option('-e, --eol [eol]', 'Character(s) to use as End-of-Line for separating rows. Defaults to \'\\n\'.')
.option('-v, --default-value <defaultValue>', 'Default value to use for missing fields.')
.option('-q, --quote <quote>', 'Character(s) to use as quote mark. Defaults to \'"\'.')
.option('-Q, --escaped-quote <escapedQuote>', 'Character(s) to use as a escaped quote. Defaults to a double `quote`, \'""\'.')
.option('-d, --delimiter <delimiter>', 'Character(s) to use as delimiter. Defaults to \',\'.', ',')
.option('-e, --eol <eol>', 'Character(s) to use as End-of-Line for separating rows. Defaults to \'\\n\'.', os.EOL)
.option('-E, --excel-strings','Wraps string data to force Excel to interpret it as string even if it contains a number.')

@@ -42,76 +45,48 @@ .option('-H, --no-header', 'Disable the column name header.')

.option('-p, --pretty', 'Print output as a pretty table. Use only when printing to console.')
// Built-in transforms
.option('--unwind [paths]', 'Creates multiple rows from a single JSON document similar to MongoDB unwind.')
.option('--unwind-blank', 'When unwinding, blank out instead of repeating data. Defaults to false.', false)
.option('--flatten-objects', 'Flatten nested objects. Defaults to false.', false)
.option('--flatten-arrays', 'Flatten nested arrays. Defaults to false.', false)
.option('--flatten-separator <separator>', 'Flattened keys separator. Defaults to \'.\'.', '.')
.parse(process.argv);
function makePathAbsolute(filePath) {
return (filePath && !path.isAbsolute(filePath))
? path.join(process.cwd(), filePath)
return (filePath && !isAbsolutePath(filePath))
? joinPath(process.cwd(), filePath)
: filePath;
}
const inputPath = makePathAbsolute(program.input);
const outputPath = makePathAbsolute(program.output);
const fieldsConfigPath = makePathAbsolute(program.fieldsConfig);
program.input = makePathAbsolute(program.input);
program.output = makePathAbsolute(program.output);
program.config = makePathAbsolute(program.config);
program.delimiter = program.delimiter || ',';
program.eol = program.eol || os.EOL;
// don't fail if piped to e.g. head
/* istanbul ignore next */
process.stdout.on('error', (error) => {
if (error.code === 'EPIPE') {
process.exit(1);
}
if (error.code === 'EPIPE') process.exit(1);
});
function getFields() {
if (fieldsConfigPath) {
return require(fieldsConfigPath);
}
function getInputStream(inputPath) {
if (inputPath) return createReadStream(inputPath, { encoding: 'utf8' });
return program.fields
? program.fields.split(',')
: undefined;
process.stdin.resume();
process.stdin.setEncoding('utf8');
return process.stdin;
}
function getInputStream() {
if (!inputPath) {
process.stdin.resume();
process.stdin.setEncoding('utf8');
return process.stdin;
}
return fs.createReadStream(inputPath, { encoding: 'utf8' })
function getOutputStream(outputPath, config) {
if (outputPath) return createWriteStream(outputPath, { encoding: 'utf8' });
if (config.pretty) return new TablePrinter(config).writeStream();
return process.stdout;
}
function getInput() {
if (!inputPath) {
return getInputFromStdin();
}
if (program.ndjson) {
return getInputFromNDJSON();
}
try {
return Promise.resolve(require(inputPath));
} catch (err) {
return Promise.reject(err);
}
async function getInput(inputPath, ndjson) {
if (!inputPath) return getInputFromStdin();
if (ndjson) return parseNdJson(await readFile(inputPath, 'utf8'));
return require(inputPath);
}
function getInputFromNDJSON() {
async function getInputFromStdin() {
return new Promise((resolve, reject) => {
fs.readFile(inputPath, 'utf8', (err, data) => {
if (err) {
reject(err);
return;
}
resolve(parseNdJson(data));
});
});
}
function getInputFromStdin() {
return new Promise((resolve, reject) => {
process.stdin.resume();

@@ -126,7 +101,3 @@ process.stdin.setEncoding('utf8');

try {
const rows = program.ndjson
? parseNdJson(inputData)
: JSON.parse(inputData);
resolve(rows);
resolve(program.ndjson ? parseNdJson(inputData) : JSON.parse(inputData));
} catch (err) {

@@ -139,107 +110,82 @@ reject(new Error('Invalid data received from stdin', err));

function processOutput(csv) {
async function processOutput(outputPath, csv, config) {
if (!outputPath) {
// eslint-disable-next-line no-console
program.pretty ? (new TablePrinter(program)).printCSV(csv) : console.log(csv);
config.pretty ? (new TablePrinter(config)).printCSV(csv) : console.log(csv);
return;
}
return new Promise((resolve, reject) => {
fs.writeFile(outputPath, csv, (err) => {
if (err) {
reject(err);
return;
}
await writeFile(outputPath, csv);
}
resolve();
});
async function processInMemory(config, opts) {
const input = await getInput(program.input, config.ndjson);
const output = new JSON2CSVParser(opts).parse(input);
await processOutput(program.output, output, config);
}
async function processStream(config, opts) {
const input = getInputStream(program.input);
const transform = new Json2csvTransform(opts);
const output = getOutputStream(program.output, config);
await new Promise((resolve, reject) => {
input.pipe(transform).pipe(output);
input.on('error', reject);
transform.on('error', reject);
output.on('error', reject)
.on('finish', resolve);
});
}
Promise.resolve()
.then(() => {
const opts = {
fields: getFields(),
unwind: program.unwind ? program.unwind.split(',') : [],
unwindBlank: program.unwindBlank,
flatten: program.flatten,
flattenSeparator: program.flattenSeparator,
defaultValue: program.defaultValue,
quote: program.quote,
doubleQuote: program.doubleQuote,
delimiter: program.delimiter,
eol: program.eol,
excelStrings: program.excelStrings,
header: program.header,
includeEmptyRows: program.includeEmptyRows,
withBOM: program.withBom
};
(async (program) => {
try {
const config = Object.assign({}, program.config ? require(program.config) : {}, program);
if (!program.streaming) {
return getInput()
.then(input => new JSON2CSVParser(opts).parse(input))
.then(processOutput);
const transforms = [];
if (config.unwind) {
transforms.push(unwind({
paths: config.unwind === true ? undefined : config.unwind.split(','),
blankOut: config.unwindBlank
}));
}
const transform = new Json2csvTransform(opts);
const input = getInputStream();
const stream = input.pipe(transform);
if (config.flattenObjects || config.flattenArrays) {
transforms.push(flatten({
objects: config.flattenObjects,
arrays: config.flattenArrays,
separator: config.flattenSeparator
}));
}
if (program.output) {
const outputStream = fs.createWriteStream(outputPath, { encoding: 'utf8' });
const output = stream.pipe(outputStream);
return new Promise((resolve, reject) => {
input.on('error', reject);
outputStream.on('error', reject);
output.on('error', reject);
output.on('finish', () => resolve());
});
}
const opts = {
transforms,
fields: config.fields
? (Array.isArray(config.fields) ? config.fields : config.fields.split(','))
: config.fields,
defaultValue: config.defaultValue,
quote: config.quote,
escapedQuote: config.escapedQuote,
delimiter: config.delimiter,
eol: config.eol,
excelStrings: config.excelStrings,
header: config.header,
includeEmptyRows: config.includeEmptyRows,
withBOM: config.withBom
};
if (!program.pretty) {
const output = stream.pipe(process.stdout);
return new Promise((resolve, reject) => {
input.on('error', reject);
stream
.on('finish', () => resolve())
.on('error', reject);
output.on('error', reject);
});
await (config.streaming ? processStream : processInMemory)(config, opts);
} catch(err) {
let processedError = err;
if (program.input && err.message.includes(program.input)) {
processedError = new Error(`Invalid input file. (${err.message})`);
} else if (program.output && err.message.includes(program.output)) {
processedError = new Error(`Invalid output file. (${err.message})`);
} else if (program.config && err.message.includes(program.config)) {
processedError = new Error(`Invalid config file. (${err.message})`);
}
return new Promise((resolve, reject) => {
input.on('error', reject);
stream.on('error', reject);
let csv = '';
const table = new TablePrinter(program);
stream
.on('data', chunk => {
csv += chunk.toString();
const index = csv.lastIndexOf(program.eol);
let lines = csv.substring(0, index);
csv = csv.substring(index + 1);
if (lines) {
table.push(lines);
}
})
.on('end', () => {
table.end(csv);
resolve();
})
.on('error', reject);
});
})
.catch((err) => {
if (inputPath && err.message.includes(inputPath)) {
err = new Error('Invalid input file. (' + err.message + ')');
} else if (outputPath && err.message.includes(outputPath)) {
err = new Error('Invalid output file. (' + err.message + ')');
} else if (fieldsConfigPath && err.message.includes(fieldsConfigPath)) {
err = new Error('Invalid fields config file. (' + err.message + ')');
}
// eslint-disable-next-line no-console
console.error(err);
console.error(processedError);
process.exit(1);
});
}
})(program);
'use strict';
const { Writable } = require('stream');
const MIN_CELL_WIDTH = 15;

@@ -94,4 +96,23 @@

}
writeStream() {
let csv = '';
const table = this;
return new Writable({
write(chunk, encoding, callback) {
csv += chunk.toString();
const index = csv.lastIndexOf(table.opts.eol);
let lines = csv.substring(0, index);
csv = csv.substring(index + 1);
if (lines) table.push(lines);
callback();
},
final() {
table.end(csv);
}
});
}
}
module.exports = TablePrinter;
module.exports = TablePrinter;

@@ -1,14 +0,56 @@

# Change Log
# Changelog
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
## [4.5.4](https://github.com/zemirco/json2csv/compare/v4.5.3...v4.5.4) (2019-10-09)
## [5.0.0](https://github.com/zemirco/json2csv/compare/v4.5.2...v5.0.0) (2020-03-15)
### ⚠ BREAKING CHANGES
* Node 8 and 9 no longer supported, use Node 10 or greater. It might still work, but it has reached End-Of-Life.
* module no longer takes `unwind`, `unwindBlank`, `flatten` or the `flattenSeparator` options, instead see the new `transforms` option. CLI options are unchanged from the callers side, but use the built in transforms under the hood.
* Add support for transforms
* Add documentation about transforms
* remove extra commonjs build, use starting point in package.json `main` field.
* Renamed `doubleQuote` to `escapedQuote`
* remove `stringify` option
* `--fields-config` option has been removed, use the new `--config` option for all configuration, not just fields.
* Drop node 6 and 7, and add node 11 and 12
### Bug Fixes
* Ensure defaultValue is always applied in the case of nulls ([#427](https://github.com/zemirco/json2csv/issues/427)) ([0a01470](https://github.com/zemirco/json2csv/commit/0a01470))
* Always error asynchronously from parseAsync method ([#412](https://github.com/zemirco/json2csv/issues/412)) ([16cc044](https://github.com/zemirco/json2csv/commit/16cc044))
* audit deps ([15992cf](https://github.com/zemirco/json2csv/commit/15992cf))
* drop Node 8 and 9 ([7295465](https://github.com/zemirco/json2csv/commit/7295465))
* Make some CLI options mandatory ([#433](https://github.com/zemirco/json2csv/issues/433)) ([bd51527](https://github.com/zemirco/json2csv/commit/bd51527))
* Remove CommonJS build ([#422](https://github.com/zemirco/json2csv/issues/422)) ([5ce0089](https://github.com/zemirco/json2csv/commit/5ce0089))
* Remove stringify option ([#419](https://github.com/zemirco/json2csv/issues/419)) ([39f303d](https://github.com/zemirco/json2csv/commit/39f303d))
* Rename doubleQuote to escapedQuote ([#418](https://github.com/zemirco/json2csv/issues/418)) ([f99408c](https://github.com/zemirco/json2csv/commit/f99408c))
* update CI node versions ([#413](https://github.com/zemirco/json2csv/issues/413)) ([6fd6c09](https://github.com/zemirco/json2csv/commit/6fd6c09))
* update commander cli dep ([74aa40a](https://github.com/zemirco/json2csv/commit/74aa40a))
* update commander dep ([272675b](https://github.com/zemirco/json2csv/commit/272675b))
* **deps:** audit dependencies ([bf9877a](https://github.com/zemirco/json2csv/commit/bf9877a))
* **deps:** update commander ([3f099f2](https://github.com/zemirco/json2csv/commit/3f099f2))
* **security:** fix audit vulnerabilities ([b57715b](https://github.com/zemirco/json2csv/commit/b57715b))
### Features
* Add support for flattening arrays and change transforms arguments to an object. ([#432](https://github.com/zemirco/json2csv/issues/432)) ([916e448](https://github.com/zemirco/json2csv/commit/916e448))
* Add support for transforms ([#431](https://github.com/zemirco/json2csv/issues/431)) ([f1d04d0](https://github.com/zemirco/json2csv/commit/f1d04d0))
* Improve async promise to optionally not return ([#421](https://github.com/zemirco/json2csv/issues/421)) ([3e296f6](https://github.com/zemirco/json2csv/commit/3e296f6))
* Improves the unwind transform so it unwinds all unwindable fields if … ([#434](https://github.com/zemirco/json2csv/issues/434)) ([ec1f301](https://github.com/zemirco/json2csv/commit/ec1f301))
* replace fields config by a global config ([#338](https://github.com/zemirco/json2csv/issues/338)) ([d6c1c5f](https://github.com/zemirco/json2csv/commit/d6c1c5f))
## [4.5.2](https://github.com/zemirco/json2csv/compare/v4.5.1...v4.5.2) (2019-07-05)
### Bug Fixes
* Improve the inference of the header name when using function as value ([#395](https://github.com/zemirco/json2csv/issues/395)) ([590d19a](https://github.com/zemirco/json2csv/commit/590d19a))
<a name="4.4.0"></a>

@@ -15,0 +57,0 @@ ## [4.4.0](https://github.com/zemirco/json2csv/compare/v4.3.5...v4.4.0) (2019-03-25)

@@ -7,2 +7,4 @@ 'use strict';

const JSON2CSVTransform = require('./JSON2CSVTransform');
const flatten = require('./transforms/flatten');
const unwind = require('./transforms/unwind');

@@ -39,1 +41,6 @@ module.exports.Parser = JSON2CSVParser;

};
module.exports.transforms = {
flatten,
unwind,
};

@@ -42,4 +42,11 @@ 'use strict';

promise() {
promise(returnCSV = true) {
return new Promise((resolve, reject) => {
if (!returnCSV) {
this.processor
.on('finish', () => resolve())
.on('error', err => reject(err));
return;
}
let csvBuffer = [];

@@ -46,0 +53,0 @@ this.processor

@@ -5,3 +5,3 @@ 'use strict';

const lodashGet = require('lodash.get');
const { getProp, setProp, fastJoin, flattenReducer } = require('./utils');
const { getProp, fastJoin, flattenReducer } = require('./utils');

@@ -11,3 +11,2 @@ class JSON2CSVBase {

this.opts = this.preprocessOpts(opts);
this.preprocessRow = this.memoizePreprocessRow();
}

@@ -23,14 +22,13 @@

const processedOpts = Object.assign({}, opts);
processedOpts.unwind = !Array.isArray(processedOpts.unwind)
? (processedOpts.unwind ? [processedOpts.unwind] : [])
: processedOpts.unwind
processedOpts.transforms = !Array.isArray(processedOpts.transforms)
? (processedOpts.transforms ? [processedOpts.transforms] : [])
: processedOpts.transforms
processedOpts.delimiter = processedOpts.delimiter || ',';
processedOpts.flattenSeparator = processedOpts.flattenSeparator || '.';
processedOpts.eol = processedOpts.eol || os.EOL;
processedOpts.quote = typeof processedOpts.quote === 'string'
? opts.quote
? processedOpts.quote
: '"';
processedOpts.doubleQuote = typeof processedOpts.doubleQuote === 'string'
? processedOpts.doubleQuote
: processedOpts.quote + processedOpts.quote;
processedOpts.escapedQuote = typeof processedOpts.escapedQuote === 'string'
? processedOpts.escapedQuote
: `${processedOpts.quote}${processedOpts.quote}`;
processedOpts.header = processedOpts.header !== false;

@@ -58,3 +56,2 @@ processedOpts.includeEmptyRows = processedOpts.includeEmptyRows || false;

: row => getProp(row, fieldInfo, this.opts.defaultValue),
stringify: true,
};

@@ -74,3 +71,2 @@ }

: row => getProp(row, fieldInfo.value, defaultValue),
stringify: fieldInfo.stringify !== undefined ? fieldInfo.stringify : true,
};

@@ -90,3 +86,2 @@ }

},
stringify: fieldInfo.stringify !== undefined ? fieldInfo.stringify : true,
}

@@ -112,35 +107,12 @@ }

memoizePreprocessRow() {
if (this.opts.unwind && this.opts.unwind.length) {
if (this.opts.flatten) {
return function (row) {
return this.unwindData(row, this.opts.unwind)
.map(row => this.flatten(row, this.opts.flattenSeparator));
};
}
return function (row) {
return this.unwindData(row, this.opts.unwind);
};
}
if (this.opts.flatten) {
return function (row) {
return [this.flatten(row, this.opts.flattenSeparator)];
};
}
return function (row) {
return [row];
};
}
/**
* Preprocess each object according to the give opts (unwind, flatten, etc.).
* The actual body of the function is dynamically set on the constructor by the
* `memoizePreprocessRow` method after parsing the options.
*
* Preprocess each object according to the given transforms (unwind, flatten, etc.).
* @param {Object} row JSON object to be converted in a CSV row
*/
preprocessRow() {}
preprocessRow(row) {
return this.opts.transforms.reduce((rows, transform) =>
rows.map(row => transform(row)).reduce(flattenReducer, []),
[row]
);
}

@@ -178,3 +150,3 @@ /**

processCell(row, fieldInfo) {
return this.processValue(fieldInfo.value(row), fieldInfo.stringify);
return this.processValue(fieldInfo.value(row));
}

@@ -186,6 +158,5 @@

* @param {Any} value Value to be included in a CSV cell
* @param {Boolean} stringify Details of the field to process to be a CSV cell
* @returns {String} Value stringified and processed
*/
processValue(value, stringify) {
processValue(value) {
if (value === null || value === undefined) {

@@ -210,13 +181,6 @@ return undefined;

if(value.includes(this.opts.quote)) {
value = value.replace(new RegExp(this.opts.quote, 'g'), this.opts.doubleQuote);
value = value.replace(new RegExp(this.opts.quote, 'g'), this.opts.escapedQuote);
}
// This should probably be remove together with the whole strignify option
if (stringify) {
value = `${this.opts.quote}${value}${this.opts.quote}`;
} else {
value = value
.replace(new RegExp(`^${this.opts.doubleQuote}`), this.opts.quote)
.replace(new RegExp(`${this.opts.doubleQuote}$`), this.opts.quote);
}
value = `${this.opts.quote}${value}${this.opts.quote}`;

@@ -230,73 +194,4 @@ if (this.opts.excelStrings) {

}
/**
* Performs the flattening of a data row recursively
*
* @param {Object} dataRow Original JSON object
* @param {String} separator Separator to be used as the flattened field name
* @returns {Object} Flattened object
*/
flatten(dataRow, separator) {
function step (obj, flatDataRow, currentPath) {
Object.keys(obj).forEach((key) => {
const value = obj[key];
const newPath = currentPath
? `${currentPath}${separator}${key}`
: key;
if (typeof value !== 'object'
|| value === null
|| Array.isArray(value)
|| Object.prototype.toString.call(value.toJSON) === '[object Function]'
|| !Object.keys(value).length) {
flatDataRow[newPath] = value;
return;
}
step(value, flatDataRow, newPath);
});
return flatDataRow;
}
return step(dataRow, {});
}
/**
* Performs the unwind recursively in specified sequence
*
* @param {Object} dataRow Original JSON object
* @param {String[]} unwindPaths The paths as strings to be used to deconstruct the array
* @returns {Array} Array of objects containing all rows after unwind of chosen paths
*/
unwindData(dataRow, unwindPaths) {
const unwind = (rows, unwindPath) => {
return rows
.map(row => {
const unwindArray = lodashGet(row, unwindPath);
if (!Array.isArray(unwindArray)) {
return row;
}
if (!unwindArray.length) {
return setProp(row, unwindPath, undefined);
}
return unwindArray.map((unwindRow, index) => {
const clonedRow = (this.opts.unwindBlank && index > 0)
? {}
: row;
return setProp(clonedRow, unwindPath, unwindRow);
});
})
.reduce(flattenReducer, []);
};
return unwindPaths.reduce(unwind, [dataRow]);
}
}
module.exports = JSON2CSVBase;

@@ -60,5 +60,3 @@ 'use strict';

if ((!this.opts.unwind || !this.opts.unwind.length) && !this.opts.flatten) {
return processedData;
}
if (this.opts.transforms.length === 0) return processedData;

@@ -65,0 +63,0 @@ return processedData

@@ -16,3 +16,2 @@ 'use strict';

this.opts = this.preprocessOpts(opts);
this.preprocessRow = this.memoizePreprocessRow();

@@ -83,3 +82,3 @@ this._data = '';

} else {
e.message = 'Invalid JSON (' + line + ')'
e.message = `Invalid JSON (${line})`
transform.emit('error', e);

@@ -144,3 +143,3 @@ }

if(err.message.includes('Unexpected')) {
err.message = 'Invalid JSON (' + err.message + ')';
err.message = `Invalid JSON (${err.message})`;
}

@@ -147,0 +146,0 @@ transform.emit('error', err);

'use strict';
function getProp(obj, path, defaultValue) {
return (obj[path] === undefined || obj[path] === null) ? defaultValue : obj[path];
return obj[path] === undefined ? defaultValue : obj[path];
}

@@ -6,0 +6,0 @@

{
"name": "json2csv",
"version": "4.5.4",
"version": "5.0.0",
"description": "Convert JSON to CSV",

@@ -22,3 +22,3 @@ "keywords": [

},
"main": "dist/json2csv.cjs.js",
"main": "lib/json2csv.js",
"module": "dist/json2csv.esm.js",

@@ -30,2 +30,3 @@ "browser": "dist/json2csv.umd.js",

},
"homepage": "http://zemirco.github.io/json2csv",
"scripts": {

@@ -44,3 +45,3 @@ "build": "rollup -c",

"dependencies": {
"commander": "^2.15.1",
"commander": "^5.0.0",
"jsonparse": "^1.3.1",

@@ -53,17 +54,24 @@ "lodash.get": "^4.4.2"

"coveralls": "^3.0.3",
"docpress": "^0.7.6",
"eslint": "^5.14.1",
"docpress": "^0.8.0",
"eslint": "^6.1.0",
"gh-pages": "^2.0.1",
"in-publish": "^2.0.0",
"nyc": "^13.3.0",
"rollup": "^1.2.2",
"nyc": "^14.1.1",
"rollup": "^1.11.0",
"rollup-plugin-babel": "^4.3.2",
"rollup-plugin-commonjs": "^9.1.3",
"rollup-plugin-commonjs": "^10.0.2",
"rollup-plugin-node-builtins": "^2.1.2",
"rollup-plugin-node-globals": "^1.2.1",
"rollup-plugin-node-resolve": "^3.3.0",
"standard-version": "^5.0.0",
"rollup-plugin-node-resolve": "^5.2.0",
"standard-version": "^7.0.0",
"tap-spec": "^5.0.0",
"tape": "^4.10.1"
},
"engines": {
"node": ">= 10",
"npm": ">= 6.13.0"
},
"volta": {
"node": "10.19.0"
}
}

@@ -18,3 +18,3 @@ # json2csv

- Support for standard JSON as well as NDJSON
- Advanced data selection (automatic field discovery, underscore-like selectors, custom data getters, default values for missing fields, flattening nested object, unwinding arrays, etc.)
- Advanced data selection (automatic field discovery, underscore-like selectors, custom data getters, default values for missing fields, transforms, etc.)
- Highly customizable (supportting custom quotation marks, delimiters, eol values, etc.)

@@ -28,6 +28,7 @@ - Automatic escaping (preserving new lines, quotes, etc. in them)

You can install json2csv as a dependency using NPM.
You can install json2csv as a dependency using NPM.
Requires **Node v10** or higher.
```sh
# Global so it can be call from anywhere
# Global so it can be called from anywhere
$ npm install -g json2csv

@@ -54,28 +55,29 @@ # or as a dependency of a project

```sh
```bash
Usage: json2csv [options]
Options:
-V, --version output the version number
-i, --input <input> Path and name of the incoming json file. Defaults to stdin.
-o, --output [output] Path and name of the resulting csv file. Defaults to stdout.
-n, --ndjson Treat the input as NewLine-Delimited JSON.
-s, --no-streaming Process the whole JSON array in memory instead of doing it line by line.
-f, --fields <fields> List of fields to process. Defaults to field auto-detection.
-c, --fields-config <path> File with a fields configuration as a JSON array.
-u, --unwind <paths> Creates multiple rows from a single JSON document similar to MongoDB unwind.
-B, --unwind-blank When unwinding, blank out instead of repeating data.
-F, --flatten Flatten nested objects.
-S, --flatten-separator <separator> Flattened keys separator. Defaults to '.'.
-v, --default-value [defaultValue] Default value to use for missing fields.
-q, --quote [value] Character(s) to use a quote mark. Defaults to '"'.
-Q, --double-quote [value] Character(s) to use as a escaped quote. Defaults to a double `quote`, '""'.
-d, --delimiter [delimiter] Character(s) to use as delimiter. Defaults to ','.
-e, --eol [value] Character(s) to use as End-of-Line for separating rows. Defaults to '\n'.
-E, --excel-strings Wraps string data to force Excel to interpret it as string even if it contains a number.
-H, --no-header Disable the column name header.
-a, --include-empty-rows Includes empty rows in the resulting CSV output.
-b, --with-bom Includes BOM character at the beginning of the CSV.
-p, --pretty Print output as a pretty table. Use only when printing to console.
-h, --help output usage information
-V, --version output the version number
-i, --input <input> Path and name of the incoming json file. Defaults to stdin.
-o, --output <output> Path and name of the resulting csv file. Defaults to stdout.
-c, --config <path> Specify a file with a valid JSON configuration.
-n, --ndjson Treat the input as NewLine-Delimited JSON.
-s, --no-streaming Process the whole JSON array in memory instead of doing it line by line.
-f, --fields <fields> List of fields to process. Defaults to field auto-detection.
-v, --default-value <defaultValue> Default value to use for missing fields.
-q, --quote <quote> Character(s) to use as quote mark. Defaults to '"'.
-Q, --escaped-quote <escapedQuote> Character(s) to use as a escaped quote. Defaults to a double `quote`, '""'.
-d, --delimiter <delimiter> Character(s) to use as delimiter. Defaults to ','. (default: ",")
-e, --eol <eol> Character(s) to use as End-of-Line for separating rows. Defaults to '\n'. (default: "\n")
-E, --excel-strings Wraps string data to force Excel to interpret it as string even if it contains a number.
-H, --no-header Disable the column name header.
-a, --include-empty-rows Includes empty rows in the resulting CSV output.
-b, --with-bom Includes BOM character at the beginning of the CSV.
-p, --pretty Print output as a pretty table. Use only when printing to console.
--unwind [paths] Creates multiple rows from a single JSON document similar to MongoDB unwind.
--unwind-blank When unwinding, blank out instead of repeating data. Defaults to false. (default: false)
--flatten-objects Flatten nested objects. Defaults to false. (default: false)
--flatten-arrays Flatten nested arrays. Defaults to false. (default: false)
--flatten-separator <separator> Flattened keys separator. Defaults to '.'. (default: ".")
-h, --help output usage information
```

@@ -85,5 +87,7 @@

If no output `-o` is specified the result is printed to the console standard output.
If no fields `-f` or `-c` are passed the fields of the first element are used since json2csv CLI process the items one at a time. You can use the `--no-streaming` flag to load the entire JSON in memory and get all the headers. However, keep in mind that this is slower and requires much more memory.
If no fields `-f` or config `-c` are passed the fields of the first element are used since json2csv CLI process the items one at a time. You can use the `--no-streaming` flag to load the entire JSON in memory and get all the headers. However, keep in mind that this is slower and requires much more memory.
Use `-p` to show the result as a table in the console.
Any option passed through the config file `-c` will be overriden if a specific flag is passed as well. For example, the fields option of the config will be overriden if the fields flag `-f` is used.
### CLI examples

@@ -177,9 +181,6 @@

- `ndjson` - Only effective on the streaming API. Indicates that data coming through the stream is NDJSON.
- `unwind` - Array of Strings, creates multiple rows from a single JSON document similar to MongoDB's $unwind
- `unwindBlank` - Boolean, unwind using blank values instead of repeating data.
- `flatten` - Boolean, flattens nested objects. Defaults to `false`.
- `flattenSeparator` - String, separator to use between nested JSON keys when `flatten` option enabled. Defaults to `.` if not specified.
- `transforms` - Array of transforms to be applied to each data item. A transform is simply a function that receives a data item and returns the transformed item.
- `defaultValue` - String, default value to use when missing data. Defaults to `<empty>` if not specified. (Overridden by `fields[].default`)
- `quote` - String, quote around cell values and column names. Defaults to `"` if not specified.
- `doubleQuote` - String, the value to replace double quote in strings. Defaults to 2x`quotes` (for example `""`) if not specified.
- `escapedQuote` - String, the value to replace escaped quotes in strings. Defaults to 2x`quotes` (for example `""`) if not specified.
- `delimiter` - String, delimiter of columns. Defaults to `,` if not specified.

@@ -272,3 +273,3 @@ - `eol` - String, overrides the default OS line ending (i.e. `\n` on Unix and `\r\n` on Windows).

* `toOutput` allows you to set the output stream.
* `promise` returns a promise that resolves when the stream ends or errors.
* `promise` returns a promise that resolves when the stream ends or errors. Takes a boolean parameter to indicate if the resulting CSV should be kept in-memory and be resolved by the promise.

@@ -283,8 +284,19 @@ ```js

// Using the promise API
const input = createReadStream(inputPath, { encoding: 'utf8' });
const asyncParser = new JSON2CSVAsyncParser(opts, transformOpts);
const parsingProcessor = asyncParser.fromInput(input);
parsingProcessor.promise()
.then(csv => console.log(csv))
.catch(err => console.error(err));
// Using the promise API just to know when the process finnish
// but not actually load the CSV in memory
const input = createReadStream(inputPath, { encoding: 'utf8' });
const output = createWriteStream(outputPath, { encoding: 'utf8' });
const asyncParser = new JSON2CSVAsyncParser(opts, transformOpts);
asyncParser.fromInput(input).toOutput(output).promise()
.then(csv => console.log(csv))
  .catch(err => console.error(err));
const parsingProcessor = asyncParser.fromInput(input).toOutput(output);
parsingProcessor.promise(false).catch(err => console.error(err));
```

@@ -333,21 +345,91 @@

```js
const { Transform } = require("json2csv");
const { Readable } = require('stream');
const { Transform } = require("json2csv");
const { Readable } = require('stream');
const input = new Readable({ objectMode: true });
input._read = () => {};
// myObjectEmitter is just a fake example representing anything that emit objects.
myObjectEmitter.on('object', obj => input.push(obj));
// Pushing a null close the stream
myObjectEmitter.end(()) => input.push(null));
const input = new Readable({ objectMode: true });
input._read = () => {};
// myObjectEmitter is just a fake example representing anything that emits objects.
myObjectEmitter.on('object', obj => input.push(obj));
// Pushing a null closes the stream
myObjectEmitter.end(() => input.push(null));
const output = process.stdout;
const output = process.stdout;
const opts = {};
const transformOpts = { objectMode: true };
const opts = {};
const transformOpts = { objectMode: true };
const json2csv = new Transform(opts, transformOpts);
const processor = input.pipe(json2csv).pipe(output);
const json2csv = new Transform(opts, transformOpts);
const processor = input.pipe(json2csv).pipe(output);
```
### Data transforms
json2csv supports data transforms. A transform is simply a function that receives a data item and returns the transformed item.
#### Custom transforms
```js
function (item) {
  // apply transformations or create new object
return transformedItem;
}
```
or using ES6
```js
(item) => {
  // apply transformations or create new object
return transformedItem;
}
```
For example, let's add a line counter to our CSV, capitalize the car field and change the price to be in Ks (1000s).
```js
let counter = 1;
(item) => ({ counter: counter++, ...item, car: item.car.toUpperCase(), price: item.price / 1000 });
```
#### Built-in transforms
There are a number of built-in transforms provided by the library.
```js
const { transforms: { unwind, flatten } } = require('json2csv');
```
##### Unwind
The unwind transform deconstructs an array field from the input item to output a row for each element. It's similar to MongoDB's $unwind aggregation.
The transform needs to be instantiated and takes an options object as arguments containing:
- `paths` - Array of String, list the paths to the fields to be unwound. It's mandatory and should not be empty.
- `blankOut` - Boolean, unwind using blank values instead of repeating data. Defaults to `false`.
```js
// Default
unwind({ paths: ['fieldToUnwind'] });
// Blanking out repeated data
unwind({ paths: ['fieldToUnwind'], blankOut: true });
```
##### Flatten
Flatten nested javascript objects into a single level object.
The transform needs to be instantiated and takes an options object as arguments containing:
- `objects` - Boolean, whether to flatten JSON objects or not. Defaults to `true`.
- `arrays`- Boolean, whether to flatten Arrays or not. Defaults to `false`.
- `separator` - String, separator to use between nested JSON keys when flattening a field. Defaults to `.`.
```js
// Default
flatten();
// Custom separator '__'
flatten({ separator: '__' });
// Flatten only arrays
flatten({ objects: false, arrays: true });
```
### Javascript module examples

@@ -374,3 +456,3 @@

value: (row, field) => row[field.label].toLowerCase() ||field.default,
default: 'NULL', // default if value function returns null or undefined
default: 'NULL' // default if value function returns null or undefined
},

@@ -385,4 +467,3 @@

{
value: (row) => `"${row.arrayField.join(',')}"`,
stringify: false // This flag signals if the resulting string should be quoted (stringified) or not (optional, default: true)
value: (row) => `"${row.arrayField.join(',')}"`
},

@@ -398,3 +479,2 @@ ]

const fields = ['car', 'price', 'color'];
const myCars = [

@@ -416,3 +496,3 @@ {

const json2csvParser = new Parser({ fields });
const json2csvParser = new Parser();
const csv = json2csvParser.parse(myCars);

@@ -426,3 +506,3 @@

```
car, price, color
"car", "price", "color"
"Audi", 40000, "blue"

@@ -435,3 +515,3 @@ "BMW", 35000, "black"

Similarly to [mongoexport](http://www.mongodb.org/display/DOCS/mongoexport) you can choose which fields to export.
You can choose which fields to include in the CSV.

@@ -448,6 +528,6 @@ ```js

Results in
will output to console
```
car, color
"car", "color"
"Audi", "blue"

@@ -479,2 +559,11 @@ "BMW", "black"

will output to console
```
"Car Name", "Price USD"
"Audi", 40000
"BMW", 35000
"Porsche", 60000
```
#### Example 4

@@ -487,14 +576,13 @@

const fields = ['car.make', 'car.model', 'price', 'color'];
const myCars = [
{
"car": {"make": "Audi", "model": "A3"},
"car": { "make": "Audi", "model": "A3" },
"price": 40000,
"color": "blue"
}, {
"car": {"make": "BMW", "model": "F20"},
"car": { "make": "BMW", "model": "F20" },
"price": 35000,
"color": "black"
}, {
"car": {"make": "Porsche", "model": "9PA AF1"},
"car": { "make": "Porsche", "model": "9PA AF1" },
"price": 60000,

@@ -505,2 +593,4 @@ "color": "green"

const fields = ['car.make', 'car.model', 'price', 'color'];
const json2csvParser = new Parser({ fields });

@@ -515,3 +605,3 @@ const csv = json2csvParser.parse(myCars);

```
car.make, car.model, price, color
"car.make", "car.model", "price", "color"
"Audi", "A3", 40000, "blue"

@@ -529,5 +619,3 @@ "BMW", "F20", 35000, "black"

const fields = ['car', 'price', 'color'];
const json2csvParser = new Parser({ fields, delimiter: '\t' });
const json2csvParser = new Parser({ delimiter: '\t' });
const tsv = json2csvParser.parse(myCars);

@@ -538,6 +626,6 @@

Will output:
will output to console
```
car price color
"car" "price" "color"
"Audi" 10000 "blue"

@@ -549,3 +637,3 @@ "BMW" 15000 "red"

If no delimiter is specified, the default `,` is used
If no delimiter is specified, the default `,` is used.

@@ -559,11 +647,3 @@ #### Example 6

const fields = [{
label: 'Car Name',
value: 'car'
},{
label: 'Price USD',
value: 'price'
}];
const json2csvParser = new Parser({ fields, quote: '' });
const json2csvParser = new Parser({ quote: '' });
const csv = json2csvParser.parse(myCars);

@@ -574,9 +654,9 @@

Results in
will output to console
```
Car Name, Price USD
Audi, 10000
BMW, 15000
Porsche, 30000
car, price, color
Audi, 40000, blue
BMW, 35000, black
Porsche, 60000, green
```

@@ -586,8 +666,7 @@

You can unwind arrays similar to MongoDB's $unwind operation using the `unwind` option.
You can unwind arrays similar to MongoDB's $unwind operation using the `unwind` transform.
```js
const { Parser } = require('json2csv');
const { Parser, transforms: { unwind } } = require('json2csv');
const fields = ['carModel', 'price', 'colors'];
const myCars = [

@@ -613,3 +692,6 @@ {

const json2csvParser = new Parser({ fields, unwind: 'colors' });
const fields = ['carModel', 'price', 'colors'];
const transforms = [unwind('colors')];
const json2csvParser = new Parser({ fields, transforms });
const csv = json2csvParser.parse(myCars);

@@ -642,3 +724,2 @@

const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color'];
const myCars = [

@@ -688,3 +769,5 @@ {

const json2csvParser = new Parser({ fields, unwind: ['items', 'items.items'] });
const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color'];
const transforms = [unwind(['items', 'items.items'])];
const json2csvParser = new Parser({ fields, transforms });
const csv = json2csvParser.parse(myCars);

@@ -714,3 +797,2 @@

const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color'];
const myCars = [

@@ -760,3 +842,6 @@ {

const json2csvParser = new Parser({ fields, unwind: ['items', 'items.items'], unwindBlank: true });
const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color'];
const transforms = [unwind(['items', 'items.items'], true)];
const json2csvParser = new Parser({ fields, transforms });
const csv = json2csvParser.parse(myCars);

@@ -779,4 +864,6 @@

### Migrating from 3.X to 4.X
### Migrations
#### Migrating from 3.X to 4.X
What in 3.X used to be

@@ -788,6 +875,6 @@ ```js

can be replaced by
should be replaced by
```js
const Json2csvParser = require('json2csv').Parser;
const json2csvParser = new Json2csvParser({ fields: myFields, unwind: paths, ... });
const { Parser } = require('json2csv');
const json2csvParser = new Parser({ fields: myFields, unwind: paths, ... });
const csv = json2csvParser.parse(myData);

@@ -805,2 +892,28 @@ ```

#### Migrating from 4.X to 5.X
In the CLI, the config file option, `-c`, used to be a list of fields and now it's expected to be a full configuration object.
The `stringify` option has been removed.
`doubleQuote` has been renamed to `escapedQuote`.
The `unwind` and `flatten` -related options have been moved to their own transforms.
What used to be
```js
const { Parser } = require('json2csv');
const json2csvParser = new Parser({ unwind: paths, unwindBlank: true, flatten: true, flattenSeparator: '__' });
const csv = json2csvParser.parse(myData);
```
should be replaced by
```js
const { Parser, transforms: { unwind, flatten } } = require('json2csv');
const json2csvParser = new Parser({ transforms: [unwind(paths, true), flatten('__')] });
const csv = json2csvParser.parse(myData);
```
You can see the documentation for json2csv v4.X.X [here](https://github.com/zemirco/json2csv/blob/v4/README.md).
## Known Gotchas

@@ -818,11 +931,19 @@

The `excelString` option produces a Excel-specific CSV file that forces Excel to interpret string fields as strings. Please note that the CSV will look incorrect if viewing it somewhere else than Excel.
Enabling the `excelString` option produces an Excel-specific CSV file that forces Excel to interpret string fields as strings. Please note that the CSV will look incorrect if viewing it somewhere else than Excel.
#### Avoiding CSV injection
As part of Excel's automatic format detection, fields regarded as formulas (starting with `=`, `+`, `-` or `@`) are interpreted regardless of whether the field is quoted or not, creating a security risk (see [CSV Injection](https://www.owasp.org/index.php/CSV_Injection)).
This issue has nothing to do with the CSV format, since CSV knows nothing about formulas, but with how Excel parses CSV files.
Enabling the `excelString` option produces an Excel-specific CSV file that forces Excel to interpret string fields as strings. Please note that the CSV will look incorrect if viewing it somewhere else than Excel.
#### Preserving new lines
Excel only recognize `\r\n` as valid new line inside a cell.
Excel only recognizes `\r\n` as valid new line inside a cell.
#### Unicode Support
Excel can display Unicode correctly (just setting the `withBOM` option to true). However, Excel can't save unicode so, if you do changes to the CSV and save it from Excel, the Unicode character will not be displayed correctly.
Excel can display Unicode correctly (just setting the `withBOM` option to true). However, Excel can't save unicode so, if you perform any changes to the CSV and save it from Excel, the Unicode characters will not be displayed correctly.

@@ -829,0 +950,0 @@

@@ -33,3 +33,2 @@ import resolve from 'rollup-plugin-node-resolve';

output: [
{ file: pkg.main, format: 'cjs' },
{ file: pkg.module, format: 'es' }

@@ -36,0 +35,0 @@ ],

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc