
json-2-csv - npm Package Compare versions

Comparing version 1.0.8 to 1.1.0

.idea/.name

bower.json
{
"name": "json-2-csv",
"version": "1.0.7",
"version": "1.1.0",
"homepage": "https://github.com/mrodrig/json-2-csv",

@@ -18,4 +18,4 @@ "moduleType": [

"underscore": "1.6.0",
"async": "0.2.10"
"bluebird": "2.9.24"
}
}

@@ -7,3 +7,5 @@ 'use strict';

- // Default options; By using a function this is essentially a 'static' variable
+ /**
+  * Default options
+  */
var defaultOptions = {

@@ -19,12 +21,15 @@ DELIMITER : {

- // Build the options to be passed to the appropriate function
- // If a user does not provide custom options, then we use our default
- // If options are provided, then we set each valid key that was passed
+ /**
+  * Build the options to be passed to the appropriate function
+  * If a user does not provide custom options, then we use our default
+  * If options are provided, then we set each valid key that was passed
+  */
var buildOptions = function (opts, cb) {
- opts = opts ? opts : {}; // If undefined, set to an empty doc
- var out = _.defaults(opts, defaultOptions);
+ opts = _.defaults(opts || {}, defaultOptions);
+ // Note: _.defaults does a shallow default, we need to deep copy the DELIMITER object
+ opts.DELIMITER = _.defaults(opts.DELIMITER || {}, defaultOptions.DELIMITER);
// If the delimiter fields are the same, report an error to the caller
- if (out.DELIMITER.FIELD === out.DELIMITER.ARRAY) { return cb(new Error('The field and array delimiters must differ.')); }
+ if (opts.DELIMITER.FIELD === opts.DELIMITER.ARRAY) { return cb(new Error('The field and array delimiters must differ.')); }
// Otherwise, send the options back
- else { return cb(null, out); }
+ else { return cb(null, opts); }
};
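The key behavioral change in this hunk: underscore's _.defaults only merges top-level keys, so 1.1.0 adds a second _.defaults pass for the nested DELIMITER object. A minimal sketch of why that matters; the concrete default values below are illustrative assumptions, not taken from this diff:

var _ = require('underscore');

// Assumed defaults for illustration only - the real values live in defaultOptions above.
var defaults = { DELIMITER: { FIELD: ',', ARRAY: ';' }, EOL: '\n' };
var userOpts = { DELIMITER: { FIELD: '\t' } };

var merged = _.defaults(userOpts, defaults);
// Shallow merge: merged.DELIMITER is still the user's object, so ARRAY is missing here.

merged.DELIMITER = _.defaults(merged.DELIMITER || {}, defaults.DELIMITER);
// Deep default: { FIELD: '\t', ARRAY: ';' } - the override is kept and the gap is filled.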

@@ -31,0 +36,0 @@

'use strict';
- var _ = require('underscore'),
- async = require('async');
+ var _ = require('underscore');

@@ -37,3 +36,3 @@ var options = {}; // Initialize the options - this will be populated when the csv2json function is called.

var keyExists = function (key, doc) {
- return (typeof doc[key] !== 'undefined'); // If the key doesn't exist, then the type is 'undefined'
+ return (!_.isUndefined(doc[key])); // If the key doesn't exist, then the type is 'undefined'
};

@@ -100,3 +99,3 @@

if (!data) { callback(new Error('Cannot call csv2json on ' + data + '.')); return null; } // If we don't receive data, report an error
- if (typeof data !== 'string') { // The data is not a string
+ if (!_.isString(data)) { // The data is not a string
callback(new Error("CSV is not a string.")); // Report an error back to the caller

@@ -103,0 +102,0 @@ }

'use strict';
var _ = require('underscore'),
- async = require('async');
+ Promise = require('bluebird');
- var options = {}; // Initialize the options - this will be populated when the csv2json function is called.
+ var options = {}; // Initialize the options - this will be populated when the json2csv function is called.
+ // Retrieve the headings for all documents and return it. This checks that all documents have the same schema.
+ var generateHeading = function(data) {
+ return new Promise(function (resolve, reject) {
+ var keys = _.map(_.keys(data), function (key, indx) { // for each key
+ if (_.isObject(data[key])) {
+ // if the data at the key is a document, then we retrieve the subHeading starting with an empty string heading and the doc
+ return generateSubHeading('', data[key]);
+ }
+ return key;
+ });
+ // Check for a consistent schema that does not require the same order:
+ // if we only have one document - then there is no possiblility of multiple schemas
+ if (keys && keys.length <= 1) {
+ return resolve(_.flatten(keys) || []);
+ }
+ // else - multiple documents - ensure only one schema (regardless of field ordering)
+ var firstDocSchema = _.flatten(keys[0]);
+ _.each(keys, function (keyList) {
+ // If there is a difference between the schemas, throw the inconsistent schema error
+ var diff = _.difference(firstDocSchema, _.flatten(keyList));
+ if (!_.isEqual(diff, [])) {
+ return reject(new Error('Not all documents have the same schema.'));
+ }
+ });
+ return resolve(_.flatten(keys[0]));
+ });
+ };
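generateHeading replaces the old retrieveHeading: it returns a bluebird promise that resolves with the first document's flattened key list once every document is verified to contain the same keys, regardless of key order. A usage sketch, assuming generateHeading and the generateSubHeading helper shown just below are in scope:

generateHeading([
    { first: 'John', last: 'Doe' },
    { last: 'Roe', first: 'Jane' } // same keys, different order - accepted as of 1.1.0
]).then(function (headingKeys) {
    console.log(headingKeys); // ['first', 'last'] - taken from the first document
}).catch(function (err) {
    console.log(err.message); // 'Not all documents have the same schema.' when key sets differ
});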
// Takes the parent heading and this doc's data and creates the subdocument headings (string)
- var retrieveSubHeading = function (heading, data) {
- var subKeys = _.keys(data), // retrieve the keys from the current document
- newKey; // temporary variable to aid in determining the heading - used to generate the 'nested' headings
- _.each(subKeys, function (subKey, indx) {
+ var generateSubHeading = function(heading, data) {
+ var subKeys, // retrieve the keys from the current document
+ newKey = ''; // temporary variable to aid in determining the heading - used to generate the 'nested' headings
+ subKeys = _.map(_.keys(data), function (subKey) {
// If the given heading is empty, then we set the heading to be the subKey, otherwise set it as a nested heading w/ a dot
newKey = heading === '' ? subKey : heading + '.' + subKey;
- if (typeof data[subKey] === 'object' && data[subKey] !== null && typeof data[subKey].length === 'undefined' && _.keys(data[subKey]).length > 0) { // If we have another nested document
- subKeys[indx] = retrieveSubHeading(newKey, data[subKey]); // Recur on the subdocument to retrieve the full key name
+ if (_.isObject(data[subKey]) && !_.isNull(data[subKey]) && _.isUndefined(data[subKey].length) && _.keys(data[subKey]).length > 0) { // If we have another nested document
+ return generateSubHeading(newKey, data[subKey]); // Recur on the sub-document to retrieve the full key name
} else {
- subKeys[indx] = (options.DELIMITER.WRAP || '') + (newKey || '') + (options.DELIMITER.WRAP || ''); // Set the key name since we don't have a sub document
+ return newKey; // Set the key name since we don't have a sub document
}
});
- return subKeys.join(options.DELIMITER.FIELD); // Return the headings joined by our field delimiter
- };
- // Retrieve the headings for all documents and return it. This checks that all documents have the same schema.
- var retrieveHeading = function (data) {
- return function (cb) { // Returns a function that takes a callback - the function is passed to async.parallel
- var keys = _.keys(data); // Retrieve the current data keys
- _.each(keys, function (key, indx) { // for each key
- if (typeof data[key] === 'object') {
- // if the data at the key is a document, then we retrieve the subHeading starting with an empty string heading and the doc
- keys[indx] = retrieveSubHeading('', data[key]);
- }
- });
- // Retrieve the unique array of headings (keys)
- keys = _.uniq(keys);
- // If we have more than 1 unique list, then not all docs have the same schema - report an error
- if (keys.length > 1) { throw new Error('Not all documents have the same schema.', keys); }
- return cb(null, _.flatten(keys).join(options.DELIMITER.FIELD)); // Return headings back
- };
+ return subKeys; // Return the headings joined by our field delimiter
};
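Unlike the old retrieveSubHeading, generateSubHeading no longer wraps or joins anything; it returns a (possibly nested) array of dot-delimited key paths, and the wrapping and joining are deferred to json2csv. A small sketch of the return shape, assuming the function above is in scope:

var doc = { name: 'Eiffel Tower', geo: { lat: 48.86, lon: 2.29 } };
generateSubHeading('', doc);
// => ['name', ['geo.lat', 'geo.lon']] - flattened later into 'name', 'geo.lat', 'geo.lon'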

@@ -46,26 +59,36 @@

value; // Temporary variable to store the current data
- _.each(keys, function (key, indx) { // For each key
- value = data[key]; // Set the current data that we are looking at
- if (keys.indexOf(key) > -1) { // If the keys contain the current key, then process the data
- if (_.isArray(value)) { // We have an array of values
- output.push((options.DELIMITER.WRAP || '') + '[' + value.join(options.DELIMITER.ARRAY) + ']' + (options.DELIMITER.WRAP || ''));
- } else if (_.isDate(value)) { // If we have a date
- output.push(value.toString());
- } else if (_.isObject(value)) { // If we have an object
- output.push(convertData(value, _.keys(value))); // Push the recursively generated CSV
- } else {
- value = value == null ? '' : value.toString();
- output.push((options.DELIMITER.WRAP || '') + value + (options.DELIMITER.WRAP || '')); // Otherwise push the current value
- }
+ _.each(keys, function (key) { // For each key
+ var indexOfPeriod = _.indexOf(key, '.');
+ if (indexOfPeriod > -1) {
+ var pathPrefix = key.slice(0, indexOfPeriod),
+ pathRemainder = key.slice(indexOfPeriod+1);
+ output.push(convertData(data[pathPrefix], [pathRemainder]));
+ } else if (keys.indexOf(key) > -1) { // If the keys contain the current key, then process the data
+ value = data[key]; // Set the current data that we are looking at
+ convertField(value, output);
}
});
- return output.join(options.DELIMITER.FIELD); // Return the data joined by our field delimiter
+ return output; // Return the data joined by our field delimiter
};
+ var convertField = function (value, output) {
+ if (_.isArray(value)) { // We have an array of values
+ output.push(options.DELIMITER.WRAP + '[' + value.join(options.DELIMITER.ARRAY) + ']' + options.DELIMITER.WRAP);
+ } else if (_.isDate(value)) { // If we have a date
+ output.push(value.toString());
+ } else if (_.isObject(value)) { // If we have an object
+ output.push(convertData(value, _.keys(value))); // Push the recursively generated CSV
+ } else {
+ value = value === null ? '' : value.toString();
+ output.push(options.DELIMITER.WRAP + value + options.DELIMITER.WRAP); // Otherwise push the current value
+ }
+ };
// Generate the CSV representing the given data.
- var generateCsv = function (data) {
- return function (cb) { // Returns a function that takes a callback - the function is passed to async.parallel
- // Reduce each JSON document in data to a CSV string and append it to the CSV accumulator
- return cb(null, _.reduce(data, function (csv, doc) { return csv += convertData(doc, _.keys(doc)) + options.EOL; }, ''));
- };
+ var generateCsv = function (data, headingKeys) {
+ // Reduce each JSON document in data to a CSV string and append it to the CSV accumulator
+ return Promise.resolve([headingKeys, _.reduce(data, function (csv, doc) {
+ return csv += _.flatten(convertData(doc, headingKeys)).join(options.DELIMITER.FIELD) + options.EOL;
+ }, '')]);
};
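convertData now walks dot-delimited heading keys into the matching sub-documents and hands each leaf value to the new convertField helper, returning a nested array that generateCsv flattens and joins. A sketch of a single row, assuming the functions above are in scope and that options has already been populated by buildOptions with, say, a ',' field delimiter, ';' array delimiter, and empty WRAP (assumed values):

var doc = { name: 'Paris', geo: { lat: 48.86, lon: 2.29 }, tags: ['city', 'capital'] };
var row = convertData(doc, ['name', 'geo.lat', 'geo.lon', 'tags']);
console.log(_.flatten(row).join(options.DELIMITER.FIELD));
// => Paris,48.86,2.29,[city;capital]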

@@ -79,21 +102,31 @@

if (!callback) { throw new Error('A callback is required!'); } // If a callback wasn't provided, throw an error
- if (!opts) { callback(new Error('Options were not passed and are required.')); return null; } // Shouldn't happen, but just in case
+ if (!opts) { return callback(new Error('Options were not passed and are required.')); } // Shouldn't happen, but just in case
else { options = opts; } // Options were passed, set the global options value
- if (!data) { callback(new Error('Cannot call json2csv on ' + data + '.')); return null; } // If we don't receive data, report an error
- if (typeof data !== 'object') { // If the data was not a single document or an array of documents
- return cb(new Error('Data provided was not an array of documents.')); // Report the error back to the caller
- } else if (typeof data === 'object' && !data.length) { // Single document, not an array
+ if (!data) { return callback(new Error('Cannot call json2csv on ' + data + '.')); } // If we don't receive data, report an error
+ if (!_.isObject(data)) { // If the data was not a single document or an array of documents
+ return callback(new Error('Data provided was not an array of documents.')); // Report the error back to the caller
+ } else if (_.isObject(data) && !data.length) { // Single document, not an array
data = [data]; // Convert to an array of the given document
}
- // Retrieve the heading and the CSV asynchronously in parallel
- async.parallel([retrieveHeading(data), generateCsv(data)], function (err, res) {
- if (!err) {
- // Data received with no errors, join the two responses with an end of line delimiter to setup heading and CSV body
- return callback(null, res.join(options.EOL));
- } else {
- return callback(err, null); // Report received error back to caller
- }
- });
+ // Retrieve the heading and then generate the CSV with the keys that are identified
+ generateHeading(data)
+ .then(_.partial(generateCsv, data))
+ .spread(function (csvHeading, csvData) {
+ if (options.DELIMITER.WRAP) {
+ csvHeading = _.map(csvHeading, function(headingKey) {
+ return options.DELIMITER.WRAP + headingKey + options.DELIMITER.WRAP;
+ });
+ }
+ csvHeading = csvHeading.join(options.DELIMITER.FIELD);
+ return callback(null, [csvHeading, csvData].join(options.EOL));
+ })
+ .catch(function (err) {
+ return callback(err);
+ });
}
};
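Put together, json2csv now resolves the heading, generates the rows, optionally wraps the heading keys, and joins heading and body with options.EOL. A usage sketch against the public module, assuming the conventional require('json-2-csv') entry point wires these pieces together as shown above and that the default delimiters (',' field, newline EOL) are in effect:

var converter = require('json-2-csv');

var documents = [
    { name: 'Paris', geo: { lat: 48.86, lon: 2.29 } },
    { geo: { lat: 41.89, lon: 12.49 }, name: 'Rome' } // same schema, different field order
];

converter.json2csv(documents, function (err, csv) {
    if (err) { return console.error(err); }
    console.log(csv);
    // name,geo.lat,geo.lon
    // Paris,48.86,2.29
    // Rome,41.89,12.49
});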

@@ -5,3 +5,3 @@ {

"description": "A JSON to CSV and CSV to JSON converter that natively supports sub-documents and auto-generates the CSV heading.",
"version": "1.0.8",
"version": "1.1.0",
"repository": {

@@ -28,3 +28,3 @@ "type": "git",

"underscore": "1.6.0",
"async": "0.2.10"
"bluebird": "~2.9.24"
},

@@ -40,2 +40,2 @@ "devDependencies": {

"license": "MIT"
}
}

@@ -140,3 +140,3 @@ # Convert JSON to CSV or CSV to JSON

- Header Generation (per document keys)
- - Verifies all documents have same schema
+ - Verifies all documents have same schema (schema field order does not matter as of 1.1.0)
- Supports sub-documents natively

@@ -155,7 +155,2 @@ - Supports arrays as document values for both json2csv and csv2json

- Created: Apr 23, 2014
- - 1K Downloads/Month: January 15, 2015
- ## TODO
- - Use PARSE_CSV_NUMBERS option to actually convert numbers. Not currently implemented.
- - Respect nested arrays when in json2csv - Currently flattens them
- - If quotes in CSV header, strip them? Add as an option?
+ - 1K Downloads/Month: January 15, 2015
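The reworded feature bullet above is the user-visible effect of switching from retrieveHeading's _.uniq check to generateHeading's order-insensitive _.difference check: documents whose fields merely appear in a different order now convert cleanly, while genuinely mismatched schemas still produce an error. A hedged sketch of both cases, again assuming the conventional require('json-2-csv') entry point and default delimiters:

var converter = require('json-2-csv');

// Same fields, different order: accepted as of 1.1.0 instead of raising a schema error.
converter.json2csv([{ a: 1, b: 2 }, { b: 3, a: 4 }], function (err, csv) {
    console.log(csv); // a,b <EOL> 1,2 <EOL> 4,3
});

// Genuinely different fields: still rejected.
converter.json2csv([{ a: 1 }, { c: 2 }], function (err, csv) {
    console.log(err.message); // 'Not all documents have the same schema.'
});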