@commercetools/csv-parser-orders
Comparing version 1.3.0 to 1.3.1
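The comparison below covers the compiled output the package publishes (built to lib/ per the build script) — the CLI entry point, the constants module, AbstractParser, AddReturnInfoParser, DeliveriesParser and LineItemStateParser — plus package.json. In each hunk, the 1.3.0 lines use ES2015 syntax (const, arrow functions, class/extends), while the corresponding 1.3.1 lines are compiled down to ES5 (var, named function expressions, and Babel's _classCallCheck/_createClass/_inherits helpers); the parser logic itself appears unchanged. The build-script change behind this is in the package.json hunk at the end.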
@@ -41,3 +41,3 @@ 'use strict';
const args = _yargs2.default.usage(`\n
var args = _yargs2.default.usage(`\n
Usage: $0 [options]
@@ -58,3 +58,3 @@ Convert commercetools order CSV data to JSON.`).showHelpOnFail(false).option('help', {
describe: 'Path to input CSV file.'
}).coerce('inputFile', arg => {
}).coerce('inputFile', function (arg) {
if (arg !== 'stdin') return _fs2.default.createReadStream(String(arg));
@@ -67,3 +67,3 @@
describe: 'Path to output JSON file.'
}).coerce('outputFile', arg => {
}).coerce('outputFile', function (arg) {
if (arg !== 'stdout') return _fs2.default.createWriteStream(String(arg));
@@ -93,4 +93,4 @@
const logError = error => {
const errorFormatter = new _prettyError2.default();
var logError = function logError(error) {
var errorFormatter = new _prettyError2.default();
@@ -100,3 +100,3 @@ if (_npmlog2.default.level === 'verbose') process.stderr.write(`ERR: ${errorFormatter.render(error)}`);else process.stderr.write(`ERR: ${error.message || error}`);
const errorHandler = errors => {
var errorHandler = function errorHandler(errors) {
if (Array.isArray(errors)) errors.forEach(logError);else logError(errors);
@@ -107,22 +107,30 @@
const getModuleConfig = () => ({
logger: {
error: _npmlog2.default.error.bind(undefined, ''),
warn: _npmlog2.default.warn.bind(undefined, ''),
info: _npmlog2.default.info.bind(undefined, ''),
verbose: _npmlog2.default.verbose.bind(undefined, '')
},
csvConfig: {
delimiter: args.delimiter,
batchSize: args.batchSize,
strictMode: args.strictMode
}
});
var getModuleConfig = function getModuleConfig() {
return {
logger: {
error: _npmlog2.default.error.bind(undefined, ''),
warn: _npmlog2.default.warn.bind(undefined, ''),
info: _npmlog2.default.info.bind(undefined, ''),
verbose: _npmlog2.default.verbose.bind(undefined, '')
},
csvConfig: {
delimiter: args.delimiter,
batchSize: args.batchSize,
strictMode: args.strictMode
}
};
};
if (args.outputFile === process.stdout) _npmlog2.default.stream = _fs2.default.createWriteStream(args.logFile);
const methodMapping = {
lineitemstate: config => new _lineItemState2.default(config),
returninfo: config => new _addReturnInfo2.default(config),
deliveries: config => new _deliveries2.default(config)
var methodMapping = {
lineitemstate: function lineitemstate(config) {
return new _lineItemState2.default(config);
},
returninfo: function returninfo(config) {
return new _addReturnInfo2.default(config);
},
deliveries: function deliveries(config) {
return new _deliveries2.default(config);
}
@@ -129,0 +137,0 @@ // Register error listener

@@ -6,3 +6,3 @@ 'use strict';
});
const CONSTANTS = {
var CONSTANTS = {
host: {
@@ -26,3 +26,3 @@ api: 'https://api.sphere.io',
// Go through object because `freeze` works shallow
};Object.keys(CONSTANTS).forEach(key => {
};Object.keys(CONSTANTS).forEach(function (key) {
Object.freeze(CONSTANTS[key]);
@@ -29,0 +29,0 @@ });

@@ -7,2 +7,4 @@ 'use strict';
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _highland = require('highland');
@@ -24,5 +26,12 @@
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/* eslint class-methods-use-this:["error",{"exceptMethods":["_processData"]}] */
class AbstractParser {
constructor(conf = {}, moduleName) {
var AbstractParser = function () {
function AbstractParser() {
var conf = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var moduleName = arguments[1];
_classCallCheck(this, AbstractParser);
this.moduleName = moduleName;
@@ -37,37 +46,51 @@
this.logger = (0, _lodash.defaults)(conf.logger || {}, {
error: () => {},
warn: () => {},
info: () => {},
verbose: () => {}
error: function error() {},
warn: function warn() {},
info: function info() {},
verbose: function verbose() {}
});
}
_streamInput(input, output) {
let rowIndex = 1;
_createClass(AbstractParser, [{
key: '_streamInput',
value: function _streamInput(input, output) {
var _this = this;
return (0, _highland2.default)(input).through((0, _csvParser2.default)({
separator: this.csvConfig.delimiter,
strict: this.csvConfig.strictMode
})).stopOnError(err => {
this.logger.error(err);
return output.emit('error', err);
}).batch(this.csvConfig.batchSize).doto(data => {
this.logger.verbose(`Parsed row-${rowIndex}: ${JSON.stringify(data)}`);
rowIndex += 1;
}).flatMap(_highland2.default).flatMap(data => (0, _highland2.default)(this._processData(data))).stopOnError(err => {
this.logger.error(err);
return output.emit('error', err);
}).doto(data => this.logger.verbose(`Converted row-${rowIndex}: ${JSON.stringify(data)}`));
}
var rowIndex = 1;
_getMissingHeaders(data) {
const headerDiff = (0, _lodash.difference)(_constants2.default.requiredHeaders[this.moduleName], Object.keys(data));
return (0, _highland2.default)(input).through((0, _csvParser2.default)({
separator: this.csvConfig.delimiter,
strict: this.csvConfig.strictMode
})).stopOnError(function (err) {
_this.logger.error(err);
return output.emit('error', err);
}).batch(this.csvConfig.batchSize).doto(function (data) {
_this.logger.verbose(`Parsed row-${rowIndex}: ${JSON.stringify(data)}`);
rowIndex += 1;
}).flatMap(_highland2.default).flatMap(function (data) {
return (0, _highland2.default)(_this._processData(data));
}).stopOnError(function (err) {
_this.logger.error(err);
return output.emit('error', err);
}).doto(function (data) {
return _this.logger.verbose(`Converted row-${rowIndex}: ${JSON.stringify(data)}`);
});
}
}, {
key: '_getMissingHeaders',
value: function _getMissingHeaders(data) {
var headerDiff = (0, _lodash.difference)(_constants2.default.requiredHeaders[this.moduleName], Object.keys(data));
return headerDiff;
}
return headerDiff;
}
}, {
key: '_processData',
value: function _processData() {
throw new Error('Method AbstractParser._processData has to be overridden!');
}
}]);
_processData() {
throw new Error('Method AbstractParser._processData has to be overridden!');
}
}
return AbstractParser;
}();
exports.default = AbstractParser;

@@ -7,2 +7,4 @@ 'use strict';
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _JSONStream = require('JSONStream');
@@ -20,84 +22,110 @@
class AddReturnInfoParser extends _abstractParser2.default {
constructor(config) {
super(config, 'returnInfo');
}
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
parse(input, output) {
this.logger.info('Starting Return Info CSV conversion');
this._streamInput(input, output).reduce([], AddReturnInfoParser._reduceOrders).stopOnError(err => {
this.logger.error(err);
return output.emit('error', err);
}).pipe(_JSONStream2.default.stringify(false)).pipe(output);
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var AddReturnInfoParser = function (_AbstractParser) {
_inherits(AddReturnInfoParser, _AbstractParser);
function AddReturnInfoParser(config) {
_classCallCheck(this, AddReturnInfoParser);
return _possibleConstructorReturn(this, (AddReturnInfoParser.__proto__ || Object.getPrototypeOf(AddReturnInfoParser)).call(this, config, 'returnInfo'));
}
_processData(data) {
this.logger.verbose('Processing data to CTP format');
_createClass(AddReturnInfoParser, [{
key: 'parse',
value: function parse(input, output) {
var _this2 = this;
const missingHeaders = this._getMissingHeaders(data);
if (missingHeaders.length) return Promise.reject(new Error(`Required headers missing: '${missingHeaders.join(',')}'`));
this.logger.info('Starting Return Info CSV conversion');
this._streamInput(input, output).reduce([], AddReturnInfoParser._reduceOrders).stopOnError(function (err) {
_this2.logger.error(err);
return output.emit('error', err);
}).pipe(_JSONStream2.default.stringify(false)).pipe(output);
}
}, {
key: '_processData',
value: function _processData(data) {
this.logger.verbose('Processing data to CTP format');
/**
* Sample returnInfo object that the API supports:
*
* orderNumber: String,
* returnInfo: [{
* returnTrackingId: String,
* returnDate: DateTime,
* items: [{
* quantity: String,
* lineItemId: String,
* comment: String,
* shipmentState: Ref
* }]
* }]
*/
const result = {
orderNumber: data.orderNumber,
returnInfo: [{
returnTrackingId: data.returnTrackingId,
_returnId: data._returnId, // Internal value to group the returnInfo
returnDate: data.returnDate,
items: [{
quantity: parseInt(data.quantity, 10),
lineItemId: data.lineItemId,
comment: data.comment,
shipmentState: data.shipmentState
var missingHeaders = this._getMissingHeaders(data);
if (missingHeaders.length) return Promise.reject(new Error(`Required headers missing: '${missingHeaders.join(',')}'`));
/**
* Sample returnInfo object that the API supports:
*
* orderNumber: String,
* returnInfo: [{
* returnTrackingId: String,
* returnDate: DateTime,
* items: [{
* quantity: String,
* lineItemId: String,
* comment: String,
* shipmentState: Ref
* }]
* }]
*/
var result = {
orderNumber: data.orderNumber,
returnInfo: [{
returnTrackingId: data.returnTrackingId,
_returnId: data._returnId, // Internal value to group the returnInfo
returnDate: data.returnDate,
items: [{
quantity: parseInt(data.quantity, 10),
lineItemId: data.lineItemId,
comment: data.comment,
shipmentState: data.shipmentState
}]
}]
}]
};
return Promise.resolve(result);
}
};
return Promise.resolve(result);
}
}], [{
key: '_reduceOrders',
value: function _reduceOrders(allOrders, currentOrder) {
var _existingOrder$return;
static _reduceOrders(allOrders, currentOrder) {
/**
* Reduce all orders to one order object
* 1. Group all orders by the orderNumber
* 2. Group all returnInfo of an order by the _returnId
*/
/**
* Reduce all orders to one order object
* 1. Group all orders by the orderNumber
* 2. Group all returnInfo of an order by the _returnId
*/
// push first order into final array
if (!allOrders.length) return allOrders.concat(currentOrder);
// push first order into final array
if (!allOrders.length) return allOrders.concat(currentOrder);
// find order in final array with this orderNumber
const existingOrder = (0, _lodash.find)(allOrders, ['orderNumber', currentOrder.orderNumber]);
// find order in final array with this orderNumber
var existingOrder = (0, _lodash.find)(allOrders, ['orderNumber', currentOrder.orderNumber]);
// if currentOrder (with this orderNumber) haven't been inserted yet
// push it directly into final array
if (!existingOrder) return allOrders.concat(currentOrder);
// if currentOrder (with this orderNumber) haven't been inserted yet
// push it directly into final array
if (!existingOrder) return allOrders.concat(currentOrder);
// if there is already an order with this orderNumber
// get all returnInfos with same returnId
const existingReturnInfos = (0, _lodash.filter)(existingOrder.returnInfo, ['_returnId', currentOrder.returnInfo[0]._returnId]);
// if there is already an order with this orderNumber
// get all returnInfos with same returnId
var existingReturnInfos = (0, _lodash.filter)(existingOrder.returnInfo, ['_returnId', currentOrder.returnInfo[0]._returnId]);
// if there is no returnInfo with this returnId push those from currentOrder
if (!existingReturnInfos.length) existingOrder.returnInfo.push(...currentOrder.returnInfo);else
// else concat items from currentOrder
existingReturnInfos.forEach(returnInfo => {
returnInfo.items.push(...currentOrder.returnInfo[0].items);
});
// if there is no returnInfo with this returnId push those from currentOrder
if (!existingReturnInfos.length) (_existingOrder$return = existingOrder.returnInfo).push.apply(_existingOrder$return, _toConsumableArray(currentOrder.returnInfo));else
// else concat items from currentOrder
existingReturnInfos.forEach(function (returnInfo) {
var _returnInfo$items;
return allOrders;
}
}
(_returnInfo$items = returnInfo.items).push.apply(_returnInfo$items, _toConsumableArray(currentOrder.returnInfo[0].items));
});
return allOrders;
}
}]);
return AddReturnInfoParser;
}(_abstractParser2.default);
exports.default = AddReturnInfoParser;

@@ -7,2 +7,6 @@ 'use strict';
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _lodash = require('lodash');
@@ -34,230 +38,283 @@
class DeliveriesParser extends _abstractParser2.default {
constructor(config) {
super(config, 'deliveries');
}
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
parse(input, output) {
this.logger.info('Starting Deliveries CSV conversion');
this._streamInput(input, output).reduce([], DeliveriesParser._groupByDeliveryId).stopOnError(err => {
this.logger.error(err);
return output.emit('error', err);
}).flatMap(data => (0, _highland2.default)(DeliveriesParser._cleanOrders(data))).pipe(_JSONStream2.default.stringify(false)).pipe(output);
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var DeliveriesParser = function (_AbstractParser) {
_inherits(DeliveriesParser, _AbstractParser);
function DeliveriesParser(config) {
_classCallCheck(this, DeliveriesParser);
return _possibleConstructorReturn(this, (DeliveriesParser.__proto__ || Object.getPrototypeOf(DeliveriesParser)).call(this, config, 'deliveries'));
}
// Take objectized CSV row and create an order object from it
_processData(data) {
this.logger.verbose('Processing data to CTP format');
const csvHeaders = Object.keys(data);
const headerDiff = _lodash2.default.difference(_constants2.default.requiredHeaders.deliveries, csvHeaders);
_createClass(DeliveriesParser, [{
key: 'parse',
value: function parse(input, output) {
var _this2 = this;
if (headerDiff.length) return Promise.reject(new Error(`Required headers missing: '${headerDiff.join(',')}'`));
this.logger.info('Starting Deliveries CSV conversion');
this._streamInput(input, output).reduce([], DeliveriesParser._groupByDeliveryId).stopOnError(function (err) {
_this2.logger.error(err);
return output.emit('error', err);
}).flatMap(function (data) {
return (0, _highland2.default)(DeliveriesParser._cleanOrders(data));
}).pipe(_JSONStream2.default.stringify(false)).pipe(output);
}
/**
* Sample delivery object that the API supports
* {
* "id": String,
* "createdAt": DateTime,
* "items": [
* {
* "id": String,
* "quantity": Number
* }
* ],
* "parcels": [
* {
* "id": String,
* "createdAt": DateTime,
* "items": [
* {
* "id": String",
* "quantity": Number
* }
* ]
* "measurements": {
* "heightInMillimeter": Number,
* "lengthInMillimeter": Number,
* "widthInMillimeter": Number,
* "weightInGram": Number
* }
* "trackingData": {
* "trackingId": String,
* "provider": String,
* "providerTransaction": String,
* "carrier": String,
* "isReturn": Boolean
* }
* }
* ]
* }
*/
// Take objectized CSV row and create an order object from it
/**
* Sample result - order object with shippingInfo.deliveries
* {
* "orderNumber": String
* "shippingInfo": {
* "deliveries": [
* ...
* ]
* }
* }
*/
}, {
key: '_processData',
value: function _processData(data) {
this.logger.verbose('Processing data to CTP format');
var csvHeaders = Object.keys(data);
var headerDiff = _lodash2.default.difference(_constants2.default.requiredHeaders.deliveries, csvHeaders);
// Basic delivery object with delivery item
const delivery = {
id: data['delivery.id'],
items: [{
// there can be multiple delivery items with same item.id and
// item.quantity therefore we use unique identifier _itemGroupId
_groupId: data._itemGroupId,
id: data['item.id'],
quantity: parseInt(data['item.quantity'], 10)
}]
if (headerDiff.length) return Promise.reject(new Error(`Required headers missing: '${headerDiff.join(',')}'`));
// Add parcel info if it is present
};if (data['parcel.id']) {
const parcel = DeliveriesParser._parseParcelInfo(data);
/**
* Sample delivery object that the API supports
* {
* "id": String,
* "createdAt": DateTime,
* "items": [
* {
* "id": String,
* "quantity": Number
* }
* ],
* "parcels": [
* {
* "id": String,
* "createdAt": DateTime,
* "items": [
* {
* "id": String",
* "quantity": Number
* }
* ]
* "measurements": {
* "heightInMillimeter": Number,
* "lengthInMillimeter": Number,
* "widthInMillimeter": Number,
* "weightInGram": Number
* }
* "trackingData": {
* "trackingId": String,
* "provider": String,
* "providerTransaction": String,
* "carrier": String,
* "isReturn": Boolean
* }
* }
* ]
* }
*/
if (parcel.measurements && Object.keys(parcel.measurements).length !== 4) return Promise.reject(new Error('All measurement fields are mandatory'));
/**
* Sample result - order object with shippingInfo.deliveries
* {
* "orderNumber": String
* "shippingInfo": {
* "deliveries": [
* ...
* ]
* }
* }
*/
delivery.parcels = [parcel];
}
// Basic delivery object with delivery item
var delivery = {
id: data['delivery.id'],
items: [{
// there can be multiple delivery items with same item.id and
// item.quantity therefore we use unique identifier _itemGroupId
_groupId: data._itemGroupId,
id: data['item.id'],
quantity: parseInt(data['item.quantity'], 10)
}]
const order = {
orderNumber: data.orderNumber,
shippingInfo: {
deliveries: [delivery]
// Add parcel info if it is present
};if (data['parcel.id']) {
var parcel = DeliveriesParser._parseParcelInfo(data);
if (parcel.measurements && Object.keys(parcel.measurements).length !== 4) return Promise.reject(new Error('All measurement fields are mandatory'));
delivery.parcels = [parcel];
}
};
return Promise.resolve(order);
}
// remove internal properties
static _cleanOrders(orders) {
orders.forEach(order => order.shippingInfo.deliveries.forEach(delivery => delivery.items.forEach(item => {
// eslint-disable-next-line no-param-reassign
delete item._groupId;
})));
return [orders];
}
var order = {
orderNumber: data.orderNumber,
shippingInfo: {
deliveries: [delivery]
}
};
return Promise.resolve(order);
}
// Will merge newOrder with orders in results array
static _groupByDeliveryId(results, newOrder) {
/*
Merge orders in following steps:
1. Group all orders by orderNumber
1. Group all delivery items by _itemGroupId
2. Group all parcel items by parcel.id
*/
// remove internal properties
// if newOrder is the first record, just push it to the results
if (!results.length) return [newOrder];
}], [{
key: '_cleanOrders',
value: function _cleanOrders(orders) {
orders.forEach(function (order) {
return order.shippingInfo.deliveries.forEach(function (delivery) {
return delivery.items.forEach(function (item) {
// eslint-disable-next-line no-param-reassign
delete item._groupId;
});
});
});
return [orders];
}
// find newOrder in results using its orderNumber
const existingOrder = results.find(order => order.orderNumber === newOrder.orderNumber);
// Will merge newOrder with orders in results array
if (!existingOrder) results.push(newOrder);else {
const oldDeliveries = existingOrder.shippingInfo.deliveries;
const newDelivery = newOrder.shippingInfo.deliveries[0];
}, {
key: '_groupByDeliveryId',
value: function _groupByDeliveryId(results, newOrder) {
/*
Merge orders in following steps:
1. Group all orders by orderNumber
1. Group all delivery items by _itemGroupId
2. Group all parcel items by parcel.id
*/
// find newDelivery in results using its id
const existingDelivery = oldDeliveries.find(delivery => delivery.id === newDelivery.id);
// if newOrder is the first record, just push it to the results
if (!results.length) return [newOrder];
// if this delivery is not yet in results array, insert it
if (!existingDelivery) oldDeliveries.push(newDelivery);else {
DeliveriesParser._mergeDeliveryItems(existingDelivery.items, newDelivery.items[0], existingDelivery);
// find newOrder in results using its orderNumber
var existingOrder = results.find(function (order) {
return order.orderNumber === newOrder.orderNumber;
});
// if delivery have parcels, merge them
if (newDelivery.parcels) DeliveriesParser._mergeDeliveryParcels(existingDelivery.parcels, newDelivery.parcels[0], existingDelivery);
if (!existingOrder) results.push(newOrder);else {
var oldDeliveries = existingOrder.shippingInfo.deliveries;
var newDelivery = newOrder.shippingInfo.deliveries[0];
// find newDelivery in results using its id
var existingDelivery = oldDeliveries.find(function (delivery) {
return delivery.id === newDelivery.id;
});
// if this delivery is not yet in results array, insert it
if (!existingDelivery) oldDeliveries.push(newDelivery);else {
DeliveriesParser._mergeDeliveryItems(existingDelivery.items, newDelivery.items[0], existingDelivery);
// if delivery have parcels, merge them
if (newDelivery.parcels) DeliveriesParser._mergeDeliveryParcels(existingDelivery.parcels, newDelivery.parcels[0], existingDelivery);
}
}
return results;
}
return results;
}
// merge delivery parcels to one array based on parcel.id field
// merge delivery parcels to one array based on parcel.id field
static _mergeDeliveryParcels(allParcels, newParcel, delivery) {
// try to find this parcel in array using parcel id
const duplicitParcel = allParcels.find(parcel => parcel.id === newParcel.id);
}, {
key: '_mergeDeliveryParcels',
value: function _mergeDeliveryParcels(allParcels, newParcel, delivery) {
// try to find this parcel in array using parcel id
var duplicitParcel = allParcels.find(function (parcel) {
return parcel.id === newParcel.id;
});
// if this parcel item is not yet in array, insert it
if (!duplicitParcel) return allParcels.push(newParcel);
// if this parcel item is not yet in array, insert it
if (!duplicitParcel) return allParcels.push(newParcel);
// if this parcel is already in array, check if parcels are equal
if (!_lodash2.default.isEqual(duplicitParcel, newParcel)) throw new Error(`Delivery with id '${delivery.id}' has a parcel with` + ` id '${newParcel.id}' which has different` + ` values across multiple rows.
// if this parcel is already in array, check if parcels are equal
if (!_lodash2.default.isEqual(duplicitParcel, newParcel)) throw new Error(`Delivery with id '${delivery.id}' has a parcel with` + ` id '${newParcel.id}' which has different` + ` values across multiple rows.
Original parcel: '${JSON.stringify(duplicitParcel)}'
Invalid parcel: '${JSON.stringify(newParcel)}'`);
return allParcels;
}
return allParcels;
}
// merge delivery items to one array based on _groupId field
static _mergeDeliveryItems(allItems, newItem, delivery) {
const duplicitItem = allItems.find(item => item._groupId === newItem._groupId);
// merge delivery items to one array based on _groupId field
// if an item is not yet in array, insert it
if (!duplicitItem) return allItems.push(newItem);
}, {
key: '_mergeDeliveryItems',
value: function _mergeDeliveryItems(allItems, newItem, delivery) {
var duplicitItem = allItems.find(function (item) {
return item._groupId === newItem._groupId;
});
// if this item is already in array, check if items are equal
if (!_lodash2.default.isEqual(duplicitItem, newItem)) throw new Error(`Delivery with id '${delivery.id}' has an item` + ` with itemGroupId '${newItem._groupId}' which has different` + ` values across multiple rows.
// if an item is not yet in array, insert it
if (!duplicitItem) return allItems.push(newItem);
// if this item is already in array, check if items are equal
if (!_lodash2.default.isEqual(duplicitItem, newItem)) throw new Error(`Delivery with id '${delivery.id}' has an item` + ` with itemGroupId '${newItem._groupId}' which has different` + ` values across multiple rows.
Original row: '${JSON.stringify(duplicitItem)}'
Invalid row: '${JSON.stringify(newItem)}'`);
return allItems;
}
return allItems;
}
}, {
key: '_parseParcelInfo',
value: function _parseParcelInfo(data) {
var transitionMap = {
'parcel.height': 'measurements.heightInMillimeter',
'parcel.length': 'measurements.lengthInMillimeter',
'parcel.width': 'measurements.widthInMillimeter',
'parcel.weight': 'measurements.weightInGram',
'parcel.trackingId': 'trackingData.trackingId',
'parcel.providerTransaction': 'trackingData.providerTransaction',
'parcel.provider': 'trackingData.provider',
'parcel.carrier': 'trackingData.carrier',
'parcel.isReturn': 'trackingData.isReturn',
'parcel.items': 'items'
};
static _parseParcelInfo(data) {
const transitionMap = {
'parcel.height': 'measurements.heightInMillimeter',
'parcel.length': 'measurements.lengthInMillimeter',
'parcel.width': 'measurements.widthInMillimeter',
'parcel.weight': 'measurements.weightInGram',
'parcel.trackingId': 'trackingData.trackingId',
'parcel.providerTransaction': 'trackingData.providerTransaction',
'parcel.provider': 'trackingData.provider',
'parcel.carrier': 'trackingData.carrier',
'parcel.isReturn': 'trackingData.isReturn',
'parcel.items': 'items'
};
var parcel = {
id: data['parcel.id']
const parcel = {
id: data['parcel.id']
// Build parcel object
};Object.keys(data).forEach(function (fieldName) {
if (!transitionMap[fieldName]) return;
// Build parcel object
};Object.keys(data).forEach(fieldName => {
if (!transitionMap[fieldName]) return;
// All values are loaded as a string
var fieldValue = data[fieldName];
// All values are loaded as a string
let fieldValue = data[fieldName];
// do not set empty values
if (fieldValue === '') return;
// do not set empty values
if (fieldValue === '') return;
// Cast measurements to Number
if (/^measurements/.test(transitionMap[fieldName])) fieldValue = Number(fieldValue);
// Cast measurements to Number
if (/^measurements/.test(transitionMap[fieldName])) fieldValue = Number(fieldValue);
// Cast isReturn field to Boolean
if (fieldName === 'parcel.isReturn') fieldValue = fieldValue === '1' || fieldValue.toLowerCase() === 'true';
// Cast isReturn field to Boolean
if (fieldName === 'parcel.isReturn') fieldValue = fieldValue === '1' || fieldValue.toLowerCase() === 'true';
if (fieldName === 'parcel.items') fieldValue = DeliveriesParser._parseParcelItems(fieldValue);
if (fieldName === 'parcel.items') fieldValue = DeliveriesParser._parseParcelItems(fieldValue);
_objectPath2.default.set(parcel, transitionMap[fieldName], fieldValue);
});
_objectPath2.default.set(parcel, transitionMap[fieldName], fieldValue);
});
return parcel;
}
}, {
key: '_parseParcelItems',
value: function _parseParcelItems(parcelItemsAsString) {
if (!parcelItemsAsString) return [];
return parcel;
}
return parcelItemsAsString.split(';').map(function (parcelItemString) {
var _parcelItemString$spl = parcelItemString.split(':'),
_parcelItemString$spl2 = _slicedToArray(_parcelItemString$spl, 2),
id = _parcelItemString$spl2[0],
quantity = _parcelItemString$spl2[1];
static _parseParcelItems(parcelItemsAsString) {
if (!parcelItemsAsString) return [];
return {
id,
quantity: Number(quantity)
};
});
}
}]);
return parcelItemsAsString.split(';').map(parcelItemString => {
const [id, quantity] = parcelItemString.split(':');
return {
id,
quantity: Number(quantity)
};
});
}
}
return DeliveriesParser;
}(_abstractParser2.default);
exports.default = DeliveriesParser;

@@ -7,2 +7,4 @@ 'use strict';
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _JSONStream = require('JSONStream');
@@ -18,48 +20,78 @@
class LineItemStateParser extends _abstractParser2.default {
constructor(config) {
super(config, 'lineItemState');
}
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
parse(input, output) {
this.logger.info('Starting LineItemState CSV conversion');
this._streamInput(input, output).reduce([], LineItemStateParser._groupByOrderNumber).stopOnError(err => {
this.logger.error(err);
return output.emit('error', err);
}).flatMap(data => data).pipe(_JSONStream2.default.stringify()).pipe(output);
}
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// Will merge newLineItemState with lineItems in results array
static _groupByOrderNumber(results, newLineItemState) {
const existingItem = results.find(lineItem => lineItem.orderNumber === newLineItemState.orderNumber);
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
if (existingItem) existingItem.lineItems.push(...newLineItemState.lineItems);else results.push(newLineItemState);
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
return results;
var LineItemStateParser = function (_AbstractParser) {
_inherits(LineItemStateParser, _AbstractParser);
function LineItemStateParser(config) {
_classCallCheck(this, LineItemStateParser);
return _possibleConstructorReturn(this, (LineItemStateParser.__proto__ || Object.getPrototypeOf(LineItemStateParser)).call(this, config, 'lineItemState'));
}
_processData(data) {
this.logger.verbose('Processing data to CTP format');
_createClass(LineItemStateParser, [{
key: 'parse',
value: function parse(input, output) {
var _this2 = this;
const missingHeaders = this._getMissingHeaders(data);
if (missingHeaders.length) return Promise.reject(new Error(`Required headers missing: '${missingHeaders.join(',')}'`));
this.logger.info('Starting LineItemState CSV conversion');
this._streamInput(input, output).reduce([], LineItemStateParser._groupByOrderNumber).stopOnError(function (err) {
_this2.logger.error(err);
return output.emit('error', err);
}).flatMap(function (data) {
return data;
}).pipe(_JSONStream2.default.stringify()).pipe(output);
}
const state = {
quantity: parseInt(data.quantity, 10),
fromState: data.fromState,
toState: data.toState
};
// Will merge newLineItemState with lineItems in results array
if (data._fromStateQty) state._fromStateQty = parseInt(data._fromStateQty, 10);
}, {
key: '_processData',
value: function _processData(data) {
this.logger.verbose('Processing data to CTP format');
const result = {
orderNumber: data.orderNumber,
lineItems: [{
id: data.lineItemId,
state: [state]
}]
};
return Promise.resolve(result);
}
}
var missingHeaders = this._getMissingHeaders(data);
if (missingHeaders.length) return Promise.reject(new Error(`Required headers missing: '${missingHeaders.join(',')}'`));
var state = {
quantity: parseInt(data.quantity, 10),
fromState: data.fromState,
toState: data.toState
};
if (data._fromStateQty) state._fromStateQty = parseInt(data._fromStateQty, 10);
var result = {
orderNumber: data.orderNumber,
lineItems: [{
id: data.lineItemId,
state: [state]
}]
};
return Promise.resolve(result);
}
}], [{
key: '_groupByOrderNumber',
value: function _groupByOrderNumber(results, newLineItemState) {
var _existingItem$lineIte;
var existingItem = results.find(function (lineItem) {
return lineItem.orderNumber === newLineItemState.orderNumber;
});
if (existingItem) (_existingItem$lineIte = existingItem.lineItems).push.apply(_existingItem$lineIte, _toConsumableArray(newLineItemState.lineItems));else results.push(newLineItemState);
return results;
}
}]);
return LineItemStateParser;
}(_abstractParser2.default);
exports.default = LineItemStateParser;

{
"name": "@commercetools/csv-parser-orders",
"version": "1.3.0",
"version": "1.3.1",
"description": "Module that parses order csv to json",
@@ -34,3 +34,3 @@ "keywords": [
"scripts": {
"build": "cross-env NODE_ENV=cli babel src --out-dir lib"
"build": "babel src --out-dir lib"
},
@@ -37,0 +37,0 @@ "dependencies": {
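The only change outside the compiled output is this build script: 1.3.1 drops the cross-env NODE_ENV=cli prefix and runs plain babel src --out-dir lib. Without NODE_ENV set to cli, Babel presumably no longer picks up an env-specific (lighter) preset configuration and instead applies the package's default presets, which would explain why every const, arrow function and class in the hunks above is transpiled to var, named function expressions and the _createClass/_inherits/_classCallCheck helpers in 1.3.1.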