Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

ag-grid-mongo-query-builder

Package Overview
Dependencies
Maintainers
1
Versions
37
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ag-grid-mongo-query-builder - npm Package Compare versions

Comparing version 0.4.1 to 0.4.2

2

package.json
{
"name": "ag-grid-mongo-query-builder",
"version": "0.4.1",
"version": "0.4.2",
"description": "Utility to generate Mongo DB aggregation pipeline queries starting from AgGrid server side params",

@@ -5,0 +5,0 @@ "main": "index.js",

@@ -889,28 +889,102 @@ //const mongoose = require('mongoose');

// NOTE(review): this span appears to interleave the removed (0.4.1) and added
// (0.4.2) lines of generateCSVData from a version diff rendered without +/-
// markers: braces do not balance and the code after the early return below is
// unreachable. Reconcile against the real 0.4.2 source before relying on it.
// Entry point for CSV export: counts matching records, then either streams
// them in batches or (grouping path below) builds grouped export rows.
const generateCSVData = async(req, res, dbModel) => {
const newcounPayload = {...req.body};
// paging bounds must not constrain the count query
delete(newcounPayload.startRow);
delete(newcounPayload.endRow);
const countQuery = buildCountQuery(newcounPayload);
if(req.body.defaultFilters) {
countQuery.unshift(req.body.defaultFilters)
}
let totalCount = await getTotalCount(dbModel, countQuery, res);
// NOTE(review): env vars are strings; the arithmetic below relies on implicit coercion
let recordsPerPage = process.env.CSV_RECORDS_PER_PAGE ? process.env.CSV_RECORDS_PER_PAGE : 500;
let requestPerBatch = process.env.CSV_REQUEST_PER_BATCH ? process.env.CSV_REQUEST_PER_BATCH : 10;
let startRow = 0;
let lastRow = 0;
let keys = req.body.displayFields ? req.body.displayFields : null;
if(totalCount && keys) {
const numPages = Math.ceil(totalCount/recordsPerPage);
const numBatches = Math.ceil(numPages/requestPerBatch);
console.log("Total no of pages" , numPages);
console.log("Total no of batch" , numBatches);
return await getCSVDataByBatches(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, [], 1, dbModel);
// NOTE(review): everything below is dead code if the return above executes;
// the count/paging prologue above likely belongs to the removed 0.4.1 version only.
//// This block handles grouping export
if(req.body.rowGroupCols && req.body.rowGroupCols.length>0) {
const groupsFields = req.body.rowGroupCols.reduce((groupArr, group) => {
groupArr.push(`${group.displayName}`);
return groupArr;
}, []);
const missingObjects = getMissingObject(req.body.rowGroupCols, keys, req.body.column);
console.log(missingObjects);
if(missingObjects) {
req.body.displayFields.push(...missingObjects)
}
// const groupObj = req.body.rowGroupCols.reduce((groupObj, group) => {
// groupObj[group.field] = `$${group.field}`;
// return groupObj;
// }, {});
// const filterModel = req.body.filterModel;
// const isFiltering = Object.keys(filterModel).length > 0;
// let matchQuery = {};
// const filterQuery = isFiltering ? buildFilterQuery(filterModel, false) : [];
// if(filterQuery && filterQuery.query && filterQuery.query[0] && filterQuery.query[0].$match) {
// matchQuery = filterQuery.query[0].$match;
// }
// const groupsDataQuery = [{"$match": matchQuery},{"$group": {_id : groupObj }}, {"$sort" : {"_id" : 1}} ];
// if(groupsDataQuery) {
// const groupData = await getGroupData(dbModel, groupsDataQuery, res);
// console.log('-------------Groups data');
// console.log(JSON.stringify(groupData))
// if(groupData && groupData.length > 0) {
// //let groupsDataArray = [];
// req.body.pivotCols = [];
// req.body.pivotMode = false;
// let groupsDataArray = await groupData.reduce(async (accumulatorPromise, group) => {
// const accumulator = await accumulatorPromise;
// req.body.groupKeys = Object.values(group._id);
// keys.unshift({dataType: "groupColumn",fieldName: "Group", label: "Group"});
// const results = await prepaireCSVDataPayload(req, res, dbModel, keys);
// if(req.body.groupKeys) {
// let groupObj = createEmptyGroupObj(keys);
// const groupRow = formatExportObj(groupObj, keys, true, req.body.groupKeys);
// results.unshift(groupRow);
// }
// console.log('=======================Results');
// console.log(JSON.stringify(results))
// return [...accumulator, ...results];
// }, Promise.resolve([]));
// return groupsDataArray;
// }
// }
if(!req.body.groupKeys || req.body.groupKeys.length <= 0) {
req.body.rowGroupCols = [];
}
// synthetic leading column that carries the group label in header rows
keys.unshift({dataType: "groupColumn",fieldName: "Group", label: "Group"});
const csvData = await prepaireCSVDataPayload(req, res, dbModel, keys);
const modifiedRes = await convertGroupsRes(csvData, groupsFields, req.body.groupKeys);
return modifiedRes;
} else {
console.log('export total count not found:');
res.status(400).send("export total count not found !");
///// This logic is for normal export other then grouping
return await prepaireCSVDataPayload(req, res, dbModel, keys);
}
}
///// Counts the records matching the current filter payload and, when records
///// exist, fetches them in batches; responds 400 when there is nothing to export.
// req     - express request; body carries the AgGrid server-side params
// res     - express response; used to report a 400 when the count is missing
// dbModel - mongoose model the aggregation runs against
// keys    - display-field descriptors ({fieldName, dataType, label}) for the CSV columns
// returns - the accumulated CSV row objects, or undefined after the 400 response
const prepaireCSVDataPayload = async(req, res, dbModel, keys) => {
// Drop paging bounds via rest-destructuring instead of mutating a copy with delete():
// the count query must see the whole result set, not one page.
const { startRow: _startRow, endRow: _endRow, ...countPayload } = req.body;
const countQuery = buildCountQuery(countPayload);
if (req.body.defaultFilters) {
countQuery.unshift(req.body.defaultFilters)
}
const totalCount = await getTotalCount(dbModel, countQuery, res);
// env vars arrive as strings; coerce explicitly so the paging math below is numeric
// (also falls back on non-numeric garbage instead of producing NaN pages)
const recordsPerPage = Number(process.env.CSV_RECORDS_PER_PAGE) || 500;
const requestPerBatch = Number(process.env.CSV_REQUEST_PER_BATCH) || 10;
if (totalCount && keys) {
const numPages = Math.ceil(totalCount/recordsPerPage);
const numBatches = Math.ceil(numPages/requestPerBatch);
console.log("Total no of pages" , numPages);
console.log("Total no of batch" , numBatches);
return await getCSVDataByBatches(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, 0, 0, [], 1, dbModel);
} else {
console.log('export total count not found:');
res.status(400).send("export total count not found !");
}
}
//////Recursive function to get CSV data by batches

@@ -923,2 +997,3 @@ const getCSVDataByBatches = async(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, updatedExportResult, currBatch, dbModel ) => {

let flagValue = req.body && req.body.flagFilters ? req.body.flagFilters : null;
let groupKeys = req.body && req.body.groupKeys ? req.body.groupKeys : null;
const currentBatchLimit = (currBatch * requestPerBatch * recordsPerPage);

@@ -948,3 +1023,3 @@ if(currentBatchLimit > totalCount) {

promiseRes.forEach(obj => {
const formattedObj = formatExportObj(obj,keys);
const formattedObj = formatExportObj(obj, keys, groupKeys);
const flagObj = addFlagToExportObj(obj, flagColumns, flagValue);

@@ -966,23 +1041,37 @@ const finalObj = {...formattedObj, ...flagObj};

/////Function to format CSV data object
// Thin wrapper over createCSVObj: forwards groupKeys only when one was
// supplied so grouped exports get the extra "Group" column handling.
// (Removed the truncated pre-0.4.2 body that was left interleaved here by the
// version diff — it shadowed this definition and never closed its braces.)
function formatExportObj(obj, keys, groupKeys= null) {
if (groupKeys != null) {
return createCSVObj(keys, obj, groupKeys);
} else {
return createCSVObj(keys, obj);
}
}
////Format and Create CSV Row Objects
// Builds one CSV row object from a raw record.
// keys      - column descriptors ({fieldName, dataType, label}) driving which
//             fields are emitted and how each value is formatted
// obj       - the source record; an empty object yields an empty row
// groupKeys - optional group-path values, written into the 'groupColumn' cell
//             joined with " -> "
// returns   - {label: formattedValue} map ready for CSV serialization
// Fix: the previous rendering interleaved stale `keys[i]` lines (ReferenceError —
// `i` is undefined inside the reduce callback) and a duplicate unreachable
// return, leaving the function syntactically invalid; this is the clean form.
function createCSVObj(keys, obj, groupKeys=null) {
return keys.reduce((tempDataObj, key) => {
if (Object.keys(obj).length > 0) {
if (obj[key.fieldName] && Array.isArray(obj[key.fieldName])) {
// arrays are flattened to a comma-joined string before sanitizing
tempDataObj[key.label] = removeLineBreaks(obj[key.fieldName].toString());
} else if (key.dataType && (obj[key.fieldName] == "" || obj[key.fieldName] == null || obj[key.fieldName] == undefined)) {
tempDataObj[key.label] = "";
} else if (key.dataType && key.dataType !== 'agDateColumnFilter') {
tempDataObj[key.label] = removeLineBreaks(obj[key.fieldName]);
}
if (key.dataType === 'agDateColumnFilter') {
// date columns are re-rendered as DD/MM/YYYY regardless of stored format
const dateVal = obj[key.fieldName] ? new Date(obj[key.fieldName]) : null;
if (dateVal) {
tempDataObj[key.label] = moment(dateVal).format("DD/MM/YYYY");
}
}
if (groupKeys) {
// only the synthetic group column carries the group path
if (key.dataType === 'groupColumn') {
tempDataObj[key.label] = groupKeys.join(" -> ");
}
}
}
return tempDataObj;
}, {});
}

@@ -1038,3 +1127,3 @@

let query = dbModel.aggregate(aggregationPipeline.aggregationPipeline);
let query = dbModel.aggregate(aggregationPipeline.aggregationPipeline).allowDiskUse(true);

@@ -1072,2 +1161,20 @@ if(req.body.collationQuery && req.body.collationQuery!=null) {

///////////// Below block to get groups data to export by keys
// const getGroupData = async(dbModel, groupQuery, res)=>{
// const query = dbModel.aggregate(groupQuery);
// return await query.exec()
// .then((results) => {
// console.log('CSV Export: group query successful execution ..');
// if(results && results.length > 0) {
// return results;
// } else {
// return null
// }
// })
// .catch((error) => {
// console.log('CSV Export: got the error in count query execution:', error);
// res.status(400).send("CSV Export: Error getting total count!");
// })
// }
function escapeSpecialChars(stringValue) {

@@ -1089,2 +1196,81 @@ if(stringValue && stringValue!='' && checkSpecialChars(stringValue)) {

////////Remove characters that break CSV rows.
// Strips every char outside printable ASCII (0x20-0x7E) — this removes CR/LF
// (the actual row-breakers) and, as a side effect, any other non-ASCII text.
// Non-string and empty values are returned untouched.
function removeLineBreaks(stringValue) {
// guard: callers pass raw field values, which may be numbers/booleans with no
// .replace — previously those threw a TypeError mid-export
if (typeof stringValue === 'string' && stringValue !== '') {
return stringValue.replace(/[^\x20-\x7E]/gmi, "");
}
return stringValue;
}
////////// Creates Empty Object
// function createEmptyGroupObj(keys) {
// return keys.reduce((acc, column) => {
// acc[column.fieldName] = '';
// return acc;
// }, {});
// }
////////////Groups rows by the given keys and inserts an extra header line to highlight each new group's starting point.
// dataRes       - flat array of formatted CSV row objects (sorted in place)
// groupKeys     - display names of the grouping columns, in grouping order
// groupSelector - currently expanded group-key values (req.body.groupKeys);
//                 when non-empty, header labels are padded via createGroupKeyString
//                 and data rows get a blank 'Group' cell
// returns       - rows sorted by group with a blank header row before each group
const convertGroupsRes = async (dataRes, groupKeys, groupSelector) => {
// Order rows by each group key in turn so every group's rows are contiguous.
dataRes.sort((a, b) => {
for (const key of groupKeys) {
// String(... ?? '') guards against null/numeric cells, which have no localeCompare
const cmp = String(a[key] ?? '').localeCompare(String(b[key] ?? ''));
if (cmp !== 0) {
return cmp;
}
}
return 0;
});
const modifiedRes = dataRes.reduce((acc, currentObj, index) => {
const currentKey = groupKeys.map(key => currentObj[key]).join(' -> ');
// Emit a header row whenever the group path changes from the previous row.
if (index === 0 || currentKey !== groupKeys.map(key => acc[acc.length - 1][key]).join(' -> ')) {
const groupObject = Object.keys(currentObj).reduce((obj, key) => {
obj[key] = '';
return obj;
}, {});
if (groupSelector && groupSelector.length > 0) {
groupObject["Group"] = createGroupKeyString(groupSelector, currentKey);
} else {
groupObject["Group"] = currentKey;
}
acc.push(groupObject);
}
if (groupSelector && groupSelector.length > 0) {
// data rows leave the Group cell blank; only header rows carry the label
currentObj['Group'] = '';
}
acc.push(currentObj);
return acc;
}, []);
// plain array — no await needed on the return value
return modifiedRes;
}
///////// Find grouping columns missing from the payload's display fields so they can be appended.
// rowGroup      - AgGrid rowGroupCols entries ({id, displayName})
// displayFields - currently exported field descriptors ({fieldName, ...})
// columns       - full column definitions ({field, filter, ...}); may be absent
// returns       - descriptors for group columns not yet displayed, or the
//                 original falsy rowGroup value when rowGroup is not provided
function getMissingObject(rowGroup, displayFields, columns) {
if (!rowGroup) {
return rowGroup;
}
const missingGroup = [];
for (const group of rowGroup) {
const alreadyDisplayed = displayFields.find(item => item.fieldName === group.id);
if (alreadyDisplayed) {
continue;
}
const matchingColumn = columns && columns.find(column => column.field === group.id);
if (matchingColumn) {
missingGroup.push({
fieldName: matchingColumn.field,
dataType: matchingColumn.filter,
label: group.displayName,
});
}
}
return missingGroup;
}
//////// Create custom key string to indicate multi level of grouping.
// Prefixes the current group path with any group keys it does not yet contain,
// preserving the order of groupKeys, e.g. (['a','b','c'], 'b -> c') -> 'a -> b -> c'.
function createGroupKeyString(groupKeys, currentKey) {
// non-empty segments already present on the path
const presentParts = currentKey
.split(" -> ")
.filter((part) => part.trim() !== "");
const present = new Set(presentParts);
// keys not already on the path go in front, then the existing segments
const fullPath = [...groupKeys.filter((key) => !present.has(key)), ...presentParts];
return fullPath.join(" -> ");
}
module.exports.buildQuery=buildQuery;

@@ -1091,0 +1277,0 @@ module.exports.buildCountQuery=buildCountQuery;

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc