Socket
Socket
Sign inDemoInstall

ag-grid-mongo-query-builder

Package Overview
Dependencies
0
Maintainers
1
Versions
37
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 0.3.9 to 0.4.0

2

package.json
{
"name": "ag-grid-mongo-query-builder",
"version": "0.3.9",
"version": "0.4.0",
"description": "Utility to generate Mongo DB aggregation pipeline queries starting from AgGrid server side params",

@@ -5,0 +5,0 @@ "main": "index.js",

@@ -897,3 +897,2 @@ //const mongoose = require('mongoose');

let requestPerBatch = process.env.CSV_REQUEST_PER_BATCH ? process.env.CSV_REQUEST_PER_BATCH : 10;
let finalExportResultArray = [];
let startRow = 0;

@@ -907,13 +906,13 @@ let lastRow = 0;

console.log("Total no of batch" , numBatches);
await getCSVDataByBatches(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, finalExportResultArray, 1, dbModel);
return finalExportResultArray;
} else{
console.log('Total count not found:');
res.status(400).send(" Total count not found !");
return await getCSVDataByBatches(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, [], 1, dbModel);
} else {
console.log('export total count not found:');
res.status(400).send("export total count not found !");
}
}
const getCSVDataByBatches = async(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, finalExportResultArray, currBatch, dbModel ) => {
if(currBatch > numBatches) return finalExportResultArray;
////// Recursive function to get CSV data in batches
const getCSVDataByBatches = async(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, updatedExportResult, currBatch, dbModel ) => {
if(currBatch > numBatches) return updatedExportResult;
console.log(' Processing batch ',currBatch);

@@ -925,79 +924,80 @@ let reqCurrBatch = requestPerBatch;

if(currentBatchLimit > totalCount) {
const batchLimit = ((currBatch - 1) * requestPerBatch * recordsPerPage);
const reqDiff = totalCount - batchLimit;
reqCurrBatch = Math.ceil(reqDiff/recordsPerPage);
const batchLimit = ((currBatch - 1) * requestPerBatch * recordsPerPage);
const reqDiff = totalCount - batchLimit;
reqCurrBatch = Math.ceil(reqDiff/recordsPerPage);
}
///// Creating promise requests for each batch
const reArray = Array.from({length: reqCurrBatch}, (x, i) => i + 1);
const requests = reArray.map(itra => {
if(itra === 1 && currBatch === 1) {
startRow = startRow;
} else {
startRow = startRow + recordsPerPage;
}
lastRow = startRow + recordsPerPage;
return getCSVDataByPages(req, startRow, lastRow, res, dbModel)
const promiseRequests = reArray.map(itra => {
if(itra === 1 && currBatch === 1) {
startRow = startRow;
} else {
startRow = startRow + recordsPerPage;
}
lastRow = startRow + recordsPerPage;
return getCSVDataByPages(req, startRow, lastRow, res, dbModel)
});
try {
const response = await Promise.all(requests);
console.log('------Export Response Length');
console.log(response.length);
if(response && response.length > 0) {
console.log('------------Keys');
console.log(keys);
response.map(eachRes => {
console.log('------Each batch Response Length');
console.log(eachRes.length);
console.log(JSON.stringify(eachRes));
eachRes.map(function(obj) {
let tempDataObj = {};
if(Object.keys(obj).length > 0) {
for (let i in keys) {
///// Processing each batch responses with promise all
const promiseAllResponse = await Promise.all(promiseRequests);
if(promiseAllResponse && promiseAllResponse.length > 0) {
const batchResponseArr = promiseAllResponse.reduce((acc, promiseRes) => {
promiseRes.forEach(obj => {
const formattedObj = formatExportObj(obj,keys);
const flagObj = addFlagToExportObj(obj, flagColumns, flagValue);
const finalObj = {...formattedObj, ...flagObj};
acc.push(finalObj);
});
return acc;
}, []);
updatedExportResult = [...updatedExportResult, ...batchResponseArr];
}
currBatch = currBatch + 1;
return await getCSVDataByBatches(req, res, keys, numBatches, totalCount, requestPerBatch, recordsPerPage, startRow, lastRow, updatedExportResult, currBatch, dbModel);
} catch(err) {
console.log(`error in processing export data batches: `, err);
}
}
if (obj[keys[i].fieldName] && Array.isArray(obj[keys[i].fieldName])) {
tempDataObj[keys[i].fieldName] = obj[keys[i].fieldName].toString();
} else if (keys[i].dataType && (obj[keys[i].fieldName] == "" || obj[keys[i].fieldName] == null || obj[keys[i].fieldName] == undefined)) {
tempDataObj[keys[i].fieldName] = "";
} else if(keys[i].fieldName && obj[keys[i].fieldName]) {
tempDataObj[keys[i].fieldName] = obj[keys[i].fieldName];
} else {
console.log('-----------key value not found in each resposne');
console.log(JSON.stringify(obj));
tempDataObj[keys[i].fieldName] = "";
}
/////Function to format CSV data object
/////Function to format CSV data object
// Formats one raw export record into a row object keyed by column labels.
//   obj  - raw document returned from the DB query for one row
//   keys - column descriptors: { fieldName, label, dataType } (dataType is the
//          AgGrid filter type, e.g. 'agDateColumnFilter')
// Returns {} when obj has no own keys.
function formatExportObj(obj, keys) {
  let tempDataObj = {};
  if (Object.keys(obj).length > 0) {
    for (const key of keys) {
      const value = obj[key.fieldName];
      if (value && Array.isArray(value)) {
        // Arrays are flattened to a comma-separated string for CSV output.
        tempDataObj[key.label] = value.toString();
      } else if (key.dataType && (value === "" || value == null)) {
        // Blank out empty/missing cells; `value == null` also matches undefined.
        // NOTE: the original used `value == ""`, whose loose equality also
        // matched 0 and false and blanked legitimate values; `=== ""` fixes that.
        tempDataObj[key.label] = "";
      } else if (key.dataType) {
        tempDataObj[key.label] = value;
      }
      if (key.dataType === 'agDateColumnFilter') {
        // Date columns are re-rendered as DD/MM/YYYY strings.
        const dateVal = value ? new Date(value) : null;
        if (dateVal) {
          tempDataObj[key.label] = moment(dateVal).format("DD/MM/YYYY");
        }
      }
    }
  }
  return tempDataObj;
}
flagValue.map(eachVal => {
let match = false;
flagColumns.some(flagCol => {
const typeOfVal = typeof(obj[flagCol.field]);
match = checkFlagCondition(typeOfVal, eachVal, obj[flagCol.field]);
if(match) return match
});
tempDataObj[`Keyword - ${eachVal}`] = match ? '1' : '0';
})
} else {
console.log('------------No key found in each resposne obj');
}
finalExportResultArray.push(tempDataObj);
});
//// Function to add flagging columns to export data
//// Function to add flag columns to export data
// Builds the keyword-flag columns ("Flag - <keyword>") for one export record.
//   obj         - raw document for one row
//   flagColumns - [{ field, ... }] columns whose values are scanned for matches
//   flagValue   - keywords; each yields one 'Flag - <keyword>' column of '1'/'0'
// Relies on checkFlagCondition (defined below) to test a single value.
// Returns an object of flag columns to merge into the formatted row.
function addFlagToExportObj(obj, flagColumns, flagValue) {
  const flagObj = {};
  flagValue.forEach(eachVal => {
    // some() short-circuits on the first column that matches the keyword.
    const match = flagColumns.some(flagCol => {
      const typeOfVal = typeof (obj[flagCol.field]);
      return checkFlagCondition(typeOfVal, eachVal, obj[flagCol.field]);
    });
    flagObj[`Flag - ${eachVal}`] = match ? '1' : '0';
  });
  return flagObj;
}
function checkFlagCondition(type, flagVal, value) {

@@ -1041,5 +1041,2 @@

console.log('---------------Export Batch Query');
console.log(JSON.stringify(query));
return await query.exec().then((results) => {

@@ -1046,0 +1043,0 @@ return results;

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc