Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@synatic/noql

Package Overview
Dependencies
Maintainers
3
Versions
47
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@synatic/noql - npm Package Compare versions

Comparing version 2.1.18 to 3.0.0

6

lib/isSelectAll.js

@@ -11,5 +11,9 @@ /**

}
return columns === '*';
return typeof columns === 'string'
? columns === '*'
: columns.length === 1 &&
columns[0].expr.column === '*' &&
!columns[0].expr.table;
}
module.exports = {isSelectAll};

@@ -21,2 +21,3 @@ /**

windowFields: [],
subQueryRootProjections: [],
};

@@ -23,0 +24,0 @@ }

4

lib/make/index.js

@@ -71,3 +71,5 @@ const {parseSQLtoAST} = require('../parseSQLtoAST');

});
parsedQuery.projection = result.parsedProject.$project;
if (Object.keys(result.parsedProject.$project).length) {
parsedQuery.projection = result.parsedProject.$project;
}
if (result.unset && result.unset.$unset) {

@@ -74,0 +76,0 @@ for (const colToUnset of result.unset.$unset) {

@@ -421,5 +421,7 @@ const groupByColumnParserModule = require('./groupByColumnParser');

}
const tableAs = stripJoinHints(ast.from[0].as);
pipeline = makeAggregatePipeline(ast.from[0].expr.ast, context)
.concat([{$project: {[ast.from[0].as]: '$$ROOT'}}])
.concat([{$project: {[tableAs]: '$$ROOT'}}])
.concat(pipeline);
result.subQueryRootProjections.push(tableAs);
}

@@ -437,5 +439,5 @@

pipeline.push({
$sort: ast.orderby.reduce((a, v) => {
$sort: ast.orderby.reduce((sortObj, currentSort) => {
const asMapped = result.asMapping.find(
(c) => c.column === v.expr.column
(c) => c.column === currentSort.expr.column
);

@@ -446,18 +448,19 @@ let key = '';

} else {
if (v.expr.table) {
if (
(ast.from[0].table && !ast.from[0].table) ||
v.expr.table !== ast.from[0].table
) {
key = `${v.expr.table}.${v.expr.column}`;
} else {
key = v.expr.column || v.expr.value;
}
if (
currentSort.expr.table &&
result.subQueryRootProjections &&
result.subQueryRootProjections.indexOf(
currentSort.expr.table
) >= 0
) {
key = `${currentSort.expr.table}.${
currentSort.expr.column || currentSort.expr.value
}`;
} else {
key = v.expr.column || v.expr.value;
key = currentSort.expr.column || currentSort.expr.value;
}
}
a[key] = v.type === 'DESC' ? -1 : 1;
sortObj[key] = currentSort.type === 'DESC' ? -1 : 1;
return a;
return sortObj;
}, {}),

@@ -553,4 +556,318 @@ });

}
if (ast._next && ast.set_op && ast.set_op === 'intersect') {
handleIntersect(ast, context, pipeline);
}
if (ast._next && ast.set_op && ast.set_op === 'except') {
handleExcept(ast, context, pipeline);
}
return pipeline;
}
/**
 * Appends the pipeline stages that implement the SQL INTERSECT set operation
 * between the current query and ast._next.
 *
 * @param {import('../types').AST} ast - the ast to make an aggregate pipeline from
 * @param {import('../types').NoqlContext} context - The Noql context to use when generating the output
 * @param {import('../types').PipelineFn[]} pipeline - the pipeline to append the INTERSECT stages to
 * @returns {void}
 */
function handleIntersect(ast, context, pipeline) {
    const otherPipeline = makeAggregatePipeline(ast._next, context);
    // can be an object with a list of fields, or else '*' : '$*' or a mix of both
    let firstQueryFields = mapColumnsToNameValuePairs(ast.columns);
    let secondQueryFields = mapColumnsToNameValuePairs(ast._next.columns);
    if (firstQueryFields.length !== secondQueryFields.length) {
        // NOTE(review): message previously said "EXCEPT" (copy-paste); fixed.
        throw new Error(
            `each INTERSECT query must have the same number of columns`
        );
    }
    const intersectionCollectionName =
        ast._next.from[0].table ||
        (ast._next.from[0].expr &&
        ast._next.from[0].expr.ast &&
        ast._next.from[0].expr.ast.from &&
        ast._next.from[0].expr.ast.from[0] &&
        ast._next.from[0].expr.ast.from[0].table
            ? ast._next.from[0].expr.ast.from[0].table
            : null) ||
        null;
    if (!intersectionCollectionName) {
        // NOTE(review): message previously said "EXCEPT" (copy-paste); fixed.
        throw new Error('No collection to INTERSECT with');
    }
    // Pull the $sort out of the secondary pipeline so ordering can be
    // re-applied after the set operation ($group does not preserve order).
    const sortStep = extractSortFromPipeline(otherPipeline);
    pipeline.push({
        $unionWith: {
            coll: intersectionCollectionName,
            pipeline: otherPipeline,
        },
    });
    const firstHasSelectAll = hasSelectAll(firstQueryFields);
    const secondHasSelectAll = hasSelectAll(secondQueryFields);
    if (firstHasSelectAll || secondHasSelectAll) {
        if (firstHasSelectAll !== secondHasSelectAll) {
            throw new Error(
                `each INTERSECT query must have the same number of columns and if one has an "*" both must`
            );
        }
        if (!context.schemas) {
            throw new Error(
                'Cannot perform an INTERSECT using "*" without schemas being provided'
            );
        }
        if (ast.from[0].expr || ast._next.from[0].expr) {
            throw new Error(
                'Cannot perform an INTERSECT on subqueries using "*" '
            );
        }
        const firstCollectionName = ast.from[0].as || ast.from[0].table;
        if (!firstCollectionName) {
            throw new Error(
                'Unable to find the first collection name while using INTERSECT'
            );
        }
        const firstSchema = context.schemas[firstCollectionName];
        if (!firstSchema) {
            throw new Error(
                `Schema for INTERSECT not found: ${firstCollectionName}`
            );
        }
        const secondSchema = context.schemas[intersectionCollectionName];
        if (!secondSchema) {
            throw new Error(
                `Schema for INTERSECT not found: ${intersectionCollectionName}`
            );
        }
        // Expand '*' into the concrete column lists from the provided schemas.
        firstQueryFields = getNameValuePairsFromSchema(
            firstSchema,
            firstCollectionName
        );
        secondQueryFields = getNameValuePairsFromSchema(
            secondSchema,
            intersectionCollectionName
        );
    }
    // Columns are matched positionally; when the two sides use different
    // names, coalesce whichever side's field is set.
    const _idField = firstQueryFields.reduce((obj, {name, value}, index) => {
        const {name: otherName, value: otherValue} = secondQueryFields[index];
        if (name === otherName) {
            obj[name] = value;
        } else {
            obj[name] = {
                $ifNull: [
                    value,
                    {
                        $ifNull: [otherValue, null],
                    },
                ],
            };
        }
        return obj;
    }, {});
    // Documents appearing in both inputs group together and get count > 1;
    // only those survive the $match, implementing the intersection.
    pipeline.push({
        $group: {
            _id: _idField,
            count: {$sum: 1},
        },
    });
    pipeline.push({
        $match: {count: {$gt: 1}},
    });
    pipeline.push({$replaceRoot: {newRoot: '$_id'}});
    if (sortStep) {
        pipeline.push(sortStep);
    }
}
/**
 * Appends the pipeline stages that implement the SQL EXCEPT set operation,
 * keeping only documents from the primary query that do not appear in the
 * ast._next query.
 *
 * @param {import('../types').AST} ast - the ast to make an aggregate pipeline from
 * @param {import('../types').NoqlContext} context - The Noql context to use when generating the output
 * @param {import('../types').PipelineFn[]} pipeline - the pipeline to append the EXCEPT stages to
 * @returns {void}
 */
function handleExcept(ast, context, pipeline) {
    const otherPipeline = makeAggregatePipeline(ast._next, context);
    // can be an object with a list of fields, or else '*' : '$*' or a mix of both
    let firstQueryFields = mapColumnsToNameValuePairs(ast.columns);
    let secondQueryFields = mapColumnsToNameValuePairs(ast._next.columns);
    if (firstQueryFields.length !== secondQueryFields.length) {
        // NOTE(review): message previously said "INTERSECT" (copy-paste); fixed.
        throw new Error(
            `each EXCEPT query must have the same number of columns`
        );
    }
    const exceptCollectionName =
        ast._next.from[0].table ||
        (ast._next.from[0].expr &&
        ast._next.from[0].expr.ast &&
        ast._next.from[0].expr.ast.from &&
        ast._next.from[0].expr.ast.from[0] &&
        ast._next.from[0].expr.ast.from[0].table
            ? ast._next.from[0].expr.ast.from[0].table
            : null) ||
        null;
    if (!exceptCollectionName) {
        throw new Error('No collection to EXCEPT with');
    }
    // Tag primary documents so they can be told apart after the $unionWith.
    pipeline.push({
        $addFields: {
            ___is_primary: true,
        },
    });
    // Pull the $sort out of the secondary pipeline so ordering can be
    // re-applied after the set operation ($group does not preserve order).
    const sortStep = extractSortFromPipeline(otherPipeline);
    pipeline.push({
        $unionWith: {
            coll: exceptCollectionName,
            pipeline: otherPipeline,
        },
    });
    const firstHasSelectAll = hasSelectAll(firstQueryFields);
    const secondHasSelectAll = hasSelectAll(secondQueryFields);
    if (firstHasSelectAll || secondHasSelectAll) {
        if (firstHasSelectAll !== secondHasSelectAll) {
            throw new Error(
                `each EXCEPT query must have the same number of columns and if one has an "*" both must`
            );
        }
        if (!context.schemas) {
            throw new Error(
                'Cannot perform an EXCEPT using "*" without schemas being provided'
            );
        }
        if (ast.from[0].expr || ast._next.from[0].expr) {
            throw new Error(
                'Cannot perform an EXCEPT on subqueries using "*" '
            );
        }
        const firstCollectionName = ast.from[0].as || ast.from[0].table;
        if (!firstCollectionName) {
            throw new Error(
                'Unable to find the first collection name while using EXCEPT'
            );
        }
        const firstSchema = context.schemas[firstCollectionName];
        if (!firstSchema) {
            throw new Error(
                `Schema for EXCEPT not found: ${firstCollectionName}`
            );
        }
        const secondSchema = context.schemas[exceptCollectionName];
        if (!secondSchema) {
            throw new Error(
                `Schema for EXCEPT not found: ${exceptCollectionName}`
            );
        }
        // Expand '*' into the concrete column lists from the provided schemas.
        firstQueryFields = getNameValuePairsFromSchema(
            firstSchema,
            firstCollectionName
        );
        secondQueryFields = getNameValuePairsFromSchema(
            secondSchema,
            exceptCollectionName
        );
    }
    // Columns are matched positionally; when the two sides use different
    // names, coalesce whichever side's field is set.
    const _idField = firstQueryFields.reduce((obj, {name, value}, index) => {
        const {name: otherName, value: otherValue} = secondQueryFields[index];
        if (name === otherName) {
            obj[name] = value;
        } else {
            obj[name] = {
                $ifNull: [
                    value,
                    {
                        $ifNull: [otherValue, null],
                    },
                ],
            };
        }
        return obj;
    }, {});
    // Documents present in both inputs group together (count > 1) and are
    // dropped; only primary-only documents survive the $match.
    pipeline.push({
        $group: {
            _id: _idField,
            count: {$sum: 1},
            ___is_primary: {$first: '$___is_primary'},
        },
    });
    pipeline.push({
        $match: {count: {$lte: 1}, ___is_primary: true},
    });
    pipeline.push({$replaceRoot: {newRoot: '$_id'}});
    if (sortStep) {
        pipeline.push(sortStep);
    }
}
/**
 * Removes the join-hint suffixes ("|first", "|last", "|unwind") from a
 * table alias, leaving the bare alias.
 *
 * @param {string} input - the alias text that may carry a join hint
 * @returns {string} the alias with any join hints stripped
 */
function stripJoinHints(input) {
    let result = input;
    // Each hint is removed at most once, matching the original replace calls.
    for (const hint of ['|first', '|last', '|unwind']) {
        result = result.replace(hint, '');
    }
    return result;
}
/**
 * Maps a query's columns to {name, value} pairs suitable for building a
 * $group _id. A string columns value (e.g. '*') yields no pairs.
 *
 * @param {import('../types').Columns} columns - the columns from the AST
 * @returns {{name:string,value:string}[]} one pair per named column
 */
function mapColumnsToNameValuePairs(columns) {
    if (typeof columns === 'string') {
        return [];
    }
    const pairs = [];
    for (const column of columns) {
        // Prefer the alias; fall back to the raw column name.
        const name = column.as || column.expr.column;
        if (name) {
            pairs.push({name, value: `$${name}`});
        }
    }
    return pairs;
}
/**
 * Checks whether the select-all marker '*' appears among the given
 * name/value pairs.
 *
 * @param {{name:string,value:string}[]} column - the pairs to inspect
 * @returns {boolean} true when any pair is named '*'
 */
function hasSelectAll(column) {
    return column.some((field) => field.name === '*');
}
/**
 * Builds {name, value} pairs from a JSON schema's properties, excluding the
 * Mongo-internal _id field.
 *
 * @param {import('json-schema').JSONSchema6} schema - the collection schema
 * @param {string} collectionName - used only for error messages
 * @returns {{name:string,value:string}[]} one pair per schema property
 * @throws {Error} when the schema has no usable properties object
 */
function getNameValuePairsFromSchema(schema, collectionName) {
    const {properties} = schema;
    if (!properties) {
        throw new Error(`Schema for "${collectionName}" has no properties`);
    }
    if (typeof properties === 'boolean') {
        throw new Error(
            `Schema for "${collectionName}" had properties of type boolean`
        );
    }
    const pairs = [];
    for (const name of Object.keys(properties)) {
        if (name === '_id') {
            continue;
        }
        pairs.push({name, value: `$${name}`});
    }
    return pairs;
}
/**
 * Removes the first $sort stage from the pipeline (mutating it in place) and
 * returns that stage, or null when the pipeline contains no $sort stage.
 *
 * @param {import('../types').PipelineFn[]} pipeline - the pipeline to extract the sort from
 * @returns {import('../types').PipelineFn|null} the removed $sort stage, or null if none exists
 */
function extractSortFromPipeline(pipeline) {
    const index = pipeline.findIndex((p) => !!p.$sort);
    if (index < 0) {
        // BUGFIX: previously splice(-1, 1) ran unconditionally, which deleted
        // the LAST pipeline stage whenever no $sort was present.
        return null;
    }
    const [sortStep] = pipeline.splice(index, 1);
    return sortStep;
}

@@ -106,5 +106,9 @@ const makeProjectionExpressionPartModule = require('./makeProjectionExpressionPart');

if (
['bool', 'number', 'string', 'single_quote_string'].includes(
queryPart.type
)
[
'bool',
'number',
'string',
'single_quote_string',
'double_quote_string',
].includes(queryPart.type)
) {

@@ -224,4 +228,11 @@ return queryPart.value;

}
const likeVal = queryPart.right.value;
const regex = sqlStringToRegex(likeVal);
/** @type {string} */
let regex;
if (queryPart.right.value) {
regex = sqlStringToRegex(queryPart.right.value);
} else {
regex = queryPart.right.table
? `$${queryPart.right.table}.${queryPart.right.column}`
: queryPart.right.column;
}

@@ -228,0 +239,0 @@ return {

@@ -98,3 +98,3 @@ const makeFilterConditionModule = require('./makeFilterCondition');

const joinQuery = makeFilterConditionModule.makeFilterCondition(
join.on,
sanitizeOnCondition(join.on, joinAliases),
context,

@@ -237,2 +237,39 @@ false,

*
* @param {import('../types').Expression} condition
* @param {string[]}joinAliases
* @returns {import('../types').Expression}
*/
function sanitizeOnCondition(condition, joinAliases) {
    // clone condition to prevent issues?
    // Both sides of the ON condition get identical treatment; the duplicated
    // left/right blocks are factored into one helper.
    stripUnknownTablesFromFunctionArgs(condition.left, joinAliases);
    stripUnknownTablesFromFunctionArgs(condition.right, joinAliases);
    return condition;
}
/**
 * Clears the table qualifier on any function-call argument whose table is
 * not one of the known join aliases, mutating the expression in place.
 * Non-function expressions are left untouched.
 *
 * @param {import('../types').Expression} expression - one side of the ON condition
 * @param {string[]} joinAliases - the aliases that are valid on this join
 * @returns {void}
 */
function stripUnknownTablesFromFunctionArgs(expression, joinAliases) {
    if (
        expression.type !== 'function' ||
        !expression.args ||
        !expression.args.value ||
        !Array.isArray(expression.args.value)
    ) {
        return;
    }
    expression.args.value = expression.args.value.map((arg) => {
        if (arg.table && joinAliases.indexOf(arg.table) < 0) {
            return {...arg, table: ''};
        }
        return arg;
    });
}
/**
*
* @param {import('../types').TableDefinition} join

@@ -239,0 +276,0 @@ * @param {import('../types').TableDefinition} previousJoin

@@ -21,10 +21,10 @@ const getParsedValueFromBinaryExpressionModule = require('./getParsedValueFromBinaryExpression');

const columnTable = column.expr.table || tableAlias;
const columnValue = column.expr.column;
if (column.as && column.as.toUpperCase() === '$$ROOT') {
result.replaceRoot = {
$replaceRoot: {newRoot: `$${column.expr.column}`},
$replaceRoot: {newRoot: `$${columnValue}`},
};
return;
}
if (column.expr.column === '*' && column.expr.table) {
if (columnValue === '*' && column.expr.table) {
result.parsedProject.$project[

@@ -35,9 +35,14 @@ column.as || column.expr.table

}
if (column.expr.column === '*') {
if (columnValue === '*') {
result.exprToMerge.push('$$ROOT');
return;
}
result.parsedProject.$project[column.as || column.expr.column] = `$${
columnTable ? columnTable + '.' : ''
}${column.expr.column}`;
const expression =
columnTable === columnValue.split('.')[0]
? `$${columnValue}`
: `$${columnTable ? columnTable + '.' : ''}${
column.expr.column
}`;
result.parsedProject.$project[column.as || column.expr.column] =
expression;
return;

@@ -294,2 +299,3 @@ }

.replace(/\$/g, '');
order.type = order.type || 'ASC';
const direction = order.type === 'ASC' ? 1 : -1;

@@ -296,0 +302,0 @@ setWindowFunction.sortBy[res] = direction;

@@ -20,5 +20,3 @@ const {ColumnDoesNotExistError, TableDoesNotExistError} = require('./errors');

) {
options = options || {
database: 'PostgresQL',
};
options = options || {};
const {parsedAst} = parseSQLtoAST(statement, options);

@@ -41,3 +39,6 @@ return await getResultSchema(parsedAst, statement, getSchemaFunction);

const {ast} = tableColumnAst;
if (ast.columns === '*') {
if (
ast.columns === '*' ||
(ast.columns.length === 1 && ast.columns[0].expr.column === '*')
) {
return await processSelectAllStatement(

@@ -237,3 +238,2 @@ ast,

results.length = 0;
console.log({firstObj, secondObj});
for (const col of firstObj) {

@@ -240,0 +240,0 @@ results.push(col);

@@ -45,3 +45,3 @@ const {Parser} = require('node-sql-parser');

parsedAst = parser.parse(sql, {
database: options.database,
database: 'noql',
type: options.type,

@@ -48,0 +48,0 @@ });

@@ -70,3 +70,3 @@ import type {Document, Sort} from 'mongodb';

name?: string;
args?: Expression | Expression[];
args?: Expression;
from?: TableDefinition[];

@@ -121,2 +121,3 @@ value?: any;

expr?: Expression;
on?: Expression;
}

@@ -158,3 +159,3 @@

$match?: {[key: string]: any};
$group?: {_id: any};
$group?: {_id: any; [key: string]: any};
$replaceRoot?: {[key: string]: any};

@@ -172,2 +173,3 @@ $map?: {[key: string]: any};

$setWindowFields?: SetWindowFields;
$addFields?: {[key: string]: any};
}

@@ -189,4 +191,2 @@

unwindJoins?: boolean;
/** Specifies the type of database that Nodejs SQL Parser will use, e.g. 'PostgresQL' */
database?: string;
/** Specifies the type that Nodejs SQL Parser will use e.g. 'table', 'column'*/

@@ -206,3 +206,3 @@ type?: string;

tables: string[];
fullAst:TableColumnAst;
fullAst: TableColumnAst;
}

@@ -244,2 +244,3 @@

windowFields: SetWindowFields[];
subQueryRootProjections: string[];
}

@@ -246,0 +247,0 @@

{
"name": "@synatic/noql",
"version": "2.1.18",
"version": "3.0.0",
"description": "Convert SQL statements to mongo queries or aggregates",

@@ -67,3 +67,3 @@ "main": "index.js",

"@synatic/type-magic": "^1.0.0",
"bson": "^5.4.0",
"bson": "^4.7.2",
"check-types": "11.2.2",

@@ -73,3 +73,3 @@ "clone-deep": "4.0.1",

"lodash": "^4.17.21",
"node-sql-parser": "4.9.0"
"node-sql-parser": "^4.18.0"
},

@@ -86,3 +86,3 @@ "devDependencies": {

"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jsdoc": "^39.3.2",
"eslint-plugin-jsdoc": "^38.1.6",
"eslint-plugin-node": "^11.1.0",

@@ -95,7 +95,8 @@ "eslint-plugin-prettier": "^4.0.0",

"mocha": "^10.0.0",
"mongodb": "^5.7.0",
"moment": "^2.29.4",
"mongodb": "^4.17.2",
"mongodb-language-model": "^2.3.0",
"npm-check": "^6.0.1",
"nyc": "^15.1.0",
"pg": "^8.11.3",
"prettier": "^2.8.8",

@@ -102,0 +103,0 @@ "underscore.string": "^3.3.5",

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc