ddf-query-validator - npm Package Compare versions

Comparing version 1.0.3 to 1.0.4

test/common.ts

lib/dataset-manager.service.d.ts (5 changed lines)

@@ -1,2 +0,3 @@

-export declare function getDatasetPath(basePath: any, queryParam: any): string;
-export declare function extendQueryParamWithDatasetProps(queryParam: any, options?: {}): Promise<string | void>;
+export declare function getRepositoryPath(basePath: any, queryParam: any): string;
+export declare function getFilePath(repositoryPath: any, filePath?: string): string;
+export declare function extendQueryWithRepository(queryParam: any, config?: {}): Error | void;

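The declaration changes above swap the dataset-path helpers for repository-path helpers. A minimal sketch of how the two new path builders compose, based only on the template literals visible in the compiled source further down; the import path and the concrete basePath/dataset values are illustrative assumptions, not taken from the package:

import { getRepositoryPath, getFilePath } from 'ddf-query-validator'; // assumes the package entry point re-exports these helpers

// getRepositoryPath builds `${basePath}${dataset}/${branch}-${commit}`, so basePath is expected to carry its own trailing slash
const repositoryPath = getRepositoryPath('/tmp/repos/', {
  dataset: 'open-numbers/globalis', // illustrative values
  branch: 'master',
  commit: 'HEAD'
});
// => '/tmp/repos/open-numbers/globalis/master-HEAD'

// getFilePath appends `${repositoryPath}/${filePath}`; filePath defaults to 'datapackage.json'
const datapackagePath = getFilePath(repositoryPath);
// => '/tmp/repos/open-numbers/globalis/master-HEAD/datapackage.json'
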
lib/dataset-manager.service.js (90 changed lines)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-const tslib_1 = require("tslib");
const isNil = require("lodash.isnil");
const includes = require("lodash.includes");
const get = require("lodash.get");
-const path = require("path");
const helper_service_1 = require("./helper.service");
-function getDatasetPath(basePath, queryParam) {
+function getRepositoryPath(basePath, queryParam) {
const { dataset, branch, commit } = queryParam;
return `${basePath}${dataset}/${branch}-${commit}`;
}
-exports.getDatasetPath = getDatasetPath;
-function getDatapackagePath(datasetPath) {
-return path.resolve(datasetPath, 'datapackage.json');
+exports.getRepositoryPath = getRepositoryPath;
+function getFilePath(repositoryPath, filePath = 'datapackage.json') {
+return `${repositoryPath}/${filePath}`;
}
-function isDatasetPathAlreadyInBasePath(fileReader, basePath) {
-return new Promise((resolve) => {
-fileReader.readText(getDatapackagePath(basePath), (error) => {
-return resolve(!error);
-});
-});
-}
-function extendQueryParamWithDatasetProps(queryParam, options = {}) {
-return tslib_1.__awaiter(this, void 0, void 0, function* () {
-const datasetsConfig = get(options, 'datasetsConfig', {
-[helper_service_1.DEFAULT_DATASET_NAME]: { [helper_service_1.DEFAULT_DATASET_BRANCH]: [helper_service_1.DEFAULT_DATASET_COMMIT] },
-default: {
-dataset: helper_service_1.DEFAULT_DATASET_NAME,
-branch: helper_service_1.DEFAULT_DATASET_BRANCH,
-commit: helper_service_1.DEFAULT_DATASET_COMMIT
-}
-});
-const { 'default': { dataset: DEFAULT_DATASET, branch: DEFAULT_BRANCH, commit: DEFAULT_COMMIT } } = datasetsConfig;
-const { dataset: originDataset, branch: originBranch, commit: originCommit } = queryParam;
-let { dataset = DEFAULT_DATASET, branch = DEFAULT_BRANCH, commit = DEFAULT_COMMIT } = queryParam;
-const basePath = get(options, 'basePath', helper_service_1.DEFAULT_DATASET_DIR);
-const fileReader = get(options, 'fileReader');
-const datasetName = dataset;
-if (isNil(datasetsConfig[dataset])) {
-throw new Error(`No ${isNil(originDataset) ? 'default ' : ''}dataset '${dataset}' was found`);
+exports.getFilePath = getFilePath;
+function extendQueryWithRepository(queryParam, config = {}) {
+const REPOSITORY_DESCRIPTORS = get(config, 'repositoryDescriptors', {});
+const IS_DEFAULT_DATASET = isNil(queryParam.dataset) ? 'default ' : '';
+if (!IS_DEFAULT_DATASET) {
+const [originDataset, originBranch] = queryParam.dataset.split('#');
+if (!queryParam.branch && originBranch) {
+queryParam.branch = originBranch;
+queryParam.dataset = originDataset;
}
-if (isNil(datasetsConfig[dataset][branch])) {
-throw new Error(`No ${isNil(originBranch) ? 'default ' : ''}branch '${branch}' in ${isNil(originDataset) ? 'default ' : ''}dataset '${dataset}' was found`);
}
-if (!includes(datasetsConfig[dataset][branch], commit)) {
-throw new Error(`No ${isNil(originCommit) ? 'default ' : ''}commit '${commit}' in ${isNil(originBranch) ? 'default ' : ''}branch '${branch}' in ${isNil(originDataset) ? 'default ' : ''}dataset '${dataset}' was found`);
}
-let datasetPath;
-let datapackagePath;
-try {
-const isAlreadyDataset = yield isDatasetPathAlreadyInBasePath(fileReader, basePath);
-if (isAlreadyDataset) {
-dataset = basePath;
-branch = null;
-commit = null;
-datasetPath = basePath;
-datapackagePath = getDatapackagePath(basePath);
-}
-else {
-datasetPath = getDatasetPath(basePath, { dataset, branch, commit });
-datapackagePath = getDatapackagePath(datasetPath);
-}
-}
-catch (error) {
-throw error;
-}
-Object.assign(queryParam, { dataset, branch, commit });
-Object.assign(options, { datasetPath, datapackagePath, datasetName });
-return queryParam;
-});
-}
+const IS_DEFAULT_BRANCH = isNil(queryParam.branch) ? 'default ' : '';
+const IS_DEFAULT_COMMIT = isNil(queryParam.commit) ? 'default ' : '';
+const { dataset = get(config, 'defaultRepository', helper_service_1.DEFAULT_REPOSITORY_NAME), branch = get(config, 'defaultRepositoryBranch', helper_service_1.DEFAULT_REPOSITORY_BRANCH), commit = get(config, 'defaultRepositoryCommit', helper_service_1.DEFAULT_REPOSITORY_HASH) } = queryParam;
+if (isNil(REPOSITORY_DESCRIPTORS[dataset])) {
+throw new Error(`No ${IS_DEFAULT_DATASET}dataset '${dataset}' was found`);
+}
+if (isNil(REPOSITORY_DESCRIPTORS[dataset][branch])) {
+throw new Error(`No ${IS_DEFAULT_BRANCH}branch '${branch}' in ${IS_DEFAULT_DATASET}dataset '${dataset}' was found`);
+}
+if (!includes(REPOSITORY_DESCRIPTORS[dataset][branch], commit)) {
+throw new Error(`No ${IS_DEFAULT_COMMIT}commit '${commit}' in ${IS_DEFAULT_BRANCH}branch '${branch}' in ${IS_DEFAULT_DATASET}dataset '${dataset}' was found`);
+}
+const repositoryPath = getRepositoryPath('', { dataset, branch, commit });
+Object.assign(queryParam, { repositoryPath });
+}
-exports.extendQueryParamWithDatasetProps = extendQueryParamWithDatasetProps;
+exports.extendQueryWithRepository = extendQueryWithRepository;
//# sourceMappingURL=dataset-manager.service.js.map

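Taken together, the rewritten module no longer resolves dataset folders on disk; extendQueryWithRepository now validates the requested dataset, branch and commit against a repositoryDescriptors map supplied in config and attaches the computed repositoryPath to the query. A hedged sketch of the expected call shape, inferred from the compiled code above; the repository names are illustrative, while the config keys (repositoryDescriptors, defaultRepository, defaultRepositoryBranch, defaultRepositoryCommit) come from the source:

import { extendQueryWithRepository } from 'ddf-query-validator'; // assumes the entry point re-exports it

const config = {
  // allowed repositories -> branches -> commits, exactly the structure the checks above walk
  repositoryDescriptors: {
    'open-numbers/globalis': { master: ['HEAD'] } // illustrative
  },
  defaultRepository: 'open-numbers/globalis',
  defaultRepositoryBranch: 'master',
  defaultRepositoryCommit: 'HEAD'
};

// the 'dataset#branch' notation is split apart before validation
const query: any = { dataset: 'open-numbers/globalis#master' };

extendQueryWithRepository(query, config); // throws if the dataset, branch or commit is not described
console.log(query.repositoryPath); // 'open-numbers/globalis/master-HEAD'
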
@@ -23,3 +23,3 @@ "use strict";

if (!isQueryValid) {
-return reject(`Too many query definition errors [repo: ${query.dataset}]: \n* ${validationResult.join('\n* ')}`);
+return reject(`Too many query definition errors [repo: ${options.basePath}]: \n* ${validationResult.join('\n* ')}`);
}

@@ -26,0 +26,0 @@ return resolve();

@@ -20,6 +20,5 @@ export declare const SCHEMAS: Set<string>;

export declare const AVAILABLE_ORDER_BY_CLAUSE_VALUES: Set<string | number>;
-export declare const DEFAULT_DATASET_NAME: string;
-export declare const DEFAULT_DATASET_BRANCH: string;
-export declare const DEFAULT_DATASET_COMMIT = "HEAD";
-export declare const DEFAULT_DATASET_DIR: string;
+export declare const DEFAULT_REPOSITORY_NAME: string;
+export declare const DEFAULT_REPOSITORY_BRANCH: string;
+export declare const DEFAULT_REPOSITORY_HASH = "HEAD";
export declare function isSchemaQuery(query: any): boolean;

@@ -26,0 +25,0 @@ export declare function isDatapointsQuery(query: any): boolean;

@@ -31,6 +31,5 @@ "use strict";

]);
-exports.DEFAULT_DATASET_NAME = process.env.DEFAULT_DATASET_NAME || 'systema_globalis';
-exports.DEFAULT_DATASET_BRANCH = process.env.DEFAULT_DATASET_BRANCH || 'master';
-exports.DEFAULT_DATASET_COMMIT = 'HEAD';
-exports.DEFAULT_DATASET_DIR = process.env.DEFAULT_DATASET_DIR || './datasets';
+exports.DEFAULT_REPOSITORY_NAME = process.env.DEFAULT_REPOSITORY_NAME || 'systema_globalis';
+exports.DEFAULT_REPOSITORY_BRANCH = process.env.DEFAULT_REPOSITORY_BRANCH || 'master';
+exports.DEFAULT_REPOSITORY_HASH = 'HEAD';
function isSchemaQuery(query) {

@@ -37,0 +36,0 @@ const fromClause = get(query, 'from');

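As before, the renamed defaults are resolved from environment variables first and fall back to hard-coded values, so a deployment can point the validator at a different default repository without code changes; only DEFAULT_REPOSITORY_HASH is fixed at 'HEAD'. A small sketch, with an illustrative repository name and an assumed deep import path into the published lib folder:

// must be set before the module is first required, since the fallbacks are evaluated at load time
process.env.DEFAULT_REPOSITORY_NAME = 'my-org/my-ddf-repo'; // illustrative
process.env.DEFAULT_REPOSITORY_BRANCH = 'develop';

const helpers = require('ddf-query-validator/lib/helper.service'); // assumed path
console.log(helpers.DEFAULT_REPOSITORY_NAME);   // 'my-org/my-ddf-repo'
console.log(helpers.DEFAULT_REPOSITORY_BRANCH); // 'develop'
console.log(helpers.DEFAULT_REPOSITORY_HASH);   // 'HEAD' (not configurable via env)
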
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const isEmpty = require("lodash.isempty");

@@ -8,5 +9,7 @@ const isNil = require("lodash.isnil");

const size = require("lodash.size");
+const cloneDeep = require("lodash.clonedeep");
const values = require("lodash.values");
const keys = require("lodash.keys");
const map = require("lodash.map");
+const flatMap = require("lodash.flatmap");
const first = require("lodash.first");

@@ -33,3 +36,3 @@ const filter = require("lodash.filter");

...validateJoinStructure(query, options),
-...validateOrderByStructure(query, options)
+...validateOrderByStructure(query, options),
];

@@ -146,2 +149,14 @@ const isQueryValid = isEmpty(validationResult);

}
+function validateSubqueries(query, options) {
+return flatMap(query.join, (join, joinID) => tslib_1.__awaiter(this, void 0, void 0, function* () {
+return yield validateQueryStructure({
+select: { key: [join.key] },
+where: join.where,
+from: query.from === 'entities' ? 'concepts' : 'entities',
+dataset: query.dataset,
+branch: query.branch,
+commit: query.commit
+}, Object.assign({ joinID }, cloneDeep(options)));
+}));
+}
function checkIfSelectIsEmpty(selectClause) {

@@ -148,0 +163,0 @@ if (isNil(selectClause)) {
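The added validateSubqueries helper re-runs the structure validation for every entry under the query's join clause, flipping the from clause (an entities query validates its joins as concepts, anything else as entities) and carrying the dataset, branch and commit through. Note that its call site is still commented out in the TypeScript source further down (// ...validateSubqueries(query, options)), so it is not yet wired into validateQueryStructure. A sketch of the subquery it derives from one join entry, with illustrative values:

// an incoming query with a join clause (illustrative values)
const query = {
  from: 'entities',
  dataset: 'open-numbers/globalis',
  branch: 'master',
  commit: 'HEAD',
  join: {
    $geo: { key: 'geo', where: { 'is--country': true } }
  }
};

// for the '$geo' entry, validateSubqueries recurses with roughly this shape:
const derivedSubquery = {
  select: { key: ['geo'] },        // [join.key]
  where: { 'is--country': true },  // join.where
  from: 'concepts',                // 'entities' flips to 'concepts', anything else to 'entities'
  dataset: 'open-numbers/globalis',
  branch: 'master',
  commit: 'HEAD'
};
// validated via validateQueryStructure(derivedSubquery, Object.assign({ joinID: '$geo' }, cloneDeep(options)))
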

{
"name": "ddf-query-validator",
"version": "1.0.3",
"version": "1.0.4",
"description": "DDF query validator",
"main": "lib/index.js",
"scripts": {
"test": "npm run e2e",
"test": "npm run unit",
"test-travis": "npm run e2e && nyc report --reporter=text-lcov > coverage.lcov && codecov",
"pree2e": "npm i",
"e2e": "nyc mocha",
"tslint": "./node_modules/.bin/tslint -c ./tslint.json 'src/**/*.ts'",
"tsc": "./node_modules/.bin/tsc --project tsconfig.json && npm run tslint"
"tsc": "./node_modules/.bin/tsc --project tsconfig.json && npm run tslint",
"coverage:clean": "rm -rf coverage && rm -rf .nyc_output && rm -rf coverage.lcov",
"preunit": "npm run coverage:clean && npm i && npm run tslint && npm run tsc",
"unit": "nyc mocha"
},

@@ -40,2 +41,4 @@ "nyc": {

"lodash.filter": "4.6.0",
"lodash.find": "^4.6.0",
"lodash.findindex": "^4.6.0",
"lodash.first": "3.0.0",

@@ -56,3 +59,3 @@ "lodash.flatmap": "4.5.0",

"lodash.size": "4.2.0",
"lodash.startswith": "4.2.1",
"lodash.startswith": "^4.2.1",
"lodash.trimstart": "4.5.1",

@@ -76,4 +79,5 @@ "lodash.uniq": "4.5.0",

"tslint": "5.11.0",
"tslint-no-unused-expression-chai": "^0.1.3",
"typescript": "3.0.1"
}
}
import * as isNil from 'lodash.isnil';
import * as includes from 'lodash.includes';
import * as get from 'lodash.get';
-import * as path from 'path';
-import {
-DEFAULT_DATASET_BRANCH,
-DEFAULT_DATASET_COMMIT,
-DEFAULT_DATASET_DIR,
-DEFAULT_DATASET_NAME
-} from './helper.service';
-import { IReader } from './interfaces';
+import { DEFAULT_REPOSITORY_BRANCH, DEFAULT_REPOSITORY_HASH, DEFAULT_REPOSITORY_NAME } from './helper.service';
-export function getDatasetPath(basePath, queryParam) {
+export function getRepositoryPath (basePath, queryParam) {
const {

@@ -22,81 +15,44 @@ dataset,

-function getDatapackagePath(datasetPath): string {
-return path.resolve(datasetPath, 'datapackage.json');
+export function getFilePath (repositoryPath, filePath = 'datapackage.json'): string {
+return `${repositoryPath}/${filePath}`;
}
-function isDatasetPathAlreadyInBasePath(fileReader: IReader, basePath: string): Promise<boolean> {
-return new Promise((resolve) => {
-fileReader.readText(getDatapackagePath(basePath), (error) => {
-return resolve(!error);
-});
-});
-}
+export function extendQueryWithRepository (queryParam, config = {}): Error | void {
+// TODO: refactor unit tests
+// const REPOSITORY_DESCRIPTORS = get(config, 'repositoryDescriptors', {[DEFAULT_REPOSITORY]: {[DEFAULT_BRANCH]: [DEFAULT_HASH]}});
+const REPOSITORY_DESCRIPTORS = get(config, 'repositoryDescriptors', {});
+const IS_DEFAULT_DATASET = isNil(queryParam.dataset) ? 'default ' : '';
-export async function extendQueryParamWithDatasetProps(queryParam, options = {}): Promise<string | void> {
-const datasetsConfig = get(options, 'datasetsConfig', {
-[DEFAULT_DATASET_NAME]: {[DEFAULT_DATASET_BRANCH]: [DEFAULT_DATASET_COMMIT]},
-default: {
-dataset: DEFAULT_DATASET_NAME,
-branch: DEFAULT_DATASET_BRANCH,
-commit: DEFAULT_DATASET_COMMIT
+if (!IS_DEFAULT_DATASET) {
+const [originDataset, originBranch] = queryParam.dataset.split('#');
+if (!queryParam.branch && originBranch) {
+queryParam.branch = originBranch;
+queryParam.dataset = originDataset;
}
-});
+}
+const IS_DEFAULT_BRANCH = isNil(queryParam.branch) ? 'default ' : '';
+const IS_DEFAULT_COMMIT = isNil(queryParam.commit) ? 'default ' : '';
const {
-'default': {
-dataset: DEFAULT_DATASET,
-branch: DEFAULT_BRANCH,
-commit: DEFAULT_COMMIT
-}
-} = datasetsConfig;
-const {
-dataset: originDataset,
-branch: originBranch,
-commit: originCommit
+dataset = get(config, 'defaultRepository', DEFAULT_REPOSITORY_NAME),
+branch = get(config, 'defaultRepositoryBranch', DEFAULT_REPOSITORY_BRANCH),
+commit = get(config, 'defaultRepositoryCommit', DEFAULT_REPOSITORY_HASH)
} = queryParam;
-let {
-dataset = DEFAULT_DATASET,
-branch = DEFAULT_BRANCH,
-commit = DEFAULT_COMMIT
-} = queryParam;
-const basePath = get(options, 'basePath', DEFAULT_DATASET_DIR);
-const fileReader = get(options, 'fileReader');
-const datasetName = dataset;
-if (isNil(datasetsConfig[dataset])) {
-throw new Error(`No ${isNil(originDataset) ? 'default ' : ''}dataset '${dataset}' was found`);
+if (isNil(REPOSITORY_DESCRIPTORS[ dataset ])) {
+throw new Error(`No ${IS_DEFAULT_DATASET}dataset '${dataset}' was found`);
}
-if (isNil(datasetsConfig[dataset][branch])) {
-throw new Error(`No ${isNil(originBranch) ? 'default ' : ''}branch '${branch}' in ${isNil(originDataset) ? 'default ' : ''}dataset '${dataset}' was found`);
+if (isNil(REPOSITORY_DESCRIPTORS[ dataset ][ branch ])) {
+throw new Error(`No ${IS_DEFAULT_BRANCH}branch '${branch}' in ${IS_DEFAULT_DATASET}dataset '${dataset}' was found`);
}
-if (!includes(datasetsConfig[dataset][branch], commit)) {
-throw new Error(`No ${isNil(originCommit) ? 'default ' : ''}commit '${commit}' in ${isNil(originBranch) ? 'default ' : ''}branch '${branch}' in ${isNil(originDataset) ? 'default ' : ''}dataset '${dataset}' was found`);
+if (!includes(REPOSITORY_DESCRIPTORS[ dataset ][ branch ], commit)) {
+throw new Error(`No ${IS_DEFAULT_COMMIT}commit '${commit}' in ${IS_DEFAULT_BRANCH}branch '${branch}' in ${IS_DEFAULT_DATASET}dataset '${dataset}' was found`);
}
-let datasetPath;
-let datapackagePath;
+const repositoryPath = getRepositoryPath('', { dataset, branch, commit });
-try {
-const isAlreadyDataset = await isDatasetPathAlreadyInBasePath(fileReader, basePath);
-if (isAlreadyDataset) {
-dataset = basePath;
-branch = null;
-commit = null;
-datasetPath = basePath;
-datapackagePath = getDatapackagePath(basePath);
-} else {
-datasetPath = getDatasetPath(basePath, {dataset, branch, commit});
-datapackagePath = getDatapackagePath(datasetPath);
-}
-} catch (error) {
-throw error;
-}
-Object.assign(queryParam, {dataset, branch, commit});
-Object.assign(options, {datasetPath, datapackagePath, datasetName});
-return queryParam;
+Object.assign(queryParam, { repositoryPath });
}

@@ -41,3 +41,3 @@ import * as isEmpty from 'lodash.isempty';

if (!isQueryValid) {
-return reject(`Too many query definition errors [repo: ${query.dataset}]: \n* ${validationResult.join('\n* ')}`);
+return reject(`Too many query definition errors [repo: ${(options as any).basePath}]: \n* ${validationResult.join('\n* ')}`);
}

@@ -44,0 +44,0 @@

import * as get from 'lodash.get';
import * as includes from 'lodash.includes';
-export const SCHEMAS = new Set(['concepts.schema', 'entities.schema', 'datapoints.schema', '*.schema']);
+export const SCHEMAS = new Set([ 'concepts.schema', 'entities.schema', 'datapoints.schema', '*.schema' ]);
export const DATAPOINTS = 'datapoints';

@@ -36,8 +36,7 @@ export const ENTITIES = 'entities';

-export const DEFAULT_DATASET_NAME = process.env.DEFAULT_DATASET_NAME || 'systema_globalis';
-export const DEFAULT_DATASET_BRANCH = process.env.DEFAULT_DATASET_BRANCH || 'master';
-export const DEFAULT_DATASET_COMMIT = 'HEAD';
-export const DEFAULT_DATASET_DIR = process.env.DEFAULT_DATASET_DIR || './datasets';
+export const DEFAULT_REPOSITORY_NAME = process.env.DEFAULT_REPOSITORY_NAME || 'systema_globalis';
+export const DEFAULT_REPOSITORY_BRANCH = process.env.DEFAULT_REPOSITORY_BRANCH || 'master';
+export const DEFAULT_REPOSITORY_HASH = 'HEAD';
-export function isSchemaQuery(query) {
+export function isSchemaQuery (query) {
const fromClause = get(query, 'from');

@@ -47,3 +46,3 @@ return SCHEMAS.has(fromClause);

-export function isDatapointsQuery(query) {
+export function isDatapointsQuery (query) {
const fromClause = get(query, 'from');

@@ -53,3 +52,3 @@ return fromClause === DATAPOINTS;

-export function isEntitiesQuery(query) {
+export function isEntitiesQuery (query) {
const fromClause = get(query, 'from');

@@ -59,3 +58,3 @@ return fromClause === ENTITIES;

-export function isConceptsQuery(query) {
+export function isConceptsQuery (query) {
const fromClause = get(query, 'from');

@@ -67,12 +66,12 @@ return fromClause === CONCEPTS;

-export function isEntityDomainOrSet(conceptType: string, allowedValues: string[]): boolean {
+export function isEntityDomainOrSet (conceptType: string, allowedValues: string[]): boolean {
return includes(allowedValues, conceptType);
}
-export function isMeasure(conceptType: string): boolean {
-return includes([CONCEPT_TYPE_MEASURE], conceptType);
+export function isMeasure (conceptType: string): boolean {
+return includes([ CONCEPT_TYPE_MEASURE ], conceptType);
}
-export function isIndicator(conceptType: string): boolean {
-return includes([CONCEPT_TYPE_MEASURE, CONCEPT_TYPE_STRING], conceptType);
+export function isIndicator (conceptType: string): boolean {
+return includes([ CONCEPT_TYPE_MEASURE, CONCEPT_TYPE_STRING ], conceptType);
}

@@ -6,5 +6,7 @@ import * as isEmpty from 'lodash.isempty';

import * as size from 'lodash.size';
+import * as cloneDeep from 'lodash.clonedeep';
import * as values from 'lodash.values';
import * as keys from 'lodash.keys';
import * as map from 'lodash.map';
+import * as flatMap from 'lodash.flatmap';
import * as first from 'lodash.first';

@@ -40,3 +42,4 @@ import * as filter from 'lodash.filter';

...validateJoinStructure(query, options),
-...validateOrderByStructure(query, options)
+...validateOrderByStructure(query, options),
+// ...validateSubqueries(query, options)
];

@@ -217,2 +220,15 @@

+function validateSubqueries (query, options): string[] {
+return flatMap(query.join, async (join: {key: string, where: object}, joinID: string) => {
+return await validateQueryStructure({
+select: {key: [join.key]},
+where: join.where,
+from: query.from === 'entities' ? 'concepts' : 'entities',
+dataset: query.dataset,
+branch: query.branch,
+commit: query.commit
+}, Object.assign({joinID}, cloneDeep(options)));
+});
+}
// Common structure errors

@@ -219,0 +235,0 @@ function checkIfSelectIsEmpty (selectClause): string | void {

@@ -8,2 +8,3 @@ {

"rules": {
"no-unused-expression-chai": true,
"ban-types": false,

@@ -23,3 +24,5 @@ "quotemark": [true, "single"],

},
"rulesDirectory": []
}
"rulesDirectory": [
"tslint-no-unused-expression-chai"
]
}
