Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

ddf-validation

Package Overview
Dependencies
Maintainers
2
Versions
110
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ddf-validation - npm Package Compare versions

Comparing version 0.6.0 to 0.10.0

.github/ISSUE_TEMPLATE.md

103

index.js

@@ -8,2 +8,3 @@ #! /usr/bin/env node

const DdfIndexGenerator = require('./lib/ddf-definitions/ddf-index-generator');
const DdfJsonCorrector = require('./lib/ddf-definitions/ddf-json-corrector');
const DdfDataSet = require('./lib/ddf-definitions/ddf-data-set');

@@ -14,61 +15,77 @@ const ddfRules = require('./lib/ddf-rules');

if (utils.settings.isIndexGenerationMode === true) {
if (utils.settings.isIndexGenerationMode) {
new DdfIndexGenerator(utils.ddfRootFolder).writeIndex();
return;
}
if (utils.settings.isIndexGenerationMode === false) {
const ddfDataSet = new DdfDataSet(utils.ddfRootFolder);
if (utils.settings.isJsonAutoCorrectionMode) {
const ddfJsonCorrector = new DdfJsonCorrector(utils.ddfRootFolder);
let out = [];
ddfJsonCorrector.correct((correctorError, csvFileDescriptors) => {
if (correctorError) {
logger.notice(correctorError);
return;
}
ddfDataSet.load(() => {
ddfRules.forEach(ruleSet => {
Object.getOwnPropertySymbols(ruleSet).forEach(key => {
const result = ruleSet[key](ddfDataSet);
ddfJsonCorrector.write(csvFileDescriptors, writeError => {
logger.notice(writeError ? writeError : 'ok...');
});
});
if (!_.isArray(result) && !_.isEmpty(result)) {
out.push(result.view());
}
return;
}
if (_.isArray(result) && !_.isEmpty(result)) {
result.forEach(resultRecord => {
out.push(resultRecord.view());
});
}
});
});
const ddfDataSet = new DdfDataSet(utils.ddfRootFolder);
function prepareDataPointProcessor(detail) {
return cb => {
ddfDataSet.getDataPoint().loadDetail(detail, () => {
Object.getOwnPropertySymbols(ddfDataPointRules).forEach(key => {
const result = ddfDataPointRules[key](ddfDataSet, detail);
let out = [];
if (!_.isEmpty(result)) {
out = out.concat(result.map(issue => issue.view()));
}
});
ddfDataSet.load(() => {
ddfRules.forEach(ruleSet => {
Object.getOwnPropertySymbols(ruleSet).forEach(key => {
const result = ruleSet[key](ddfDataSet);
ddfDataSet.getDataPoint().removeAllData();
cb();
if (!_.isArray(result) && !_.isEmpty(result)) {
out.push(result.view());
}
if (_.isArray(result) && !_.isEmpty(result)) {
result.forEach(resultRecord => {
out.push(resultRecord.view());
});
};
}
}
});
});
const dataPointActions = [];
function prepareDataPointProcessor(detail) {
return cb => {
ddfDataSet.getDataPoint().loadDetail(detail, () => {
Object.getOwnPropertySymbols(ddfDataPointRules).forEach(key => {
const result = ddfDataPointRules[key](ddfDataSet, detail);
ddfDataSet.getDataPoint().details.forEach(detail => {
dataPointActions.push(prepareDataPointProcessor(detail));
});
if (!_.isEmpty(result)) {
out = out.concat(result.map(issue => issue.view()));
}
});
async.waterfall(dataPointActions, err => {
if (err) {
throw err;
}
ddfDataSet.getDataPoint().removeAllData();
cb();
});
};
}
logger.notice(JSON.stringify(out));
const dataPointActions = [];
ddfDataSet.dismiss();
});
ddfDataSet.getDataPoint().details.forEach(detail => {
dataPointActions.push(prepareDataPointProcessor(detail));
});
}
async.waterfall(dataPointActions, err => {
if (err) {
throw err;
}
logger.notice(JSON.stringify(out));
ddfDataSet.dismiss();
});
});

@@ -37,2 +37,3 @@ 'use strict';

let headerWasRead = false;
let lineNumber = 1;

@@ -49,4 +50,6 @@ fileStream.on('error', err => cb(err));

ddfRecord.$$source = _path;
ddfRecord.$$lineNumber = lineNumber;
collection.insert(ddfRecord);
ddfRecord = {};
lineNumber++;
})

@@ -53,0 +56,0 @@ .on('column', (key, value) => {

'use strict';
const _ = require('lodash');
const COLLECTION_NAME = 'concepts';

@@ -16,2 +17,6 @@

getDataByFiles() {
return _.groupBy(this.getAllData(), record => record.$$source);
}
getDictionary(concepts, field) {

@@ -18,0 +23,0 @@ const result = {};

'use strict';
const _ = require('lodash');
const COLLECTION_NAME = 'entities';

@@ -16,2 +17,6 @@

getDataByFiles() {
return _.groupBy(this.getAllData(), record => record.$$source);
}
getDataBy(condition) {

@@ -18,0 +23,0 @@ return this.collection.find(condition);

@@ -27,4 +27,4 @@ 'use strict';

result = new Issue(registry.CONCEPT_ID_IS_NOT_UNIQUE)
.fillPath(paths)
.fillData(nonUniqueConceptIds);
.setPath(paths)
.setData(nonUniqueConceptIds);
}

@@ -31,0 +31,0 @@

@@ -37,4 +37,4 @@ 'use strict';

const issue = new Issue(registry.DATA_POINT_VALUE_NOT_NUMERIC)
.fillPath(dataPointDetail.fileDescriptor.fullPath)
.fillData(data);
.setPath(dataPointDetail.fileDescriptor.fullPath)
.setData(data);

@@ -71,4 +71,4 @@ result.push(issue);

const issue = new Issue(registry.DATA_POINT_UNEXPECTED_ENTITY_VALUE)
.fillPath(dataPointDetail.fileDescriptor.fullPath)
.fillData(data);
.setPath(dataPointDetail.fileDescriptor.fullPath)
.setData(data);

@@ -98,4 +98,4 @@ result.push(issue);

const issue = new Issue(registry.DATA_POINT_UNEXPECTED_TIME_VALUE)
.fillPath(dataPointDetail.fileDescriptor.fullPath)
.fillData(data);
.setPath(dataPointDetail.fileDescriptor.fullPath)
.setData(data);

@@ -102,0 +102,0 @@ result.push(issue);

@@ -38,5 +38,5 @@ 'use strict';

const issue = new Issue(registry.ENTITY_HEADER_IS_NOT_CONCEPT)
.fillPath(detail.fileDescriptor.fullPath)
.fillData(recordParam)
.fillSuggestions(suggestions);
.setPath(detail.fileDescriptor.fullPath)
.setData(recordParam)
.setSuggestions(suggestions);

@@ -43,0 +43,0 @@ result.push(issue);

'use strict';
const _ = require('lodash');
const registry = require('./registry');

@@ -11,2 +12,134 @@ const Issue = require('./issue');

// Returns true when `json` parses as strict JSON, false otherwise.
function isValidJSON(json) {
  try {
    JSON.parse(json);
    return true;
  } catch (parseError) {
    return false;
  }
}
// Lower-cases `True`/`FALSE`-style boolean literals so they become valid JSON.
function correctBoolean(rawJson) {
  return rawJson.replace(/true|false/ig, token => token.toLowerCase());
}
// Removes any `)` that is immediately followed by `;` or a newline —
// presumably to neutralize executable `…);` sequences before the string is
// handed to the Function-based corrector (TODO confirm original intent).
function ensureJsonIsSafe(unsafeJson) {
  const terminatorPattern = /\)[;\n]/g;
  return unsafeJson.replace(terminatorPattern, '');
}
// Attempts to turn a "JSON-like" string (e.g. capitalized booleans, unquoted
// keys) into strict JSON by letting the JS engine evaluate it as an object
// literal and re-serializing it via JSON.stringify.
// Returns the corrected JSON string, or null when evaluation fails.
//
// SECURITY NOTE(review): `new Function` executes the (sanitized) input as
// code. `ensureJsonIsSafe` strips `);`-style sequences first, but this is
// still effectively eval on file contents — only run on trusted datasets.
function correctJSON(wrongJSON) {
  let result = null;
  /*eslint-disable no-new-func */
  try {
    const convertFun = new Function(`return JSON.stringify(${ensureJsonIsSafe(correctBoolean(wrongJSON))})`);
    result = convertFun();
  } catch (ex) {
    // Any evaluation/serialization failure means the value is uncorrectable.
    result = null;
  }
  /*eslint-enable no-new-func */
  return result;
}
// Builds a map from each character in `chars` to its number of
// occurrences inside `string`.
function charCount(string, chars) {
  const counts = {};
  for (const ch of chars) {
    counts[ch] = string.split(ch).length - 1;
  }
  return counts;
}
// Heuristic: a string "looks like" JSON when it is non-empty and contains a
// balanced, non-zero number of square brackets or of curly braces.
function isJsonLike(str) {
  if (_.isEmpty(str)) {
    return false;
  }
  const counts = charCount(str, ['[', ']', '{', '}']);
  const hasBalancedSquareBrackets = counts['['] > 0 && counts['['] === counts[']'];
  const hasBalancedBraces = counts['{'] > 0 && counts['{'] === counts['}'];
  return hasBalancedSquareBrackets || hasBalancedBraces;
}
// Scans concept and entity data (grouped by source file) and returns a map
// of fileName -> array of column names whose values look like JSON.
// Bookkeeping fields added by the loader and LokiJS are ignored.
function detectColumnsHavingJson(ddfDataSet) {
  const conceptDataByFiles = ddfDataSet.getConcept().getDataByFiles();
  const entityDataByFiles = ddfDataSet.getEntity().getDataByFiles();
  const dataByFiles = _.extend(conceptDataByFiles, entityDataByFiles);
  const serviceFields = ['$$source', '$$lineNumber', 'meta', '$loki'];
  const result = {};
  Object.keys(dataByFiles).forEach(fileName => {
    const records = dataByFiles[fileName];
    const columnNames = Object.keys(_.head(records))
      .filter(key => !_.includes(serviceFields, key));
    // `some` stops at the first JSON-like value; the old loop kept counting
    // through every record per column even after a hit.
    result[fileName] = columnNames
      .filter(column => records.some(record => isJsonLike(record[column])));
  });
  return result;
}
// A value can be corrected when it is present, belongs to the given source
// file, and does not already parse as valid JSON.
function isCorrectionPossible(record, column, fileName) {
  const value = record[column];
  return value && record.$$source === fileName && !isValidJSON(value);
}
// Emits an INCORRECT_JSON_FIELD issue for every record/column pair in
// `ddfDataWrapper` whose value looks like broken JSON. When an automatic
// correction can be computed, the issue carries it as a suggestion and is
// downgraded to a warning.
function getIncorrectJsonIssues(ddfDataSet, ddfDataWrapper) {
  const issues = [];
  const columnsHavingJson = detectColumnsHavingJson(ddfDataSet);
  for (const record of ddfDataWrapper.getAllData()) {
    for (const fileName of Object.keys(columnsHavingJson)) {
      for (const jsonColumn of columnsHavingJson[fileName]) {
        if (!isCorrectionPossible(record, jsonColumn, fileName)) {
          continue;
        }
        const data = {
          column: jsonColumn,
          // $$lineNumber is 0-based relative to the header; +1 for display
          line: record.$$lineNumber + 1,
          value: record[jsonColumn]
        };
        const correctedJSON = correctJSON(record[jsonColumn]);
        const issue = new Issue(registry.INCORRECT_JSON_FIELD)
          .setPath(record.$$source)
          .setData(data);
        if (correctedJSON) {
          issue.setSuggestions([correctedJSON]).warning();
        }
        issues.push(issue);
      }
    }
  }
  return issues;
}
module.exports = {

@@ -19,3 +152,3 @@ [registry.NON_DDF_DATA_SET]: ddfDataSet => {

const issue = new Issue(registry.NON_DDF_DATA_SET)
.fillPath(ddfDataSet.ddfRoot.path);
.setPath(ddfDataSet.ddfRoot.path);

@@ -29,4 +162,7 @@ result.push(issue);

.map(directoryDescriptor => new Issue(registry.NON_DDF_FOLDER)
.fillPath(directoryDescriptor.dir)
.warning())
.setPath(directoryDescriptor.dir)
.warning()),
[registry.INCORRECT_JSON_FIELD]: ddfDataSet =>
getIncorrectJsonIssues(ddfDataSet, ddfDataSet.getConcept())
.concat(getIncorrectJsonIssues(ddfDataSet, ddfDataSet.getEntity()))
};

@@ -14,3 +14,3 @@ 'use strict';

.filter(issue => issue && issue.type === registry.INCORRECT_FILE)
.map(issue => new Issue(issue.type).fillPath(issue.path).fillData(issue.data))
.map(issue => new Issue(issue.type).setPath(issue.path).setData(issue.data))
)

@@ -21,3 +21,3 @@ ),

.map(directoryDescriptor => new Issue(registry.INDEX_IS_NOT_FOUND)
.fillPath(directoryDescriptor.dir).warning())
.setPath(directoryDescriptor.dir).warning())
};

@@ -11,3 +11,3 @@ 'use strict';

fillPath(path) {
setPath(path) {
this.path = path;

@@ -18,3 +18,3 @@

fillData(data) {
setData(data) {
this.data = data;

@@ -25,3 +25,3 @@

fillSuggestions(suggestions) {
setSuggestions(suggestions) {
if (suggestions) {

@@ -28,0 +28,0 @@ this.suggestions = suggestions;

@@ -7,2 +7,3 @@ 'use strict';

exports.INCORRECT_FILE = Symbol.for('INCORRECT_FILE');
exports.INCORRECT_JSON_FIELD = Symbol.for('INCORRECT_JSON_FIELD');
exports.CONCEPT_ID_IS_NOT_UNIQUE = Symbol.for('CONCEPT_ID_IS_NOT_UNIQUE');

@@ -19,2 +20,3 @@ exports.ENTITY_HEADER_IS_NOT_CONCEPT = Symbol.for('ENTITY_HEADER_IS_NOT_CONCEPT');

[exports.INCORRECT_FILE]: 'Incorrect file',
[exports.INCORRECT_JSON_FIELD]: 'Incorrect JSON field',
[exports.CONCEPT_ID_IS_NOT_UNIQUE]: 'Concept Id is not unique',

@@ -21,0 +23,0 @@ [exports.ENTITY_HEADER_IS_NOT_CONCEPT]: 'Entity header is not correct',

@@ -8,5 +8,7 @@ 'use strict';

.demand(ROOT_PARAMETER_IS_REQUIRED)
.example('$0 ../ddf-example', 'validate DDF datasets for the root')
.example('$0 ../ddf-example -i', 'generate ddf--index file')
.example('$0 ../ddf-example -j', 'fix JSONs for this DDF dataset')
.describe('i', 'Generate index file')
.describe('c', 'Console (non UI) output')
.describe('j', 'Fix wrong JSONs')
.argv;

@@ -23,3 +25,4 @@

settings.isIndexGenerationMode = !!argv.i;
settings.isJsonAutoCorrectionMode = !!argv.j;
return settings;
};

@@ -14,2 +14,22 @@ 'use strict';

// Copies `source` to `target` via streams, invoking `onFileCopied` exactly
// once: with an error on failure, or with no argument on success.
function copyFile(source, target, onFileCopied) {
  const readStream = fs.createReadStream(source);
  const writeStream = fs.createWriteStream(target);
  let callbackFired = false;
  // Guard: both streams can error, but the caller must hear back only once.
  const finish = err => {
    if (!callbackFired) {
      onFileCopied(err);
      callbackFired = true;
    }
  };
  readStream.on('error', err => finish(err));
  writeStream.on('error', err => finish(err));
  writeStream.on('close', () => finish());
  readStream.pipe(writeStream);
}
function norm(folder) {

@@ -75,9 +95,9 @@ let normFolder = folder;

function writeFile(path, content, cb) {
function writeFile(path, content, onFileWrote) {
fs.writeFile(path, content, err => {
if (err) {
return cb(err);
return onFileWrote(err);
}
cb();
onFileWrote();
});

@@ -116,3 +136,3 @@ }

function readFile(filePath, cb) {
function readFile(filePath, onFileRead) {
const csvStream = csv.createStream(CSV_OPTIONS);

@@ -124,3 +144,3 @@ const fileStream = fs.createReadStream(filePath);

fileStream.on('error', err => cb(err));
fileStream.on('error', err => onFileRead(err));
fileStream.on('readable', () => {

@@ -130,3 +150,3 @@ fileStream

.on('error', err => {
cb(err);
onFileRead(err);
})

@@ -144,6 +164,24 @@ .on('data', () => {

fileStream.on('end', () => {
cb(null, content);
onFileRead(null, content);
});
}
// Creates a `<filePath>.backup` copy of an existing regular file, then
// invokes `onBackupCreated` (no arguments).
//
// NOTE(review): stat/copy failures are thrown from inside async callbacks,
// which the caller cannot catch — this will crash the process; consider
// passing the error to the callback instead.
// NOTE(review): when `filePath` exists but is not a regular file,
// `onBackupCreated` is never invoked — confirm callers can tolerate that.
function backupFile(filePath, onBackupCreated) {
  fs.stat(filePath, (statErr, stats) => {
    if (statErr) {
      throw statErr;
    }
    if (stats.isFile()) {
      copyFile(filePath, filePath + '.backup', fileErr => {
        if (fileErr) {
          throw fileErr;
        }
        onBackupCreated();
      });
    }
  });
}
exports.norm = norm;

@@ -155,1 +193,2 @@ exports.walkDir = walk;

exports.readFile = readFile;
exports.backupFile = backupFile;
{
"name": "ddf-validation",
"version": "0.6.0",
"version": "0.10.0",
"description": "DDF validion tool",

@@ -13,3 +13,8 @@ "main": "index.js",

"test-travis": "npm run eslint && ./node_modules/.bin/istanbul cover ./node_modules/mocha/bin/_mocha -- -R spec test/**/*.spec.js && ./node_modules/.bin/codecov",
"eslint": "./node_modules/.bin/eslint --ignore-path .gitignore --ext js --fix . .config"
"eslint": "./node_modules/.bin/eslint --ignore-path .gitignore --ext js --fix . .config",
"changelog": "./node_modules/.bin/conventional-changelog -i CHANGELOG.md -s -p angular",
"github-release": "./node_modules/.bin/conventional-github-releaser -p angular",
"preversion": "npm test",
"version": "npm run changelog && git add CHANGELOG.md",
"postversion": "git push origin master && git push --tags && npm run github-release"
},

@@ -28,3 +33,3 @@ "author": "Dmitriy Shekhovtsov<valorkin@gmail.com>",

"csv-stream": "0.1.3",
"ddf-time-utils": "^0.1.0",
"ddf-time-utils": "0.1.1",
"fs": "0.0.2",

@@ -44,2 +49,6 @@ "json2csv": "3.3.0",

"codecov": "1.0.1",
"compare-version": "0.1.2",
"conventional-changelog": "1.1.0",
"conventional-changelog-cli": "1.1.1",
"conventional-github-releaser": "1.1.2",
"eslint": "2.8.0",

@@ -49,7 +58,6 @@ "eslint-config-valorsoft": "0.0.10",

"mocha": "2.4.5",
"shelljs": "0.7.0",
"sinon": "1.17.3",
"sinon-chai": "2.8.0",
"shelljs": "0.6.0",
"compare-version": "0.1.2"
"sinon-chai": "2.8.0"
}
}

@@ -28,1 +28,40 @@ # ddf-validation

[you can see it here](doc/developer-guide.md)
## Release
1. `npm run changelog` - generates content for `CHANGELOG.md` file with changes that have happened since last release
2. `npm version` - this one is a bit more complicated. Let's start with what it needs in order to run.
- `CONVENTIONAL_GITHUB_RELEASER_TOKEN` environment variable should be set up for this command:
Example: `CONVENTIONAL_GITHUB_RELEASER_TOKEN=aaaaaaaaaabbbbbbbbbbccccccccccffffffffff npm version minor`
- this command understands following parameters:
- `major` (if the current version is **0.0.0**, applying this option changes it to **1.0.0**).
Example:
```
CONVENTIONAL_GITHUB_RELEASER_TOKEN=aaaaaaaaaabbbbbbbbbbccccccccccffffffffff npm version major
```
- `minor` (if the current version is **0.0.0**, applying this option changes it to **0.1.0**)
Example:
```
CONVENTIONAL_GITHUB_RELEASER_TOKEN=aaaaaaaaaabbbbbbbbbbccccccccccffffffffff npm version minor
```
- `patch` (if the current version is **0.0.0**, applying this option changes it to **0.0.1**)
Example:
```
CONVENTIONAL_GITHUB_RELEASER_TOKEN=aaaaaaaaaabbbbbbbbbbccccccccccffffffffff npm version patch
```
During the release process two files will be changed and pushed to github:
1. CHANGELOG.md - because of added history.
2. package.json - because of bumped version.
**Note:** `aaaaaaaaaabbbbbbbbbbccccccccccffffffffff` is a fake token. In order to generate a proper one, follow this [github tutorial](https://help.github.com/articles/creating-an-access-token-for-command-line-use)
**Important note:** you should merge the `development` branch into `master` and **perform `npm version` on the `master` branch** according to our [gitflow](https://github.com/valor-software/valor-style-guides/tree/master/gitflow)
**Even more important note:** while generating token (using tutorial given above) you need to choose which permissions should be granted to it. For our *release purposes* you need to choose all permissions under the section `repo`

@@ -35,2 +35,12 @@ 'use strict';

});
it('there should be no issues for "INCORRECT_JSON_FIELD" rule', done => {
ddfDataSet.load(() => {
const result = generalRules[rulesRegistry.INCORRECT_JSON_FIELD](ddfDataSet);
expect(result.length).to.equal(0);
done();
});
});
});

@@ -41,4 +51,5 @@

const ddfDataSet = new DdfDataSet(folder);
const expectedRules = [rulesRegistry.NON_DDF_DATA_SET, rulesRegistry.NON_DDF_FOLDER];
Object.getOwnPropertySymbols(generalRules).forEach(generalRuleKey => {
expectedRules.forEach(generalRuleKey => {
it(`one issue should be detected for "${Symbol.keyFor(generalRuleKey)}" rule`, done => {

@@ -77,2 +88,76 @@ ddfDataSet.load(() => {

});
// Rule tests for INCORRECT_JSON_FIELD against a fixture dataset containing
// four broken JSON fields: the first three are auto-correctable (warnings),
// the last one is not (error).
describe(`when concepts in DDF folder contain wrong JSON fields
(fixtures/rules-cases/incorrect-json-field)`, () => {
  const folder = './test/fixtures/rules-cases/incorrect-json-field';
  const ddfDataSet = new DdfDataSet(folder);
  it('4 issues should be found', done => {
    ddfDataSet.load(() => {
      const EXPECTED_ISSUES_QUANTITY = 4;
      const result = generalRules[rulesRegistry.INCORRECT_JSON_FIELD](ddfDataSet);
      expect(result.length).to.equal(EXPECTED_ISSUES_QUANTITY);
      done();
    });
  });
  it('all of issues should be a valid type', done => {
    ddfDataSet.load(() => {
      const result = generalRules[rulesRegistry.INCORRECT_JSON_FIELD](ddfDataSet);
      result.forEach(issue => {
        expect(issue.type).to.equal(rulesRegistry.INCORRECT_JSON_FIELD);
      });
      done();
    });
  });
  it('suggestion for 3 first issues should be expected', done => {
    ddfDataSet.load(() => {
      const LAST_WARNING_INDEX = 2;
      const expectedSuggestions = [
        '{"selectable":false,"palette":{"sub_saharan_africa":"#4e7af0","east_asia_pacific":"#f03838",' +
        '"america":"#ebcc21","south_asia":"#35d1d1","middle_east_north_africa":"#5be56b",' +
        '"europe_central_asia":"#f49d37"}}',
        '["ordinal"]',
        '{"palette":{"0":"#62CCE3","1":"#B4DE79","2":"#E1CE00","3":"#F77481"}}'
      ];
      const result = generalRules[rulesRegistry.INCORRECT_JSON_FIELD](ddfDataSet);
      for (let count = 0; count <= LAST_WARNING_INDEX; count++) {
        const suggestion = _.head(result[count].suggestions);
        expect(suggestion).to.equal(expectedSuggestions[count]);
      }
      done();
    });
  });
  it('3 first issues should be warnings', done => {
    ddfDataSet.load(() => {
      const LAST_WARNING_INDEX = 2;
      const result = generalRules[rulesRegistry.INCORRECT_JSON_FIELD](ddfDataSet);
      expect(_.every(result.slice(0, LAST_WARNING_INDEX + 1), val => val.isWarning)).to.equal(true);
      done();
    });
  });
  it('last issue should be an error', done => {
    ddfDataSet.load(() => {
      const result = generalRules[rulesRegistry.INCORRECT_JSON_FIELD](ddfDataSet);
      // Fix: `_.tail` returns all-but-first (an array), so `isWarning` was
      // always undefined and this assertion passed vacuously; `_.last`
      // fetches the actual final issue.
      const lastIssue = _.last(result);
      expect(!!lastIssue.isWarning).to.equal(false);
      done();
    });
  });
});
});
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc