hunspell-reader
Advanced tools
Comparing version 3.0.1 to 3.1.1
# Release Notes | ||
## 3.1.0 | ||
- Support limiting the max depth to apply suffixes. This allows the reader to read the Hungarian dictionary. | ||
## 3.0.0 | ||
@@ -35,2 +38,2 @@ - **Breaking Change** dropping support for Node 8 and 9 | ||
## 1.0.0 | ||
- Initial release. | ||
- Initial release. |
@@ -16,3 +16,3 @@ import { Converter } from './converter'; | ||
replace: RegExp; | ||
extra?: string[]; | ||
extra?: string; | ||
} | ||
@@ -104,3 +104,6 @@ export interface SubstitutionSet { | ||
protected _iConv: Converter; | ||
private _maxSuffixDepth; | ||
constructor(affInfo: AffInfo); | ||
get maxSuffixDepth(): number; | ||
set maxSuffixDepth(value: number); | ||
/** | ||
@@ -114,4 +117,4 @@ * Takes a line from a hunspell.dic file and applies the rules found in the aff file. | ||
*/ | ||
applyRulesToWord(affWord: AffWord): AffWord[]; | ||
applyAffixesToWord(affixRules: Fx[], affWord: AffWord): AffWord[]; | ||
applyRulesToWord(affWord: AffWord, remainingDepth: number): AffWord[]; | ||
applyAffixesToWord(affixRules: Fx[], affWord: AffWord, remainingDepth: number): AffWord[]; | ||
applyAffixToWord(affix: Fx, affWord: AffWord, combinableSfx: string): AffWord[]; | ||
@@ -122,4 +125,4 @@ substitute(affix: Fx, affWord: AffWord, sub: Substitution): AffWord; | ||
separateRules(rules: string): string[]; | ||
readonly iConv: Converter; | ||
readonly oConv: Converter; | ||
get iConv(): Converter; | ||
get oConv(): Converter; | ||
} | ||
@@ -126,0 +129,0 @@ export declare function processRules(affInfo: AffInfo): Map<string, Rule>; |
@@ -9,6 +9,7 @@ "use strict"; | ||
const log = false; | ||
; | ||
const DefaultMaxDepth = 5; | ||
class Aff { | ||
constructor(affInfo) { | ||
this.affInfo = affInfo; | ||
this._maxSuffixDepth = DefaultMaxDepth; | ||
this.rules = processRules(affInfo); | ||
@@ -18,2 +19,8 @@ this._iConv = new converter_1.Converter(affInfo.ICONV || []); | ||
} | ||
get maxSuffixDepth() { | ||
return this._maxSuffixDepth; | ||
} | ||
set maxSuffixDepth(value) { | ||
this._maxSuffixDepth = value; | ||
} | ||
/** | ||
@@ -26,4 +33,4 @@ * Takes a line from a hunspell.dic file and applies the rules found in the aff file. | ||
const [word, rules = ''] = lineLeft.split('/', 2); | ||
return this.applyRulesToWord(asAffWord(word, rules)) | ||
.map(affWord => (Object.assign({}, affWord, { word: this._oConv.convert(affWord.word) }))); | ||
return this.applyRulesToWord(asAffWord(word, rules), this.maxSuffixDepth) | ||
.map(affWord => (Object.assign(Object.assign({}, affWord), { word: this._oConv.convert(affWord.word) }))); | ||
} | ||
@@ -33,3 +40,3 @@ /** | ||
*/ | ||
applyRulesToWord(affWord) { | ||
applyRulesToWord(affWord, remainingDepth) { | ||
const { word, base, suffix, prefix, dic } = affWord; | ||
@@ -41,3 +48,3 @@ const allRules = this.getMatchingRules(affWord.rules); | ||
rulesApplied: [acc.rulesApplied, rule.id].join(' '), | ||
flags: Object.assign({}, acc.flags, rule.flags), | ||
flags: Object.assign(Object.assign({}, acc.flags), rule.flags), | ||
}), { rulesApplied: affWord.rulesApplied, flags: affWord.flags }); | ||
@@ -49,3 +56,3 @@ const rules = this.joinRules(allRules.filter(rule => !rule.flags).map(rule => rule.id)); | ||
wordWithFlags, | ||
...this.applyAffixesToWord(affixRules, Object.assign({}, wordWithFlags, { rules })) | ||
...this.applyAffixesToWord(affixRules, Object.assign(Object.assign({}, wordWithFlags), { rules }), remainingDepth) | ||
] | ||
@@ -55,3 +62,6 @@ .filter(({ flags }) => !flags.isNeedAffix) | ||
} | ||
applyAffixesToWord(affixRules, affWord) { | ||
applyAffixesToWord(affixRules, affWord, remainingDepth) { | ||
if (remainingDepth <= 0) { | ||
return []; | ||
} | ||
const combineableRules = affixRules | ||
@@ -65,3 +75,3 @@ .filter(rule => rule.type === 'SFX') | ||
.reduce((a, b) => a.concat(b), []) | ||
.map(affWord => this.applyRulesToWord(affWord)) | ||
.map(affWord => this.applyRulesToWord(affWord, remainingDepth - 1)) | ||
.reduce((a, b) => a.concat(b), []); | ||
@@ -75,6 +85,6 @@ return r; | ||
: ''; | ||
const flags = Object.assign({}, affWord.flags, { isNeedAffix: false }); | ||
const flags = Object.assign(Object.assign({}, affWord.flags), { isNeedAffix: false }); | ||
const matchingSubstitutions = [...affix.substitutionSets.values()] | ||
.filter(sub => sub.match.test(word)); | ||
const partialAffWord = Object.assign({}, affWord, { flags, rules: combineRules }); | ||
const partialAffWord = Object.assign(Object.assign({}, affWord), { flags, rules: combineRules }); | ||
return matchingSubstitutions | ||
@@ -136,7 +146,7 @@ .map(sub => sub.substitutions) | ||
case 'long': | ||
return rules.replace(/(..)/g, '$1//').split('//').slice(0, -1); | ||
return [...new Set(rules.replace(/(..)/g, '$1//').split('//').slice(0, -1))]; | ||
case 'num': | ||
return rules.split(','); | ||
return [...new Set(rules.split(','))]; | ||
} | ||
return rules.split(''); | ||
return [...new Set(rules.split(''))]; | ||
} | ||
@@ -199,3 +209,3 @@ get iConv() { | ||
function affWordToColoredString(affWord) { | ||
return util.inspect(Object.assign({}, affWord, { flags: flagsToString(affWord.flags) }), { showHidden: false, depth: 5, colors: true }).replace(/(\s|\n|\r)+/g, ' '); | ||
return util.inspect(Object.assign(Object.assign({}, affWord), { flags: flagsToString(affWord.flags) }), { showHidden: false, depth: 5, colors: true }).replace(/(\s|\n|\r)+/g, ' '); | ||
} | ||
@@ -202,0 +212,0 @@ exports.affWordToColoredString = affWordToColoredString; |
@@ -1,2 +0,2 @@ | ||
import { AffInfo, Aff } from './aff'; | ||
import { AffInfo, Aff, Fx } from './aff'; | ||
export interface ConvEntry { | ||
@@ -6,4 +6,30 @@ from: string; | ||
} | ||
declare function tablePfxOrSfx(fieldValue: Afx | undefined, line: AffLine): Afx; | ||
interface AffixRule { | ||
type: 'PFX' | 'SFX'; | ||
flag: string; | ||
stripping: string; | ||
replace: RegExp; | ||
affix: string; | ||
condition: RegExp; | ||
extra?: string; | ||
} | ||
/** | ||
* `PFX|SFX flag stripping prefix [condition [morphological_fields...]]` | ||
*/ | ||
declare function parseAffixRule(line: AffLine): AffixRule | undefined; | ||
export declare function parseAffFile(filename: string, encoding?: string): Promise<AffInfo>; | ||
export declare function parseAff(affFileContent: string, _encoding?: string): AffInfo; | ||
export declare function parseAffFileToAff(filename: string, encoding?: string): Promise<Aff>; | ||
declare function parseLine(line: string): AffLine; | ||
export interface AffLine { | ||
option: string; | ||
value: string | undefined; | ||
} | ||
declare type Afx = Map<string, Fx>; | ||
export declare const testing: { | ||
parseAffixRule: typeof parseAffixRule; | ||
tablePfxOrSfx: typeof tablePfxOrSfx; | ||
parseLine: typeof parseLine; | ||
}; | ||
export {}; |
"use strict"; | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
@@ -18,22 +19,26 @@ }); | ||
}; | ||
const emptyZeroRegex = /^0$/; | ||
const yesRegex = /[yY]/; | ||
const spaceRegex = /\s+/; | ||
const commentRegex = /(?:^\s*#.*)|(?:\s+#.*)/; | ||
const affixLine = /^\s*([^\s]+)\s+(.*)?$/; | ||
const UTF8 = 'UTF-8'; | ||
function convEntry(fieldValue, _, args) { | ||
function convEntry(fieldValue, line) { | ||
if (fieldValue === undefined) { | ||
return []; | ||
} | ||
const args = (line.value || '').split(spaceRegex); | ||
fieldValue.push({ from: args[0], to: args[1] }); | ||
return fieldValue; | ||
} | ||
function afEntry(fieldValue, _, args) { | ||
function afEntry(fieldValue, line) { | ||
if (fieldValue === undefined) { | ||
return ['']; | ||
} | ||
fieldValue.push(args[0]); | ||
if (line.value) { | ||
fieldValue.push(line.value); | ||
} | ||
return fieldValue; | ||
} | ||
function simpleTable(fieldValue, _, args) { | ||
function simpleTable(fieldValue, line) { | ||
const args = (line.value || '').split(spaceRegex); | ||
if (fieldValue === undefined) { | ||
@@ -47,4 +52,3 @@ const [count, ...extraValues] = args; | ||
} | ||
const regExpStartsWithPlus = /^\+/; | ||
function tablePfxOrSfx(fieldValue, _, args, type) { | ||
function tablePfxOrSfx(fieldValue, line) { | ||
/* | ||
@@ -60,45 +64,25 @@ Fields of an affix rules: | ||
(Dot signs an arbitrary character. Characters in braces sign an arbitrary character from the character subset. | ||
Dash hasn't got special meaning, but circumflex (^) next the first brace sets the complementer character set.) | ||
    Dash hasn't got special meaning, but circumflex (^) next the first brace sets the complementer character set.) | ||
(5) Optional morphological fields separated by spaces or tabulators. | ||
*/ | ||
const posCondition = 2; | ||
if (fieldValue === undefined) { | ||
fieldValue = new Map(); | ||
} | ||
const [subField, ...subValues] = args; | ||
const [subField] = (line.value || '').split(spaceRegex); | ||
if (!fieldValue.has(subField)) { | ||
const id = subField; | ||
const [combinable, count, ...extra] = subValues; | ||
fieldValue.set(subField, { | ||
id, | ||
type, | ||
combinable: !!combinable.match(yesRegex), | ||
count, | ||
extra, | ||
substitutionSets: new Map() | ||
}); | ||
const fx = parseAffixCreation(line); | ||
fieldValue.set(fx.id, fx); | ||
return fieldValue; | ||
} | ||
if (subValues.length < 2) { | ||
console.log(`Affix rule missing values: ${args.join(' ')}`); | ||
return; | ||
const rule = parseAffixRule(line); | ||
if (!rule) { | ||
console.log(`Affix rule missing values: ${line.option} ${line.value}`); | ||
return fieldValue; | ||
} | ||
if (regExpStartsWithPlus.test(subValues[posCondition] || '')) { | ||
// sometimes the condition is left off, but there are morphological fields | ||
// so we need to inject a '.'. | ||
subValues.splice(posCondition, 0, '.'); | ||
} | ||
const fixRuleSet = fieldValue.get(subField); | ||
const substitutionSets = fixRuleSet.substitutionSets; | ||
const [removeValue, attach, ruleAsString = '.', ...extraValues] = subValues; | ||
const [attachText, attachRules] = attach.split('/', 2); | ||
const extra = extraValues.length ? extraValues : undefined; | ||
const remove = removeValue.replace(emptyZeroRegex, ''); | ||
const insertText = attachText.replace(emptyZeroRegex, ''); | ||
const fixUp = fixRegex[type]; | ||
const replace = new RegExp(remove.replace(fixUp.m, fixUp.r)); | ||
const ruleAsString = rule.condition.source; | ||
if (!substitutionSets.has(ruleAsString)) { | ||
const match = new RegExp(ruleAsString.replace(fixUp.m, fixUp.r)); | ||
substitutionSets.set(ruleAsString, { | ||
match, | ||
match: rule.condition, | ||
substitutions: [], | ||
@@ -108,21 +92,74 @@ }); | ||
const substitutionSet = substitutionSets.get(ruleAsString); | ||
substitutionSet.substitutions.push({ remove, replace, attach: insertText, attachRules, extra }); | ||
const [attachText, attachRules] = rule.affix.split('/', 2); | ||
substitutionSet.substitutions.push({ | ||
remove: rule.stripping, replace: rule.replace, attach: attachText, attachRules, extra: rule.extra | ||
}); | ||
return fieldValue; | ||
} | ||
function asPfx(fieldValue, field, args) { | ||
return tablePfxOrSfx(fieldValue, field, args, 'PFX'); | ||
/** | ||
* Parse Affix creation line: | ||
* `PFX|SFX flag cross_product number` | ||
*/ | ||
function parseAffixCreation(line) { | ||
const [flag, combinable, count, ...extra] = (line.value || '').split(spaceRegex); | ||
const fx = { | ||
id: flag, | ||
type: line.option, | ||
combinable: !!combinable.match(yesRegex), | ||
count, | ||
extra, | ||
substitutionSets: new Map() | ||
}; | ||
return fx; | ||
} | ||
function asSfx(fieldValue, field, args) { | ||
return tablePfxOrSfx(fieldValue, field, args, 'SFX'); | ||
const affixRuleRegEx = /^(\S+)\s+(\S+)\s+(\S+)\s*(.*)/; | ||
const affixRuleConditionRegEx = /^((?:\[.*\]|\S+)+)\s*(.*)/; | ||
/** | ||
* `PFX|SFX flag stripping prefix [condition [morphological_fields...]]` | ||
*/ | ||
function parseAffixRule(line) { | ||
const [, flag, strip, affix, optional = ''] = (line.value || '').match(affixRuleRegEx) || []; | ||
if (!flag || !strip || !affix) { | ||
return undefined; | ||
} | ||
const [, rawCondition = '.', extra] = optional.match(affixRuleConditionRegEx) || []; | ||
const type = line.option === 'SFX' ? 'SFX' : 'PFX'; | ||
const condition = fixMatch(type, rawCondition); | ||
const affixRule = { | ||
type, | ||
flag, | ||
stripping: strip, | ||
replace: fixMatch(type, strip), | ||
affix, | ||
condition, | ||
extra, | ||
}; | ||
return affixRule; | ||
} | ||
function asString(_fieldValue, _field, args) { | ||
return args[0]; | ||
function fixMatch(type, match) { | ||
const exp = affixMatchToRegExpString(match); | ||
const fix = fixRegex[type]; | ||
return new RegExp(exp.replace(fix.m, fix.r)); | ||
} | ||
function asBoolean(_fieldValue, _field, args) { | ||
const [value = '1'] = args; | ||
function affixMatchToRegExpString(match) { | ||
if (match === '0') | ||
return ''; | ||
return match.replace(/([\-?*])/g, '\\$1'); | ||
} | ||
function asPfx(fieldValue, line) { | ||
return tablePfxOrSfx(fieldValue, line); | ||
} | ||
function asSfx(fieldValue, line) { | ||
return tablePfxOrSfx(fieldValue, line); | ||
} | ||
function asString(_fieldValue, line) { | ||
return line.value || ''; | ||
} | ||
function asBoolean(_fieldValue, line) { | ||
const { value = '1' } = line; | ||
const iValue = parseInt(value); | ||
return !!iValue; | ||
} | ||
function asNumber(_fieldValue, _field, args) { | ||
const [value = '0'] = args; | ||
function asNumber(_fieldValue, line) { | ||
const { value = '0' } = line; | ||
return parseInt(value); | ||
@@ -188,11 +225,11 @@ } | ||
.filter(line => line.trim() !== '') | ||
.map(line => line.split(spaceRegex)) | ||
.map(parseLine) | ||
.reduce((aff, line) => { | ||
const [field, ...args] = line; | ||
const field = line.option; | ||
const fn = affTableField[field]; | ||
if (fn) { | ||
aff[field] = fn(aff[field], field, args); | ||
aff[field] = fn(aff[field], line); | ||
} | ||
else { | ||
aff[field] = args; | ||
aff[field] = line.value; | ||
} | ||
@@ -208,2 +245,12 @@ return aff; | ||
exports.parseAffFileToAff = parseAffFileToAff; | ||
function parseLine(line) { | ||
const result = line.match(affixLine) || ['', '']; | ||
const [, option, value] = result; | ||
return { option, value: value || undefined }; | ||
} | ||
exports.testing = { | ||
parseAffixRule, | ||
tablePfxOrSfx, | ||
parseLine, | ||
}; | ||
//# sourceMappingURL=affReader.js.map |
#!/usr/bin/env node | ||
"use strict"; | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
@@ -37,2 +38,3 @@ }); | ||
.option('-p, --progress', 'Show progress.') | ||
.option('-m, --max_depth <limit>', 'Maximum depth to apply suffix rules.') | ||
.option('-n, --number <limit>', 'Limit the number of words to output.') | ||
@@ -53,9 +55,9 @@ .description('Output all the words in the <hunspell.dic> file.') | ||
function affWordToInfix(aff) { | ||
return Object.assign({}, aff, { word: aff.prefix + '<' + aff.base + '>' + aff.suffix }); | ||
return Object.assign(Object.assign({}, aff), { word: aff.prefix + '<' + aff.base + '>' + aff.suffix }); | ||
} | ||
function mapWord(map) { | ||
return (aff) => (Object.assign({}, aff, { word: map(aff.word) })); | ||
return (aff) => (Object.assign(Object.assign({}, aff), { word: map(aff.word) })); | ||
} | ||
function appendRules(aff) { | ||
return Object.assign({}, aff, { word: aff.word + '\t[' + aff.rulesApplied + ' ]\t' + '(' + aff.dic + ')' }); | ||
return Object.assign(Object.assign({}, aff), { word: aff.word + '\t[' + aff.rulesApplied + ' ]\t' + '(' + aff.dic + ')' }); | ||
} | ||
@@ -109,3 +111,3 @@ function writeSeqToFile(seq, outFile) { | ||
displayHelp = false; | ||
const { sort = false, unique = false, output: outputFile, lower_case: lowerCase = false, transform = true, infix = false, rules = false, progress: showProgress = false, } = options; | ||
const { sort = false, unique = false, output: outputFile, lower_case: lowerCase = false, transform = true, infix = false, rules = false, progress: showProgress = false, max_depth, } = options; | ||
logStream = outputFile ? process.stdout : process.stderr; | ||
@@ -123,2 +125,5 @@ const log = notify; | ||
const reader = yield IterableHunspellReader_1.IterableHunspellReader.createFromFiles(affFile, dicFile); | ||
if (max_depth && Number.parseInt(max_depth) >= 0) { | ||
reader.maxDepth = Number.parseInt(max_depth); | ||
} | ||
const transformers = []; | ||
@@ -153,3 +158,3 @@ if (infix) { | ||
.map(a => a.word + '\n'); | ||
const words = options.number ? allWords.take(options.number) : allWords; | ||
const words = options.number ? allWords.take(Number.parseInt(options.number)) : allWords; | ||
if (sort) { | ||
@@ -156,0 +161,0 @@ log('Sorting...'); |
import { Aff } from './aff'; | ||
import { Sequence } from 'gensequence'; | ||
import { WordInfo } from './types'; | ||
export interface WordInfo { | ||
word: string; | ||
rules: string; | ||
} | ||
export { WordInfo } from './types'; | ||
export interface HunspellSrcData { | ||
@@ -16,3 +13,5 @@ aff: Aff; | ||
constructor(src: HunspellSrcData); | ||
readonly dic: string[]; | ||
get dic(): string[]; | ||
set maxDepth(value: number); | ||
get maxDepth(): number; | ||
/** | ||
@@ -28,2 +27,8 @@ * @internal | ||
[Symbol.iterator](): Sequence<string>; | ||
/** | ||
* create an iterable sequence of the words in the dictionary. | ||
* | ||
* @param tapPreApplyRules -- optional function to be called before rules are applied to a word. | ||
* It is mostly used for monitoring progress. | ||
*/ | ||
seqAffWords(tapPreApplyRules?: (w: string) => any): Sequence<import("./aff").AffWord>; | ||
@@ -30,0 +35,0 @@ /** |
"use strict"; | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
@@ -24,2 +25,8 @@ }); | ||
} | ||
set maxDepth(value) { | ||
this.aff.maxSuffixDepth = value; | ||
} | ||
get maxDepth() { | ||
return this.aff.maxSuffixDepth; | ||
} | ||
/** | ||
@@ -45,2 +52,8 @@ * @internal | ||
[Symbol.iterator]() { return this.seqWords(); } | ||
/** | ||
* create an iterable sequence of the words in the dictionary. | ||
* | ||
* @param tapPreApplyRules -- optional function to be called before rules are applied to a word. | ||
* It is mostly used for monitoring progress. | ||
*/ | ||
seqAffWords(tapPreApplyRules) { | ||
@@ -47,0 +60,0 @@ const seq = gensequence_1.genSequence(this.src.dic); |
{ | ||
"name": "hunspell-reader", | ||
"version": "3.0.1", | ||
"version": "3.1.1", | ||
"description": "A library for reading Hunspell Dictionary Files", | ||
@@ -42,19 +42,19 @@ "bin": "bin.js", | ||
"devDependencies": { | ||
"@types/chai": "^4.1.7", | ||
"@types/fs-extra": "^7.0.0", | ||
"@types/mocha": "^5.2.6", | ||
"@types/node": "^10.14.7", | ||
"@types/chai": "^4.2.4", | ||
"@types/fs-extra": "^8.0.1", | ||
"@types/mocha": "^5.2.7", | ||
"@types/node": "^10.17.5", | ||
"chai": "^4.2.0", | ||
"coveralls": "^3.0.3", | ||
"mocha": "^6.1.4", | ||
"coveralls": "^3.0.7", | ||
"mocha": "^6.2.2", | ||
"nyc": "^14.1.1", | ||
"rimraf": "^2.6.3", | ||
"ts-node": "^8.1.0", | ||
"typescript": "^3.4.5" | ||
"rimraf": "^2.7.1", | ||
"ts-node": "^8.5.0", | ||
"typescript": "^3.7.2" | ||
}, | ||
"dependencies": { | ||
"commander": "^2.20.0", | ||
"commander": "^2.20.3", | ||
"fs-extra": "^8.0.1", | ||
"gensequence": "^2.1.2", | ||
"iconv-lite": "^0.4.24" | ||
"gensequence": "^2.1.3", | ||
"iconv-lite": "^0.5.0" | ||
}, | ||
@@ -61,0 +61,0 @@ "eslintConfig": { |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
45654
1102
+ Added iconv-lite@0.5.2 (transitive)
- Removed iconv-lite@0.4.24 (transitive)
Updated commander@^2.20.3
Updated gensequence@^2.1.3
Updated iconv-lite@^0.5.0