hunspell-reader
Comparing version 5.15.2 to 5.15.3
@@ -0,181 +1,3 @@ | ||
import type { AffInfo, AffWord, AffWordFlags, Fx, Rule, Substitution } from './affDef'; | ||
import { Converter } from './converter'; | ||
export interface Fx { | ||
type: string; | ||
id: string; | ||
combinable: boolean; | ||
substitutionSets: Substitutions; | ||
count?: string; | ||
extra?: string[]; | ||
} | ||
export declare type Substitutions = Map<string, SubstitutionSet>; | ||
export interface Substitution { | ||
remove: string; | ||
attach: string; | ||
attachRules?: string; | ||
replace: RegExp; | ||
extra?: string; | ||
} | ||
export interface SubstitutionSet { | ||
match: RegExp; | ||
substitutions: Substitution[]; | ||
} | ||
export interface Rep { | ||
match: string; | ||
replaceWith: string; | ||
} | ||
export interface Conv { | ||
from: string; | ||
to: string; | ||
} | ||
export interface AffTransformFlags { | ||
KEEPCASE?: string; | ||
WARN?: string; | ||
NEEDAFFIX?: string; | ||
FORCEUCASE?: string; | ||
FORBIDDENWORD?: string; | ||
NOSUGGEST?: string; | ||
COMPOUNDBEGIN?: string; | ||
COMPOUNDEND?: string; | ||
COMPOUNDFLAG?: string; | ||
COMPOUNDFORBIDFLAG?: string; | ||
COMPOUNDMIDDLE?: string; | ||
COMPOUNDPERMITFLAG?: string; | ||
ONLYINCOMPOUND?: string; | ||
} | ||
export interface AffInfo extends AffTransformFlags { | ||
SET: string; | ||
TRY?: string; | ||
KEY?: string; | ||
WORDCHARS?: string; | ||
NOSPLITSUGS?: boolean; | ||
MAXCPDSUGS?: number; | ||
ONLYMAXDIFF?: boolean; | ||
MAXDIFF?: number; | ||
BREAK?: number; | ||
FLAG?: string; | ||
MAP?: string[]; | ||
ICONV?: Conv[]; | ||
OCONV?: Conv[]; | ||
REP?: Rep[]; | ||
AF?: string[]; | ||
COMPOUNDMIN?: number; | ||
COMPOUNDRULE?: string[]; | ||
CHECKCOMPOUNDCASE?: boolean; | ||
CHECKCOMPOUNDDUP?: boolean; | ||
CHECKCOMPOUNDREP?: boolean; | ||
CHECKCOMPOUNDPATTERN?: string[][]; | ||
PFX?: Map<string, Fx>; | ||
SFX?: Map<string, Fx>; | ||
} | ||
export interface Rule { | ||
id: string; | ||
type: string; | ||
flags?: AffWordFlags; | ||
pfx?: Fx; | ||
sfx?: Fx; | ||
} | ||
/** | ||
* AffWordFlags are the flags applied to a word after the hunspell rules have been applied. | ||
* They are either `true` or `undefined`. | ||
*/ | ||
export interface AffWordFlags { | ||
/** | ||
* COMPOUNDFLAG flag | ||
* | ||
* Words signed with COMPOUNDFLAG may be in compound words (except when word shorter than COMPOUNDMIN). | ||
* Affixes with COMPOUNDFLAG also permits compounding of affixed words. | ||
* | ||
*/ | ||
isCompoundPermitted?: true; | ||
/** | ||
* COMPOUNDBEGIN flag | ||
* | ||
* Words signed with COMPOUNDBEGIN (or with a signed affix) may be first elements in compound words. | ||
* | ||
*/ | ||
canBeCompoundBegin?: true; | ||
/** | ||
* COMPOUNDMIDDLE flag | ||
* | ||
* Words signed with COMPOUNDMIDDLE (or with a signed affix) may be middle elements in compound words. | ||
* | ||
*/ | ||
canBeCompoundMiddle?: true; | ||
/** | ||
* COMPOUNDLAST flag | ||
* | ||
* Words signed with COMPOUNDLAST (or with a signed affix) may be last elements in compound words. | ||
* | ||
*/ | ||
canBeCompoundEnd?: true; | ||
/** | ||
* COMPOUNDPERMITFLAG flag | ||
* | ||
* Prefixes are allowed at the beginning of compounds, suffixes are allowed at the end of compounds by default. | ||
* Affixes with COMPOUNDPERMITFLAG may be inside of compounds. | ||
* | ||
*/ | ||
isOnlyAllowedInCompound?: true; | ||
/** | ||
* COMPOUNDFORBIDFLAG flag | ||
* | ||
* Suffixes with this flag forbid compounding of the affixed word. | ||
* | ||
*/ | ||
isCompoundForbidden?: true; | ||
/** | ||
* WARN flag | ||
* | ||
* This flag is for rare words, which are also often spelling mistakes, see option -r of command line Hunspell and FORBIDWARN. | ||
*/ | ||
isWarning?: true; | ||
/** | ||
* KEEPCASE flag | ||
* | ||
* Forbid uppercased and capitalized forms of words signed with KEEPCASE flags. Useful for special orthographies (measurements and | ||
* currency often keep their case in uppercased texts) and writing systems (e.g. keeping lower case of IPA characters). Also valuable | ||
* for words erroneously written in the wrong case. | ||
*/ | ||
isKeepCase?: true; | ||
/** | ||
* FORCEUCASE flag | ||
* | ||
* Last word part of a compound with flag FORCEUCASE forces capitalization of the whole compound word. | ||
* Eg. Dutch word "straat" (street) with FORCEUCASE flags will allowed only in capitalized compound forms, | ||
* according to the Dutch spelling rules for proper names. | ||
*/ | ||
isForceUCase?: true; | ||
/** | ||
* FORBIDDENWORD flag | ||
* | ||
* This flag signs forbidden word form. Because affixed forms are also forbidden, we can subtract a subset from set of the | ||
* accepted affixed and compound words. Note: useful to forbid erroneous words, generated by the compounding mechanism. | ||
*/ | ||
isForbiddenWord?: true; | ||
/** | ||
* NOSUGGEST flag | ||
* | ||
* Words signed with NOSUGGEST flag are not suggested (but still accepted when typed correctly). Proposed flag for vulgar | ||
* and obscene words (see also SUBSTANDARD). | ||
*/ | ||
isNoSuggest?: true; | ||
/** | ||
* NEEDAFFIX flag | ||
* | ||
* This flag signs virtual stems in the dictionary, words only valid when affixed. Except, if the dictionary word has a homonym | ||
* or a zero affix. NEEDAFFIX works also with prefixes and prefix + suffix combinations (see tests/pseudoroot5.*). | ||
*/ | ||
isNeedAffix?: true; | ||
} | ||
export interface AffWord { | ||
word: string; | ||
rules: string; | ||
flags: AffWordFlags; | ||
rulesApplied: string; | ||
/** prefix + base + suffix == word */ | ||
base: string; | ||
suffix: string; | ||
prefix: string; | ||
dic: string; | ||
} | ||
/** The `word` field in a Converted AffWord has been converted using the OCONV mapping */ | ||
@@ -182,0 +4,0 @@ export declare type ConvertedAffWord = AffWord; |
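The `AffWord` declaration above documents the invariant `prefix + base + suffix == word`. As a rough illustration (the values below are hypothetical and not taken from any shipped dictionary), an entry produced by applying an English suffix rule might look like this:

```ts
import type { AffWord } from 'hunspell-reader';

// Hypothetical AffWord after a suffix rule has been applied; field values are illustrative only.
const walked: AffWord = {
    word: 'walked',
    rules: '',
    flags: {},
    rulesApplied: 'D',
    base: 'walk',
    suffix: 'ed',
    prefix: '',
    dic: 'walk/D',
};

// The documented invariant holds: prefix + base + suffix === word.
console.log(walked.prefix + walked.base + walked.suffix === walked.word); // true
```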
@@ -23,6 +23,6 @@ "use strict"; | ||
exports.debug = exports.filterAff = exports.compareAff = exports.asAffWord = exports.flagsToString = exports.affWordToColoredString = exports.logAffWord = exports.processRules = exports.Aff = void 0; | ||
const GS = __importStar(require("gensequence")); | ||
const gensequence_1 = require("gensequence"); | ||
const util = __importStar(require("util")); | ||
const converter_1 = require("./converter"); | ||
const gensequence_1 = require("gensequence"); | ||
const GS = __importStar(require("gensequence")); | ||
const util_1 = require("./util"); | ||
@@ -77,3 +77,3 @@ const log = false; | ||
const rules = this.joinRules(allRules.filter((rule) => !rule.flags).map((rule) => rule.id)); | ||
const affixRules = allRules.map((rule) => rule.sfx || rule.pfx).filter((a) => !!a); | ||
const affixRules = allRules.map((rule) => rule.sfx || rule.pfx).filter(util_1.isDefined); | ||
const wordWithFlags = { word, flags, rulesApplied, rules: '', base, suffix, prefix, dic }; | ||
@@ -146,6 +146,7 @@ return [wordWithFlags, ...this.applyAffixesToWord(affixRules, { ...wordWithFlags, rules }, remainingDepth)] | ||
const { AF = [] } = this.affInfo; | ||
const rulesToSplit = AF[rules] || rules; | ||
const idx = parseInt(rules, 10); | ||
const rulesToSplit = AF[idx] || rules; | ||
return this.separateRules(rulesToSplit) | ||
.map((key) => this.rules[key]) | ||
.filter((a) => !!a); | ||
.map((key) => this.rules.get(key)) | ||
.filter(util_1.isDefined); | ||
} | ||
@@ -181,3 +182,3 @@ joinRules(rules) { | ||
const sig = Object.entries(flags) | ||
.filter((e) => e[1]) | ||
.filter((e) => !!e[1]) | ||
.map((f) => flagToStringMap[f[0]]) | ||
@@ -197,2 +198,3 @@ .sort() | ||
.filter(([key, value]) => !!affFlag[key] && !!value) | ||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion | ||
.map(([key, value]) => ({ id: value, type: 'flag', flags: affFlag[key] })); | ||
@@ -203,3 +205,3 @@ const rules = sfxRules | ||
.reduce((acc, rule) => { | ||
acc[rule.id] = rule; | ||
acc.set(rule.id, rule); | ||
return acc; | ||
@@ -225,3 +227,3 @@ }, new Map()); | ||
}; | ||
const flagToStringMap = { | ||
const _FlagToStringMap = { | ||
isCompoundPermitted: 'C', | ||
@@ -240,2 +242,18 @@ canBeCompoundBegin: 'B', | ||
}; | ||
const _FlagToLongStringMap = { | ||
isCompoundPermitted: 'CompoundPermitted', | ||
canBeCompoundBegin: 'CompoundBegin', | ||
canBeCompoundMiddle: 'CompoundMiddle', | ||
canBeCompoundEnd: 'CompoundEnd', | ||
isOnlyAllowedInCompound: 'OnlyInCompound', | ||
isWarning: 'Warning', | ||
isKeepCase: 'KeepCase', | ||
isForceUCase: 'ForceUpperCase', | ||
isForbiddenWord: 'Forbidden', | ||
isNoSuggest: 'NoSuggest', | ||
isNeedAffix: 'NeedAffix', | ||
isCompoundForbidden: 'CompoundForbidden', | ||
}; | ||
const flagToStringMap = _FlagToStringMap; | ||
const flagToLongStringMap = _FlagToLongStringMap; | ||
function logAffWord(affWord, message) { | ||
@@ -259,9 +277,7 @@ /* istanbul ignore if */ | ||
function flagsToString(flags) { | ||
return (GS.sequenceFromObject(flags) | ||
return [...Object.entries(flags)] | ||
.filter(([, v]) => !!v) | ||
// convert the key to a string | ||
.map(([k]) => flagToStringMap[k]) | ||
.toArray() | ||
.map(([k]) => flagToLongStringMap[k]) | ||
.sort() | ||
.join('_')); | ||
.join(':'); | ||
} | ||
@@ -311,3 +327,3 @@ exports.flagsToString = flagsToString; | ||
} | ||
const { isCompoundPermitted, ...flags } = affWord.flags; | ||
const { isCompoundPermitted: _, ...flags } = affWord.flags; | ||
affWord.flags = flags; | ||
@@ -314,0 +330,0 @@ return affWord; |
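The `flagsToString` change above switches from single-letter codes joined with `_` to the long flag names joined with `:`. A standalone sketch of the new rendering (not the package's own export, which lives in `src/aff.ts`; the mapping below copies a few entries of `_FlagToLongStringMap`):

```ts
// Standalone sketch of the new flagsToString behaviour shown in the diff.
const flagToLongStringMap: Record<string, string> = {
    isKeepCase: 'KeepCase',
    isNoSuggest: 'NoSuggest',
    isForbiddenWord: 'Forbidden',
};

function flagsToString(flags: Record<string, true | undefined>): string {
    return Object.entries(flags)
        .filter(([, v]) => !!v)
        .map(([k]) => flagToLongStringMap[k])
        .sort()
        .join(':');
}

console.log(flagsToString({ isKeepCase: true, isNoSuggest: true })); // "KeepCase:NoSuggest"
```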
@@ -1,2 +0,3 @@ | ||
import { AffInfo, Aff, Fx } from './aff'; | ||
import { Aff } from './aff'; | ||
import type { AffInfo, Fx } from './affDef'; | ||
export interface ConvEntry { | ||
@@ -3,0 +4,0 @@ from: string; |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.testing = exports.parseAffFileToAff = exports.parseAff = exports.parseAffFile = void 0; | ||
const aff_1 = require("./aff"); | ||
const assert_1 = __importDefault(require("assert")); | ||
const fs_extra_1 = require("fs-extra"); | ||
const iconv_lite_1 = require("iconv-lite"); | ||
const aff_1 = require("./aff"); | ||
const util_1 = require("./util"); | ||
const fixRegex = { | ||
@@ -16,29 +21,52 @@ SFX: { m: /$/, r: '$' }, | ||
const UTF8 = 'UTF-8'; | ||
function convEntry(fieldValue, line) { | ||
if (fieldValue === undefined) { | ||
return []; | ||
} | ||
const args = (line.value || '').split(spaceRegex); | ||
fieldValue.push({ from: args[0], to: args[1] }); | ||
return fieldValue; | ||
function convEntry() { | ||
let fieldValue; | ||
return { | ||
addLine: (line) => { | ||
if (fieldValue === undefined) { | ||
fieldValue = []; | ||
return; | ||
} | ||
const args = (line.value || '').split(spaceRegex); | ||
fieldValue.push({ from: args[0], to: args[1] }); | ||
}, | ||
getValue: () => fieldValue, | ||
}; | ||
} | ||
function afEntry(fieldValue, line) { | ||
if (fieldValue === undefined) { | ||
return ['']; | ||
function afEntry() { | ||
let fieldValue; | ||
return { | ||
addLine: (line) => { | ||
if (fieldValue === undefined) { | ||
// Add empty entry because rules start at 1 | ||
fieldValue = ['']; | ||
return; | ||
} | ||
if (line.value) { | ||
fieldValue.push(line.value); | ||
} | ||
}, | ||
getValue: () => fieldValue, | ||
}; | ||
} | ||
function simpleTable(map) { | ||
let data; | ||
function getValue() { | ||
if (data === null || data === void 0 ? void 0 : data.values) | ||
return map(data.values); | ||
return undefined; | ||
} | ||
if (line.value) { | ||
fieldValue.push(line.value); | ||
function addLine(line) { | ||
const args = (line.value || '').split(spaceRegex); | ||
if (data === undefined) { | ||
const [count, ...extraValues] = args; | ||
const extra = extraValues.length ? extraValues : undefined; | ||
const values = []; | ||
data = { count, extra, values }; | ||
return; | ||
} | ||
data.values.push(args); | ||
} | ||
return fieldValue; | ||
return { addLine, getValue }; | ||
} | ||
function simpleTable(fieldValue, line) { | ||
const args = (line.value || '').split(spaceRegex); | ||
if (fieldValue === undefined) { | ||
const [count, ...extraValues] = args; | ||
const extra = extraValues.length ? extraValues : undefined; | ||
return { count, extra, values: [] }; | ||
} | ||
fieldValue.values.push(args); | ||
return fieldValue; | ||
} | ||
function tablePfxOrSfx(fieldValue, line) { | ||
@@ -73,2 +101,3 @@ /* | ||
const fixRuleSet = fieldValue.get(subField); | ||
(0, assert_1.default)(fixRuleSet); | ||
const substitutionSets = fixRuleSet.substitutionSets; | ||
@@ -83,2 +112,3 @@ const ruleAsString = rule.condition.source; | ||
const substitutionSet = substitutionSets.get(ruleAsString); | ||
(0, assert_1.default)(substitutionSet); | ||
const [attachText, attachRules] = rule.affix.split('/', 2); | ||
@@ -149,20 +179,43 @@ substitutionSet.substitutions.push({ | ||
} | ||
function asPfx(fieldValue, line) { | ||
return tablePfxOrSfx(fieldValue, line); | ||
function collectFx() { | ||
let value; | ||
function addLine(line) { | ||
value = tablePfxOrSfx(value, line); | ||
} | ||
return { | ||
addLine, | ||
getValue: () => value, | ||
}; | ||
} | ||
function asSfx(fieldValue, line) { | ||
return tablePfxOrSfx(fieldValue, line); | ||
const asPfx = collectFx; | ||
const asSfx = collectFx; | ||
const asString = () => collectPrimitive((v) => v, ''); | ||
const asBoolean = () => collectPrimitive((v) => !!parseInt(v), '1'); | ||
const asNumber = () => collectPrimitive(parseInt, '0'); | ||
function collectPrimitive(map, defaultValue = '') { | ||
let primitive; | ||
function getValue() { | ||
return primitive; | ||
} | ||
function addLine(line) { | ||
const { value = defaultValue } = line; | ||
primitive = map(value); | ||
} | ||
return { addLine, getValue }; | ||
} | ||
function asString(_fieldValue, line) { | ||
return line.value || ''; | ||
function toRep(values) { | ||
return values.map((v) => ({ match: v[0], replaceWith: v[1] })); | ||
} | ||
function asBoolean(_fieldValue, line) { | ||
const { value = '1' } = line; | ||
const iValue = parseInt(value); | ||
return !!iValue; | ||
function toSingleStrings(values) { | ||
return values.map((v) => v[0]).filter(util_1.isDefined); | ||
} | ||
function asNumber(_fieldValue, line) { | ||
const { value = '0' } = line; | ||
return parseInt(value); | ||
function toAffMap(values) { | ||
return toSingleStrings(values); | ||
} | ||
function toCompoundRule(values) { | ||
return toSingleStrings(values); | ||
} | ||
function toCheckCompoundPattern(values) { | ||
return values; | ||
} | ||
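The refactor above replaces the reducer-style field handlers (which took `(fieldValue, line)` and returned the accumulated value) with stateful collectors exposing `addLine`/`getValue`. A minimal sketch of that shape, using assumed local type names (`Line`, `Collector`) rather than the package's internal ones:

```ts
// Assumed shapes for illustration; the package defines its own internal types.
interface Line {
    option: string;
    value?: string;
}

interface Collector<T> {
    addLine(line: Line): void;
    getValue(): T | undefined;
}

// A collector in the style of asNumber(): remembers the last numeric value seen.
function numberCollector(): Collector<number> {
    let current: number | undefined;
    return {
        addLine: (line) => {
            current = parseInt(line.value ?? '0', 10);
        },
        getValue: () => current,
    };
}

const compoundMin = numberCollector();
compoundMin.addLine({ option: 'COMPOUNDMIN', value: '3' });
console.log(compoundMin.getValue()); // 3
```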
/* | ||
@@ -174,40 +227,82 @@ cspell:ignore COMPOUNDBEGIN COMPOUNDEND COMPOUNDMIDDLE COMPOUNDMIN COMPOUNDPERMITFLAG COMPOUNDRULE COMPOUNDFORBIDFLAG COMPOUNDFLAG | ||
// prettier-ignore | ||
const affTableField = { | ||
AF: afEntry, | ||
BREAK: asNumber, | ||
CHECKCOMPOUNDCASE: asBoolean, | ||
CHECKCOMPOUNDDUP: asBoolean, | ||
CHECKCOMPOUNDPATTERN: simpleTable, | ||
CHECKCOMPOUNDREP: asBoolean, | ||
COMPOUNDBEGIN: asString, | ||
COMPOUNDEND: asString, | ||
COMPOUNDMIDDLE: asString, | ||
COMPOUNDMIN: asNumber, | ||
COMPOUNDFLAG: asString, | ||
COMPOUNDPERMITFLAG: asString, | ||
COMPOUNDFORBIDFLAG: asString, | ||
COMPOUNDRULE: simpleTable, | ||
FLAG: asString, | ||
FORBIDDENWORD: asString, | ||
FORCEUCASE: asString, | ||
ICONV: convEntry, | ||
KEEPCASE: asString, | ||
KEY: asString, | ||
MAP: simpleTable, | ||
MAXCPDSUGS: asNumber, | ||
MAXDIFF: asNumber, | ||
NEEDAFFIX: asString, | ||
NOSPLITSUGS: asBoolean, | ||
NOSUGGEST: asString, | ||
OCONV: convEntry, | ||
ONLYINCOMPOUND: asString, | ||
ONLYMAXDIFF: asBoolean, | ||
PFX: asPfx, | ||
REP: simpleTable, | ||
SET: asString, | ||
SFX: asSfx, | ||
TRY: asString, | ||
WARN: asString, | ||
WORDCHARS: asString, | ||
}; | ||
const createAffFieldTable = () => ({ | ||
AF: afEntry(), | ||
BREAK: simpleTable(toSingleStrings), | ||
CHECKCOMPOUNDCASE: asBoolean(), | ||
CHECKCOMPOUNDDUP: asBoolean(), | ||
CHECKCOMPOUNDPATTERN: simpleTable(toCheckCompoundPattern), | ||
CHECKCOMPOUNDREP: asBoolean(), | ||
COMPOUNDBEGIN: asString(), | ||
COMPOUNDEND: asString(), | ||
COMPOUNDMIDDLE: asString(), | ||
COMPOUNDMIN: asNumber(), | ||
COMPOUNDFLAG: asString(), | ||
COMPOUNDPERMITFLAG: asString(), | ||
COMPOUNDFORBIDFLAG: asString(), | ||
COMPOUNDRULE: simpleTable(toCompoundRule), | ||
FLAG: asString(), | ||
FORBIDDENWORD: asString(), | ||
FORCEUCASE: asString(), | ||
ICONV: convEntry(), | ||
KEEPCASE: asString(), | ||
KEY: asString(), | ||
MAP: simpleTable(toAffMap), | ||
MAXCPDSUGS: asNumber(), | ||
MAXDIFF: asNumber(), | ||
NEEDAFFIX: asString(), | ||
NOSPLITSUGS: asBoolean(), | ||
NOSUGGEST: asString(), | ||
OCONV: convEntry(), | ||
ONLYINCOMPOUND: asString(), | ||
ONLYMAXDIFF: asBoolean(), | ||
PFX: asPfx(), | ||
REP: simpleTable(toRep), | ||
SET: asString(), | ||
SFX: asSfx(), | ||
TRY: asString(), | ||
WARN: asString(), | ||
WORDCHARS: asString(), | ||
}); | ||
function collectionToAffInfo(affFieldCollectionTable, encoding) { | ||
// prettier-ignore | ||
const result = { | ||
AF: affFieldCollectionTable.AF.getValue(), | ||
BREAK: affFieldCollectionTable.BREAK.getValue(), | ||
CHECKCOMPOUNDCASE: affFieldCollectionTable.CHECKCOMPOUNDCASE.getValue(), | ||
CHECKCOMPOUNDDUP: affFieldCollectionTable.CHECKCOMPOUNDDUP.getValue(), | ||
CHECKCOMPOUNDPATTERN: affFieldCollectionTable.CHECKCOMPOUNDPATTERN.getValue(), | ||
CHECKCOMPOUNDREP: affFieldCollectionTable.CHECKCOMPOUNDREP.getValue(), | ||
COMPOUNDBEGIN: affFieldCollectionTable.COMPOUNDBEGIN.getValue(), | ||
COMPOUNDEND: affFieldCollectionTable.COMPOUNDEND.getValue(), | ||
COMPOUNDMIDDLE: affFieldCollectionTable.COMPOUNDMIDDLE.getValue(), | ||
COMPOUNDMIN: affFieldCollectionTable.COMPOUNDMIN.getValue(), | ||
COMPOUNDFLAG: affFieldCollectionTable.COMPOUNDFLAG.getValue(), | ||
COMPOUNDPERMITFLAG: affFieldCollectionTable.COMPOUNDPERMITFLAG.getValue(), | ||
COMPOUNDFORBIDFLAG: affFieldCollectionTable.COMPOUNDFORBIDFLAG.getValue(), | ||
COMPOUNDRULE: affFieldCollectionTable.COMPOUNDRULE.getValue(), | ||
FLAG: affFieldCollectionTable.FLAG.getValue(), | ||
FORBIDDENWORD: affFieldCollectionTable.FORBIDDENWORD.getValue(), | ||
FORCEUCASE: affFieldCollectionTable.FORCEUCASE.getValue(), | ||
ICONV: affFieldCollectionTable.ICONV.getValue(), | ||
KEEPCASE: affFieldCollectionTable.KEEPCASE.getValue(), | ||
KEY: affFieldCollectionTable.KEY.getValue(), | ||
MAP: affFieldCollectionTable.MAP.getValue(), | ||
MAXCPDSUGS: affFieldCollectionTable.MAXCPDSUGS.getValue(), | ||
MAXDIFF: affFieldCollectionTable.MAXDIFF.getValue(), | ||
NEEDAFFIX: affFieldCollectionTable.NEEDAFFIX.getValue(), | ||
NOSPLITSUGS: affFieldCollectionTable.NOSPLITSUGS.getValue(), | ||
NOSUGGEST: affFieldCollectionTable.NOSUGGEST.getValue(), | ||
OCONV: affFieldCollectionTable.OCONV.getValue(), | ||
ONLYINCOMPOUND: affFieldCollectionTable.ONLYINCOMPOUND.getValue(), | ||
ONLYMAXDIFF: affFieldCollectionTable.ONLYMAXDIFF.getValue(), | ||
PFX: affFieldCollectionTable.PFX.getValue(), | ||
REP: affFieldCollectionTable.REP.getValue(), | ||
SET: affFieldCollectionTable.SET.getValue() || encoding, | ||
SFX: affFieldCollectionTable.SFX.getValue(), | ||
TRY: affFieldCollectionTable.TRY.getValue(), | ||
WARN: affFieldCollectionTable.WARN.getValue(), | ||
WORDCHARS: affFieldCollectionTable.WORDCHARS.getValue(), | ||
}; | ||
return (0, util_1.cleanObject)(result); | ||
} | ||
async function parseAffFile(filename, encoding = UTF8) { | ||
@@ -225,18 +320,15 @@ const buffer = await (0, fs_extra_1.readFile)(filename); | ||
const lines = affFileContent.split(/\r?\n/g); | ||
return lines | ||
.map((line) => line.trimLeft()) | ||
const affFieldCollectionTable = createAffFieldTable(); | ||
affFieldCollectionTable.SET.addLine({ option: 'SET', value: encoding }); | ||
lines | ||
.map((line) => line.trimStart()) | ||
.map((line) => line.replace(commentRegex, '')) | ||
.filter((line) => line.trim() !== '') | ||
.map(parseLine) | ||
.reduce((aff, line) => { | ||
.forEach((line) => { | ||
var _a; | ||
const field = line.option; | ||
const fn = affTableField[field]; | ||
if (fn) { | ||
aff[field] = fn(aff[field], line); | ||
} | ||
else { | ||
aff[field] = line.value; | ||
} | ||
return aff; | ||
}, { SET: encoding }); | ||
(_a = affFieldCollectionTable[field]) === null || _a === void 0 ? void 0 : _a.addLine(line); | ||
}); | ||
return collectionToAffInfo(affFieldCollectionTable, encoding); | ||
} | ||
@@ -243,0 +335,0 @@ exports.parseAff = parseAff; |
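Putting the pieces above together, the new parse flow builds the collector table once, feeds each parsed line to the collector registered for its option, and finally reads every value back out, dropping `undefined` fields the way `cleanObject` does. A rough sketch of that dispatch with simplified local types (not the package's actual implementation):

```ts
// Rough sketch of the dispatch loop shown above, with simplified local types.
type Line = { option: string; value?: string };
type Collector = { addLine(line: Line): void; getValue(): unknown };

function assemble(lines: Line[], table: Record<string, Collector>): Record<string, unknown> {
    for (const line of lines) {
        // Unknown options are simply ignored, matching the optional call (?.) in the diff.
        table[line.option]?.addLine(line);
    }
    const result: Record<string, unknown> = {};
    for (const [key, collector] of Object.entries(table)) {
        const value = collector.getValue();
        if (value !== undefined) result[key] = value; // cleanObject-style pruning
    }
    return result;
}
```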
@@ -31,2 +31,3 @@ "use strict"; | ||
const uniqueHistorySize = 500000; | ||
// eslint-disable-next-line @typescript-eslint/no-var-requires | ||
const packageInfo = require('../package.json'); | ||
@@ -163,3 +164,5 @@ const version = packageInfo['version']; | ||
} | ||
: () => { }; | ||
: () => { | ||
/* void */ | ||
}; | ||
const seqWords = transform ? reader.seqAffWords(callback) : reader.seqRootWords().map(aff_1.asAffWord); | ||
@@ -166,0 +169,0 @@ const filterUnique = unique ? (0, util_1.uniqueFilter)(uniqueHistorySize) : (_) => true; |
export * from './IterableHunspellReader'; | ||
export { IterableHunspellReader as HunspellReader } from './IterableHunspellReader'; | ||
export { parseAffFile as readAffFile, parseAff } from './affReader'; | ||
export type { AffInfo, AffWord } from './affDef'; | ||
//# sourceMappingURL=index.d.ts.map |
@@ -13,6 +13,9 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.HunspellReader = void 0; | ||
exports.parseAff = exports.readAffFile = exports.HunspellReader = void 0; | ||
__exportStar(require("./IterableHunspellReader"), exports); | ||
var IterableHunspellReader_1 = require("./IterableHunspellReader"); | ||
Object.defineProperty(exports, "HunspellReader", { enumerable: true, get: function () { return IterableHunspellReader_1.IterableHunspellReader; } }); | ||
var affReader_1 = require("./affReader"); | ||
Object.defineProperty(exports, "readAffFile", { enumerable: true, get: function () { return affReader_1.parseAffFile; } }); | ||
Object.defineProperty(exports, "parseAff", { enumerable: true, get: function () { return affReader_1.parseAff; } }); | ||
//# sourceMappingURL=index.js.map |
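With the re-exports above, the parser is reachable from the package root. A hedged usage sketch: the dictionary path is hypothetical, and `readAffFile` is the re-exported `parseAffFile`, which resolves to the parsed aff data.

```ts
import { readAffFile } from 'hunspell-reader';

async function main() {
    // Hypothetical path to a Hunspell affix file.
    const affInfo = await readAffFile('./dictionaries/en_US.aff');
    console.log(affInfo);
}

main().catch(console.error);
```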
@@ -1,3 +0,4 @@ | ||
import { Aff, AffWord } from './aff'; | ||
import { Sequence } from 'gensequence'; | ||
import { Aff } from './aff'; | ||
import type { AffWord } from './affDef'; | ||
import { WordInfo } from './types'; | ||
@@ -40,3 +41,3 @@ export { WordInfo } from './types'; | ||
*/ | ||
seqAffWords(tapPreApplyRules?: (dicEntry: string, index: number) => any, maxDepth?: number): Sequence<AffWord>; | ||
seqAffWords(tapPreApplyRules?: (dicEntry: string, index: number) => void, maxDepth?: number): Sequence<AffWord>; | ||
/** | ||
@@ -48,3 +49,3 @@ * create an iterable sequence of the words in the dictionary. | ||
*/ | ||
seqTransformDictionaryEntries(tapPreApplyRules?: (dicEntry: string, index: number) => any, maxDepth?: number): Sequence<AffWord[]>; | ||
seqTransformDictionaryEntries(tapPreApplyRules?: (dicEntry: string, index: number) => void, maxDepth?: number): Sequence<AffWord[]>; | ||
/** | ||
@@ -51,0 +52,0 @@ * Iterator for all the words in the dictionary. The words are in the order found in the .dic after the |
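The `.d.ts` change above tightens the tap callbacks from `=> any` to `=> void`. A conforming callback looks like the sketch below; constructing the reader itself is not part of this diff, so that step is only hinted at in a comment.

```ts
// A tap callback matching the narrowed signature: its return value is ignored
// and must now be typed as void.
const tap = (dicEntry: string, index: number): void => {
    if (index < 5) console.log(`dic entry ${index}: ${dicEntry}`);
};

// Assuming a reader instance obtained elsewhere:
// for (const affWord of reader.seqAffWords(tap)) { /* ... */ }
```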
@@ -23,6 +23,6 @@ "use strict"; | ||
exports.createMatchingWordsFilter = exports.IterableHunspellReader = void 0; | ||
const affReader_1 = require("./affReader"); | ||
const fs = __importStar(require("fs-extra")); | ||
const gensequence_1 = require("gensequence"); | ||
const fs = __importStar(require("fs-extra")); | ||
const iconv_lite_1 = require("iconv-lite"); | ||
const affReader_1 = require("./affReader"); | ||
const util_1 = require("./util"); | ||
@@ -29,0 +29,0 @@ const defaultEncoding = 'UTF-8'; |
@@ -10,2 +10,9 @@ export declare function hrTimeToSeconds([seconds, nanoseconds]: number[]): number; | ||
export declare function filterOrderedList<T>(compare: (a: T, b: T) => boolean | number): (t: T) => boolean; | ||
export declare function isDefined<T>(v: T | undefined): v is T; | ||
/** | ||
* Remove all `undefined` values from an Object. | ||
* @param obj | ||
* @returns the same object. | ||
*/ | ||
export declare function cleanObject<T>(obj: T): T; | ||
//# sourceMappingURL=util.d.ts.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.filterOrderedList = exports.batch = exports.uniqueFilter = exports.hrTimeToSeconds = void 0; | ||
exports.cleanObject = exports.isDefined = exports.filterOrderedList = exports.batch = exports.uniqueFilter = exports.hrTimeToSeconds = void 0; | ||
function hrTimeToSeconds([seconds, nanoseconds]) { | ||
@@ -56,2 +56,23 @@ return seconds + nanoseconds / 1000000000; | ||
exports.filterOrderedList = filterOrderedList; | ||
function isDefined(v) { | ||
return v !== undefined; | ||
} | ||
exports.isDefined = isDefined; | ||
/** | ||
* Remove all `undefined` values from an Object. | ||
* @param obj | ||
* @returns the same object. | ||
*/ | ||
function cleanObject(obj) { | ||
if (typeof obj != 'object') | ||
return obj; | ||
const r = obj; | ||
for (const [k, v] of Object.entries(r)) { | ||
if (v === undefined) { | ||
delete r[k]; | ||
} | ||
} | ||
return obj; | ||
} | ||
exports.cleanObject = cleanObject; | ||
//# sourceMappingURL=util.js.map |
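The two new helpers above are internal to the package, so the snippet below re-states them locally just to show the intended behaviour; it is a minimal sketch, not the package's export surface.

```ts
// Local re-statements of the helpers shown above, for illustration only.
function isDefined<T>(v: T | undefined): v is T {
    return v !== undefined;
}

function cleanObject<T>(obj: T): T {
    // Null guard added for safety in this sketch; the original checks typeof only.
    if (typeof obj !== 'object' || obj === null) return obj;
    const r = obj as unknown as Record<string, unknown>;
    for (const [k, v] of Object.entries(r)) {
        if (v === undefined) delete r[k];
    }
    return obj;
}

console.log(['PFX', undefined, 'SFX'].filter(isDefined)); // [ 'PFX', 'SFX' ]
console.log(cleanObject({ SET: 'UTF-8', TRY: undefined })); // { SET: 'UTF-8' }
```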
{ | ||
"name": "hunspell-reader", | ||
"version": "5.15.2", | ||
"version": "5.15.3", | ||
"description": "A library for reading Hunspell Dictionary Files", | ||
@@ -43,7 +43,6 @@ "bin": "bin.js", | ||
"@types/jest": "^27.4.0", | ||
"@types/node": "^17.0.8", | ||
"@types/node": "^17.0.10", | ||
"jest": "^27.4.7", | ||
"prettier": "^2.5.1", | ||
"rimraf": "^3.0.2", | ||
"ts-jest": "^27.1.2", | ||
"ts-jest": "^27.1.3", | ||
"typescript": "^4.5.4" | ||
@@ -57,22 +56,6 @@ }, | ||
}, | ||
"eslintConfig": { | ||
"root": true, | ||
"parserOptions": { | ||
"ecmaVersion": 6, | ||
"sourceType": "module" | ||
}, | ||
"env": { | ||
"node": true, | ||
"mocha": true | ||
}, | ||
"ignorePatterns": [ | ||
"dist/**", | ||
"node_modules/**" | ||
], | ||
"rules": {} | ||
}, | ||
"engines": { | ||
"node": ">=12.13.0" | ||
}, | ||
"gitHead": "b047b5458980010a8f3ab31c6dc9434e0e782d5e" | ||
"gitHead": "8a7c55b7d0b340d3c4e964ba91390a405f2cda2d" | ||
} |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package