hunspell-reader
Comparing version 7.0.0-alpha.1 to 7.0.0-alpha.2
bin.js
 #!/usr/bin/env node
-'use strict';
-require('./dist/app');
+import './dist/app.js';
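This first hunk sets the pattern for the whole release: the published JavaScript moves from CommonJS to native ES modules. A minimal sketch of the transformation, using illustrative file and function names rather than this package's:

// Before (CommonJS): strict-mode pragma plus a synchronous require.
// 'use strict';
// const helper = require('./helper');
// exports.run = () => helper.go();

// After (ESM): static imports; note the explicit .js extension,
// which Node's ESM resolver requires for relative specifiers.
import { go } from './helper.js';
export const run = () => go();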
dist/aff.d.ts
@@ -1,3 +0,3 @@
-import type { AffInfo, AffWord, AffWordFlags, Fx, Rule, Substitution } from './affDef';
-import { Converter } from './converter';
+import type { AffInfo, AffWord, AffWordFlags, Fx, Rule, Substitution } from './affDef.js';
+import { Converter } from './converter.js';
 /** The `word` field in a Converted AffWord has been converted using the OCONV mapping */
@@ -4,0 +4,0 @@ export type ConvertedAffWord = AffWord;
dist/aff.js
@@ -1,35 +0,9 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-if (k2 === undefined) k2 = k;
-var desc = Object.getOwnPropertyDescriptor(m, k);
-if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-desc = { enumerable: true, get: function() { return m[k]; } };
-}
-Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-if (k2 === undefined) k2 = k;
-o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-if (mod && mod.__esModule) return mod;
-var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-__setModuleDefault(result, mod);
-return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.debug = exports.filterAff = exports.compareAff = exports.asAffWord = exports.flagsToString = exports.affWordToColoredString = exports.logAffWord = exports.processRules = exports.Aff = void 0;
-const GS = __importStar(require("gensequence"));
-const gensequence_1 = require("gensequence");
-const util = __importStar(require("util"));
-const converter_1 = require("./converter");
-const util_1 = require("./util");
+import * as GS from 'gensequence';
+import { genSequence as gs } from 'gensequence';
+import * as util from 'util';
+import { Converter } from './converter.js';
+import { filterOrderedList, isDefined } from './util.js';
 const log = false;
 const DefaultMaxDepth = 5;
-class Aff {
+export class Aff {
 constructor(affInfo) {
@@ -39,4 +13,4 @@ this.affInfo = affInfo;
 this.rules = processRules(affInfo);
-this._iConv = new converter_1.Converter(affInfo.ICONV || []);
-this._oConv = new converter_1.Converter(affInfo.OCONV || []);
+this._iConv = new Converter(affInfo.ICONV || []);
+this._oConv = new Converter(affInfo.OCONV || []);
 }
@@ -80,3 +54,3 @@ get maxSuffixDepth() {
 const rules = this.joinRules(allRules.filter((rule) => !rule.flags).map((rule) => rule.id));
-const affixRules = allRules.map((rule) => rule.sfx || rule.pfx).filter(util_1.isDefined);
+const affixRules = allRules.map((rule) => rule.sfx || rule.pfx).filter(isDefined);
 const wordWithFlags = { word, flags, rulesApplied, rules: '', base, suffix, prefix, dic };
@@ -153,3 +127,3 @@ return [wordWithFlags, ...this.applyAffixesToWord(affixRules, { ...wordWithFlags, rules }, remainingDepth)]
 .map((key) => this.rules.get(key))
-.filter(util_1.isDefined);
+.filter(isDefined);
 }
@@ -181,3 +155,2 @@ joinRules(rules) {
 }
-exports.Aff = Aff;
 function signature(aff) {
@@ -192,7 +165,7 @@ const { word, flags } = aff;
 }
-function processRules(affInfo) {
-const sfxRules = (0, gensequence_1.genSequence)(affInfo.SFX || [])
+export function processRules(affInfo) {
+const sfxRules = gs(affInfo.SFX || [])
 .map(([, sfx]) => sfx)
 .map((sfx) => ({ id: sfx.id, type: 'sfx', sfx }));
-const pfxRules = (0, gensequence_1.genSequence)(affInfo.PFX || [])
+const pfxRules = gs(affInfo.PFX || [])
 .map(([, pfx]) => pfx)
@@ -213,3 +186,2 @@ .map((pfx) => ({ id: pfx.id, type: 'pfx', pfx }));
 }
-exports.processRules = processRules;
 const affFlag = {
@@ -260,3 +232,3 @@ KEEPCASE: { isKeepCase: true },
 const flagToLongStringMap = _FlagToLongStringMap;
-function logAffWord(affWord, message) {
+export function logAffWord(affWord, message) {
 /* istanbul ignore if */
@@ -269,5 +241,4 @@ if (log) {
 }
-exports.logAffWord = logAffWord;
 /* istanbul ignore next */
-function affWordToColoredString(affWord) {
+export function affWordToColoredString(affWord) {
 return util
@@ -277,5 +248,4 @@ .inspect({ ...affWord, flags: flagsToString(affWord.flags) }, { showHidden: false, depth: 5, colors: true })
 }
-exports.affWordToColoredString = affWordToColoredString;
 /* istanbul ignore next */
-function flagsToString(flags) {
+export function flagsToString(flags) {
 return [...Object.entries(flags)]
@@ -287,4 +257,3 @@ .filter(([, v]) => !!v)
 }
-exports.flagsToString = flagsToString;
-function asAffWord(word, rules = '', flags = {}) {
+export function asAffWord(word, rules = '', flags = {}) {
 return {
@@ -301,4 +270,3 @@ word,
 }
-exports.asAffWord = asAffWord;
-function compareAff(a, b) {
+export function compareAff(a, b) {
 if (a.word !== b.word) {
@@ -311,3 +279,2 @@ return a.word < b.word ? -1 : 1;
 }
-exports.compareAff = compareAff;
 /**
@@ -317,7 +284,6 @@ * Returns a filter function that will filter adjacent AffWords
 */
-function filterAff() {
-return (0, util_1.filterOrderedList)((a, b) => a.word !== b.word || signature(a) !== signature(b));
+export function filterAff() {
+return filterOrderedList((a, b) => a.word !== b.word || signature(a) !== signature(b));
 }
-exports.filterAff = filterAff;
-exports.debug = {
+export const debug = {
 signature,
@@ -324,0 +290,0 @@ };
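Most of the bulk removed above is TypeScript's CommonJS interop boilerplate: __createBinding, __setModuleDefault, and __importStar emulate namespace imports, and wrappers like (0, gensequence_1.genSequence)(...) use the comma operator to call an imported function without binding `this` to the module object. Native ESM output needs none of it. A sketch of the equivalence, with a hypothetical module name:

// CommonJS emit (roughly) for named imports from 'seq':
// const seq_1 = require('seq');
// const head = (0, seq_1.take)((0, seq_1.range)(10), 3);

// The same source compiled as ESM ('seq' is a stand-in module name):
import { range, take } from 'seq';
const head = take(range(10), 3);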
dist/affDef.js
@@ -1,2 +0,1 @@
-"use strict";
 // cspell:words uppercased
@@ -6,3 +5,3 @@ // cspell:words KEEPCASE WARN NEEDAFFIX FORCEUCASE FORBIDDENWORD NOSUGGEST WORDCHARS
 // cspell:words MAXDIFF COMPOUNDMIN COMPOUNDRULE COMPOUNDFLAG COMPOUNDLAST FORBIDWARN
-Object.defineProperty(exports, "__esModule", { value: true });
+export {};
 //# sourceMappingURL=affDef.js.map
@@ -1,3 +0,3 @@
-import { Aff } from './aff';
-import type { AffInfo, Fx } from './affDef';
+import { Aff } from './aff.js';
+import type { AffInfo, Fx } from './affDef.js';
 export interface ConvEntry {
@@ -4,0 +4,0 @@ from: string;
dist/affReader.js
@@ -1,12 +0,7 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.testing = exports.parseAffFileToAff = exports.parseAff = exports.parseAffFile = void 0;
-const assert_1 = __importDefault(require("assert"));
-const promises_1 = require("fs/promises");
-const iconv_lite_1 = require("iconv-lite");
-const aff_1 = require("./aff");
-const util_1 = require("./util");
+import assert from 'assert';
+import { readFile } from 'fs/promises';
+import pkgIconvLite from 'iconv-lite';
+import { Aff } from './aff.js';
+import { cleanObject, isDefined } from './util.js';
+const { decode } = pkgIconvLite;
 const fixRegex = {
@@ -100,3 +95,3 @@ SFX: { m: /$/, r: '$' },
 const fixRuleSet = fieldValue.get(subField);
-(0, assert_1.default)(fixRuleSet);
+assert(fixRuleSet);
 const substitutionSets = fixRuleSet.substitutionSets;
@@ -111,3 +106,3 @@ const ruleAsString = rule.condition.source;
 const substitutionSet = substitutionSets.get(ruleAsString);
-(0, assert_1.default)(substitutionSet);
+assert(substitutionSet);
 const [attachText, attachRules] = rule.affix.split('/', 2);
@@ -208,3 +203,3 @@ substitutionSet.substitutions.push({
 function toSingleStrings(values) {
-return values.map((v) => v[0]).filter(util_1.isDefined);
+return values.map((v) => v[0]).filter(isDefined);
 }
@@ -304,15 +299,14 @@ function toAffMap(values) {
 };
-return (0, util_1.cleanObject)(result);
+return cleanObject(result);
 }
-async function parseAffFile(filename, encoding = UTF8) {
-const buffer = await (0, promises_1.readFile)(filename);
-const file = (0, iconv_lite_1.decode)(buffer, encoding);
+export async function parseAffFile(filename, encoding = UTF8) {
+const buffer = await readFile(filename);
+const file = decode(buffer, encoding);
 const affInfo = parseAff(file, encoding);
 if (affInfo.SET && affInfo.SET.toLowerCase() !== encoding.toLowerCase()) {
-return parseAff((0, iconv_lite_1.decode)(buffer, affInfo.SET.toLowerCase()), affInfo.SET);
+return parseAff(decode(buffer, affInfo.SET.toLowerCase()), affInfo.SET);
 }
 return affInfo;
 }
-exports.parseAffFile = parseAffFile;
-function parseAff(affFileContent, encoding = UTF8) {
+export function parseAff(affFileContent, encoding = UTF8) {
 const lines = affFileContent.split(/\r?\n/g);
@@ -332,7 +326,5 @@ const affFieldCollectionTable = createAffFieldTable();
 }
-exports.parseAff = parseAff;
-function parseAffFileToAff(filename, encoding) {
-return parseAffFile(filename, encoding).then((affInfo) => new aff_1.Aff(affInfo));
+export function parseAffFileToAff(filename, encoding) {
+return parseAffFile(filename, encoding).then((affInfo) => new Aff(affInfo));
 }
-exports.parseAffFileToAff = parseAffFileToAff;
 function parseLine(line) {
@@ -343,3 +335,3 @@ const result = line.match(affixLine) || ['', ''];
 }
-exports.testing = {
+export const testing = {
 parseAffixRule,
@@ -346,0 +338,0 @@ tablePfxOrSfx,
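One detail in the new imports deserves a note: iconv-lite ships CommonJS, so the ESM build takes its default export and destructures decode from it (import pkgIconvLite from 'iconv-lite'; const { decode } = pkgIconvLite;) instead of using a named import, which Node can only provide when it statically detects the export on the CJS module. The same interop pattern against a generic CJS-only dependency (package name hypothetical):

// 'legacy-codec' stands in for any CommonJS-only package.
import legacyCodec from 'legacy-codec';
// Destructure after the default import; named ESM imports from CJS
// work only when Node's cjs-module-lexer can detect the exports.
const { decode, encode } = legacyCodec;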
dist/affToDicInfo.d.ts
 import type { DictionaryInformation } from '@cspell/cspell-types';
-import type { AffInfo } from './affDef';
+import type { AffInfo } from './affDef.js';
 export declare function affToDicInfo(aff: AffInfo, locale: string): DictionaryInformation;
 //# sourceMappingURL=affToDicInfo.d.ts.map
dist/affToDicInfo.js
@@ -1,7 +0,4 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.affToDicInfo = void 0;
-const sync_1 = require("@cspell/cspell-pipe/sync");
-const textUtils_1 = require("./textUtils");
-function affToDicInfo(aff, locale) {
+import { opConcatMap, opMap, pipe } from '@cspell/cspell-pipe/sync';
+import { removeAccents, toRange } from './textUtils.js';
+export function affToDicInfo(aff, locale) {
 const alphabetInfo = extractAlphabet(aff, locale);
@@ -12,7 +9,6 @@ return {
 locale,
-alphabet: (0, textUtils_1.toRange)(alphabetInfo.alphabet, 5),
-accents: (0, textUtils_1.toRange)([...alphabetInfo.accents].sort().join('')),
+alphabet: toRange(alphabetInfo.alphabet, 5),
+accents: toRange([...alphabetInfo.accents].sort().join('')),
 };
 }
-exports.affToDicInfo = affToDicInfo;
 function extractAlphabet(aff, locale) {
@@ -100,3 +96,3 @@ const sources = [
 for (const letter of letters) {
-const base = (0, textUtils_1.removeAccents)(letter);
+const base = removeAccents(letter);
 const formCollection = getForm(base);
@@ -117,5 +113,5 @@ formCollection.add(base);
 function calc(form) {
-return new Set((0, sync_1.pipe)(form, (0, sync_1.opConcatMap)((letter) => calcCapitalizationForms(letter, locale))));
+return new Set(pipe(form, opConcatMap((letter) => calcCapitalizationForms(letter, locale))));
 }
-const values = (0, sync_1.pipe)(accentForms, (0, sync_1.opMap)(calc));
+const values = pipe(accentForms, opMap(calc));
 return [...values];
@@ -135,4 +131,4 @@ }
 return undefined;
-const substations = (0, sync_1.pipe)(fxm.values(), (0, sync_1.opConcatMap)((f) => f.substitutionSets.values()), (0, sync_1.opConcatMap)((s) => s.substitutions));
-const partials = (0, sync_1.pipe)(substations, (0, sync_1.opConcatMap)((sub) => [sub.remove, sub.attach]));
+const substations = pipe(fxm.values(), opConcatMap((f) => f.substitutionSets.values()), opConcatMap((s) => s.substitutions));
+const partials = pipe(substations, opConcatMap((sub) => [sub.remove, sub.attach]));
 return [...partials];
@@ -139,0 +135,0 @@ }
dist/app.js
@@ -1,14 +0,13 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const commander_1 = require("commander");
-const commandDictInfo_1 = require("./commandDictInfo");
-const commandWords_1 = require("./commandWords");
-// eslint-disable-next-line @typescript-eslint/no-var-requires
-const packageInfo = require('../package.json');
+import { program } from 'commander';
+import { readFileSync } from 'fs';
+import { getCommand as getDictInfoCommand } from './commandDictInfo.js';
+import { getCommand as commandWords } from './commandWords.js';
+const pkgRaw = readFileSync(new URL('../package.json', import.meta.url), 'utf8');
+const packageInfo = JSON.parse(pkgRaw);
 const version = packageInfo['version'];
-commander_1.program.version(version);
-commander_1.program.addCommand((0, commandWords_1.getCommand)());
-commander_1.program.addCommand((0, commandDictInfo_1.getCommand)());
-commander_1.program.showHelpAfterError();
-commander_1.program.parseAsync(process.argv);
+program.version(version);
+program.addCommand(commandWords());
+program.addCommand(getDictInfoCommand());
+program.showHelpAfterError();
+program.parseAsync(process.argv);
 //# sourceMappingURL=app.js.map
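app.js also shows the standard replacement for require('../package.json'), which has no direct ESM equivalent: resolve the file against import.meta.url, read it, and JSON.parse the result. The technique as a self-contained sketch:

import { readFileSync } from 'fs';

// import.meta.url is the file:// URL of the current module; URL
// resolution takes over the role of __dirname-based path joins.
const pkgUrl = new URL('../package.json', import.meta.url);
const packageInfo = JSON.parse(readFileSync(pkgUrl, 'utf8'));
console.log(packageInfo.version);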
dist/commandDictInfo.js
@@ -1,11 +0,8 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getCommand = void 0;
 // cSpell:ignore findup
-const commander_1 = require("commander");
-const affReader_1 = require("./affReader");
-const affToDicInfo_1 = require("./affToDicInfo");
-const textUtils_1 = require("./textUtils");
-function getCommand() {
-const commander = new commander_1.Command('cspell-dict-info');
+import { Command } from 'commander';
+import { parseAffFile } from './affReader.js';
+import { affToDicInfo } from './affToDicInfo.js';
+import { escapeUnicodeCode } from './textUtils.js';
+export function getCommand() {
+const commander = new Command('cspell-dict-info');
 commander
@@ -17,11 +14,10 @@ .arguments('<hunspell_aff_file> <locale>')
 }
-exports.getCommand = getCommand;
 async function action(hunspellFile, locale) {
 const baseFile = hunspellFile.replace(/\.(dic|aff)$/, '');
 const affFile = baseFile + '.aff';
-const aff = await (0, affReader_1.parseAffFile)(affFile);
-const info = (0, affToDicInfo_1.affToDicInfo)(aff, locale);
+const aff = await parseAffFile(affFile);
+const info = affToDicInfo(aff, locale);
 const rawJson = JSON.stringify(info, null, 2);
-console.log((0, textUtils_1.escapeUnicodeCode)(rawJson));
+console.log(escapeUnicodeCode(rawJson));
 }
 //# sourceMappingURL=commandDictInfo.js.map
dist/commandWords.js
@@ -1,16 +0,13 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getCommand = void 0;
 // cSpell:ignore findup
-const commander_1 = require("commander");
-const fs_1 = require("fs");
-const gensequence_1 = require("gensequence");
-const aff_1 = require("./aff");
-const IterableHunspellReader_1 = require("./IterableHunspellReader");
-const iterableToStream_1 = require("./iterableToStream");
-const util_1 = require("./util");
+import { Command } from 'commander';
+import { createWriteStream, openSync, writeSync } from 'fs';
+import { genSequence } from 'gensequence';
+import { asAffWord } from './aff.js';
+import { IterableHunspellReader } from './IterableHunspellReader.js';
+import { iterableToStream } from './iterableToStream.js';
+import { batch, uniqueFilter } from './util.js';
 const uniqueHistorySize = 500000;
 let logStream = process.stderr;
-function getCommand() {
-const commander = new commander_1.Command('words');
+export function getCommand() {
+const commander = new Command('words');
 commander
@@ -35,3 +32,2 @@ .arguments('<hunspell_dic_file>')
 }
-exports.getCommand = getCommand;
 function notify(message, newLine = true) {
@@ -56,5 +52,5 @@ message = message + (newLine ? '\n' : '');
 let resolved = false;
-const out = outFile ? (0, fs_1.createWriteStream)(outFile) : process.stdout;
-const bufferedSeq = (0, gensequence_1.genSequence)((0, util_1.batch)(seq, 500)).map((batch) => batch.join(''));
-const dataStream = (0, iterableToStream_1.iterableToStream)(bufferedSeq);
+const out = outFile ? createWriteStream(outFile) : process.stdout;
+const bufferedSeq = genSequence(batch(seq, 500)).map((batch) => batch.join(''));
+const dataStream = iterableToStream(bufferedSeq);
 const fileStream = dataStream.pipe(out);
@@ -118,3 +114,3 @@ const endEvents = ['finish', 'close', 'end'];
 log(`Generating Words...`);
-const reader = await IterableHunspellReader_1.IterableHunspellReader.createFromFiles(affFile, dicFile);
+const reader = await IterableHunspellReader.createFromFiles(affFile, dicFile);
 if (max_depth && Number.parseInt(max_depth) >= 0) {
@@ -153,4 +149,4 @@ reader.maxDepth = Number.parseInt(max_depth);
 };
-const seqWords = transform ? reader.seqAffWords(callback) : reader.seqRootWords().map(aff_1.asAffWord);
-const filterUnique = unique ? (0, util_1.uniqueFilter)(uniqueHistorySize) : (_) => true;
+const seqWords = transform ? reader.seqAffWords(callback) : reader.seqRootWords().map(asAffWord);
+const filterUnique = unique ? uniqueFilter(uniqueHistorySize) : (_) => true;
 const applyTransformers = (aff) => transformers.reduce((aff, fn) => fn(aff), aff);
@@ -169,4 +165,4 @@ const applyFilters = (aff) => filters.reduce((cur, fn) => cur && fn(aff), true);
 const data = words.toArray().sort().join('');
-const fd = outputFile ? (0, fs_1.openSync)(outputFile, 'w') : 1;
-(0, fs_1.writeSync)(fd, data);
+const fd = outputFile ? openSync(outputFile, 'w') : 1;
+writeSync(fd, data);
 }
@@ -173,0 +169,0 @@ else {
dist/converter.js
@@ -1,6 +0,3 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Converter = void 0;
 const regexSpecialCharacters = /[|\\{}()[\]^$+*?.]/g;
-class Converter {
+export class Converter {
 constructor(convList) {
@@ -21,3 +18,2 @@ const match = convList.map(({ from }) => from.replace(regexSpecialCharacters, '\\$&')).join('|');
 }
-exports.Converter = Converter;
 //# sourceMappingURL=converter.js.map
dist/index.d.ts
@@ -1,6 +0,6 @@
-export type { AffInfo, AffWord } from './affDef';
-export { parseAff, parseAffFile as readAffFile } from './affReader';
-export { createMatchingWordsFilter, type HunspellSrcData, IterableHunspellReader, type WordInfo, } from './IterableHunspellReader';
-export { IterableHunspellReader as HunspellReader } from './IterableHunspellReader';
-export { uniqueFilter } from './util';
+export type { AffInfo, AffWord } from './affDef.js';
+export { parseAff, parseAffFile as readAffFile } from './affReader.js';
+export { createMatchingWordsFilter, type HunspellSrcData, IterableHunspellReader, type WordInfo, } from './IterableHunspellReader.js';
+export { IterableHunspellReader as HunspellReader } from './IterableHunspellReader.js';
+export { uniqueFilter } from './util.js';
 //# sourceMappingURL=index.d.ts.map
dist/index.js
@@ -1,14 +0,5 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.uniqueFilter = exports.HunspellReader = exports.IterableHunspellReader = exports.createMatchingWordsFilter = exports.readAffFile = exports.parseAff = void 0;
-var affReader_1 = require("./affReader");
-Object.defineProperty(exports, "parseAff", { enumerable: true, get: function () { return affReader_1.parseAff; } });
-Object.defineProperty(exports, "readAffFile", { enumerable: true, get: function () { return affReader_1.parseAffFile; } });
-var IterableHunspellReader_1 = require("./IterableHunspellReader");
-Object.defineProperty(exports, "createMatchingWordsFilter", { enumerable: true, get: function () { return IterableHunspellReader_1.createMatchingWordsFilter; } });
-Object.defineProperty(exports, "IterableHunspellReader", { enumerable: true, get: function () { return IterableHunspellReader_1.IterableHunspellReader; } });
-var IterableHunspellReader_2 = require("./IterableHunspellReader");
-Object.defineProperty(exports, "HunspellReader", { enumerable: true, get: function () { return IterableHunspellReader_2.IterableHunspellReader; } });
-var util_1 = require("./util");
-Object.defineProperty(exports, "uniqueFilter", { enumerable: true, get: function () { return util_1.uniqueFilter; } });
+export { parseAff, parseAffFile as readAffFile } from './affReader.js';
+export { createMatchingWordsFilter, IterableHunspellReader, } from './IterableHunspellReader.js';
+export { IterableHunspellReader as HunspellReader } from './IterableHunspellReader.js';
+export { uniqueFilter } from './util.js';
 //# sourceMappingURL=index.js.map
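The index barrel shows the conversion at its most dramatic: CommonJS needs an Object.defineProperty getter per name to keep re-exports live, while ESM expresses each one, renames included, in a single statement (export { parseAffFile as readAffFile } from './affReader.js'). A generic sketch of the two forms (module and symbol names illustrative):

// CommonJS live re-export of `thing` from './impl':
// var impl_1 = require('./impl');
// Object.defineProperty(exports, 'thing', {
//     enumerable: true,
//     get: function () { return impl_1.thing; },
// });

// ESM equivalent; re-exported bindings are live by definition:
export { thing, thing as alias } from './impl.js';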
dist/IterableHunspellReader.d.ts
 import type { Sequence } from 'gensequence';
-import type { Aff } from './aff';
-import type { AffWord } from './affDef';
-import type { WordInfo } from './types';
-export { WordInfo } from './types';
+import type { Aff } from './aff.js';
+import type { AffWord } from './affDef.js';
+import type { WordInfo } from './types.js';
+export { WordInfo } from './types.js';
 export interface HunspellSrcData {
@@ -7,0 +7,0 @@ /** The Aff rules to use with the dictionary entries */
dist/IterableHunspellReader.js
@@ -1,34 +0,9 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-if (k2 === undefined) k2 = k;
-var desc = Object.getOwnPropertyDescriptor(m, k);
-if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-desc = { enumerable: true, get: function() { return m[k]; } };
-}
-Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-if (k2 === undefined) k2 = k;
-o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-if (mod && mod.__esModule) return mod;
-var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-__setModuleDefault(result, mod);
-return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createMatchingWordsFilter = exports.IterableHunspellReader = void 0;
-const fs = __importStar(require("fs/promises"));
-const gensequence_1 = require("gensequence");
-const iconv_lite_1 = require("iconv-lite");
-const affReader_1 = require("./affReader");
-const util_1 = require("./util");
+import * as fs from 'fs/promises';
+import { genSequence } from 'gensequence';
+import pkgIconvLite from 'iconv-lite';
+import { parseAffFileToAff } from './affReader.js';
+import { filterOrderedList } from './util.js';
+const { decode } = pkgIconvLite;
 const defaultEncoding = 'UTF-8';
-class IterableHunspellReader {
+export class IterableHunspellReader {
 constructor(src) {
@@ -55,3 +30,3 @@ this.src = src;
 dicWordsSeq() {
-return (0, gensequence_1.genSequence)(this.src.dic).map((line) => {
+return genSequence(this.src.dic).map((line) => {
 const [word, rules] = line.split('/', 2);
@@ -93,3 +68,3 @@ return { word, rules, prefixes: [], suffixes: [] };
 seqTransformDictionaryEntries(tapPreApplyRules, maxDepth) {
-const seq = (0, gensequence_1.genSequence)(this.src.dic);
+const seq = genSequence(this.src.dic);
 let count = 0;
@@ -133,5 +108,5 @@ const dicWords = tapPreApplyRules ? seq.map((a) => (tapPreApplyRules(a, count++), a)) : seq;
 static async createFromFiles(affFile, dicFile) {
-const aff = await (0, affReader_1.parseAffFileToAff)(affFile, defaultEncoding);
+const aff = await parseAffFileToAff(affFile, defaultEncoding);
 const buffer = await fs.readFile(dicFile);
-const dicFileContent = (0, iconv_lite_1.decode)(buffer, aff.affInfo.SET);
+const dicFileContent = decode(buffer, aff.affInfo.SET);
 const dic = dicFileContent
@@ -145,7 +120,5 @@ .split('\n')
 }
-exports.IterableHunspellReader = IterableHunspellReader;
-function createMatchingWordsFilter() {
-return (0, util_1.filterOrderedList)((a, b) => a !== b);
+export function createMatchingWordsFilter() {
+return filterOrderedList((a, b) => a !== b);
 }
-exports.createMatchingWordsFilter = createMatchingWordsFilter;
 //# sourceMappingURL=IterableHunspellReader.js.map
dist/iterableToStream.d.ts
@@ -1,3 +0,3 @@
-/// <reference types="node" />
-/// <reference types="node" />
+/// <reference types="node" resolution-mode="require"/>
+/// <reference types="node" resolution-mode="require"/>
 import * as stream from 'stream';
@@ -4,0 +4,0 @@ export type Streamable = string | Buffer;
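The only change in this declaration file is the resolution-mode="require" attribute on the /// <reference types="node" /> directives. With the package now "type": "module", TypeScript would otherwise resolve the node types under ESM rules; the attribute (supported in newer TypeScript releases) pins the reference to CommonJS resolution. A minimal .d.ts sketch of the directive in context (the function name is illustrative):

/// <reference types="node" resolution-mode="require"/>
import * as stream from 'stream';

// Buffer is provided by the @types/node reference above.
export type Streamable = string | Buffer;
export declare function toStream(src: Iterable<Streamable>): stream.Readable;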
dist/iterableToStream.js
@@ -1,35 +0,8 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-if (k2 === undefined) k2 = k;
-var desc = Object.getOwnPropertyDescriptor(m, k);
-if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-desc = { enumerable: true, get: function() { return m[k]; } };
-}
-Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-if (k2 === undefined) k2 = k;
-o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-if (mod && mod.__esModule) return mod;
-var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-__setModuleDefault(result, mod);
-return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.iterableToStream = void 0;
-const stream = __importStar(require("stream"));
+import * as stream from 'stream';
 /**
 * Transform an iterable into a node readable stream.
 */
-function iterableToStream(src, options = { encoding: 'utf8' }) {
+export function iterableToStream(src, options = { encoding: 'utf8' }) {
 return new ReadableObservableStream(src, options);
 }
-exports.iterableToStream = iterableToStream;
 class ReadableObservableStream extends stream.Readable {
@@ -63,3 +36,3 @@ constructor(_source, options) {
 }
-exports.default = iterableToStream;
+export default iterableToStream;
 //# sourceMappingURL=iterableToStream.js.map
dist/textUtils.js
@@ -1,4 +0,1 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.removeLooseAccents = exports.removeAccents = exports.toRange = exports.escapeUnicodeCode = void 0;
 /**
@@ -10,6 +7,5 @@ * Escape Unicode Characters
 */
-function escapeUnicodeCode(text, regexp = /\p{M}/gu) {
+export function escapeUnicodeCode(text, regexp = /\p{M}/gu) {
 return text.replace(regexp, replaceWithUnicode);
 }
-exports.escapeUnicodeCode = escapeUnicodeCode;
 function replaceWithUnicode(substring) {
@@ -38,3 +34,3 @@ const start = 0x20;
 */
-function toRange(letters, minLength = 4) {
+export function toRange(letters, minLength = 4) {
 const chars = [];
@@ -77,11 +73,8 @@ let begin = 0;
 }
-exports.toRange = toRange;
-function removeAccents(text) {
+export function removeAccents(text) {
 return removeLooseAccents(text.normalize('NFD'));
 }
-exports.removeAccents = removeAccents;
-function removeLooseAccents(text) {
+export function removeLooseAccents(text) {
 return text.replace(/\p{M}/gu, '');
 }
-exports.removeLooseAccents = removeLooseAccents;
 //# sourceMappingURL=textUtils.js.map
dist/types.js
@@ -1,3 +0,2 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
+export {};
 //# sourceMappingURL=types.js.map
dist/util.js
@@ -1,9 +0,5 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.cleanObject = exports.isDefined = exports.filterOrderedList = exports.batch = exports.uniqueFilter = exports.hrTimeToSeconds = void 0;
-function hrTimeToSeconds([seconds, nanoseconds]) {
+export function hrTimeToSeconds([seconds, nanoseconds]) {
 return seconds + nanoseconds / 1000000000;
 }
-exports.hrTimeToSeconds = hrTimeToSeconds;
-function uniqueFilter(historySize, key = (a) => a) {
+export function uniqueFilter(historySize, key = (a) => a) {
 const f0 = new Set();
@@ -28,4 +24,3 @@ const f1 = new Set();
 }
-exports.uniqueFilter = uniqueFilter;
-function* batch(i, size) {
+export function* batch(i, size) {
 let data = [];
@@ -43,3 +38,2 @@ for (const t of i) {
 }
-exports.batch = batch;
 /**
@@ -49,3 +43,3 @@ * Generate a filter function that will remove adjacent values that compare to falsy;
 */
-function filterOrderedList(compare) {
+export function filterOrderedList(compare) {
 let last;
@@ -58,7 +52,5 @@ return function (t) {
 }
-exports.filterOrderedList = filterOrderedList;
-function isDefined(v) {
+export function isDefined(v) {
 return v !== undefined;
 }
-exports.isDefined = isDefined;
 /**
@@ -69,3 +61,3 @@ * Remove all `undefined` values from an Object.
 */
-function cleanObject(obj) {
+export function cleanObject(obj) {
 if (typeof obj != 'object')
@@ -81,3 +73,2 @@ return obj;
 }
-exports.cleanObject = cleanObject;
 //# sourceMappingURL=util.js.map
package.json
 {
 "name": "hunspell-reader",
-"version": "7.0.0-alpha.1",
+"version": "7.0.0-alpha.2",
 "description": "A library for reading Hunspell Dictionary Files",
 "bin": "bin.js",
-"main": "dist/index.js",
-"typings": "dist/index.d.ts",
+"type": "module",
+"module": "dist/index.js",
+"types": "dist/index.d.ts",
+"exports": {
+".": {
+"import": "./dist/index.js"
+}
+},
 "files": [
@@ -16,10 +22,11 @@ "dist",
 "scripts": {
-"test-unit": "jest",
+"test-unit": "vitest run",
 "test-spelling": "cspell \"src/**/*.ts\" \"*.md\"",
-"smoke-test": "../bin.js words ./dictionaries/nl -n 100",
+"smoke-test": "./bin.js words ./dictionaries/nl -n 100",
 "test": "pnpm run test-unit",
 "build": "tsc -p .",
 "lint": "prettier -w \"**/*.{md,yaml,yml,json,ts}\"",
-"clean": "shx rm -rf dist coverage .tsbuildinfo",
-"coverage": "jest --coverage",
+"clean": "shx rm -rf dist temp coverage \"*.tsbuildInfo\"",
+"coverage": "pnpm coverage:vitest && pnpm coverage:fix",
+"coverage:vitest": "vitest run --coverage",
+"coverage:fix": "nyc report --temp-dir \"$(pwd)/coverage\" --reporter lcov --report-dir \"$(pwd)/coverage\" --cwd ../..",
 "watch": "tsc -w"
@@ -41,3 +48,3 @@ },
 "devDependencies": {
-"@types/jest": "^29.5.0",
+"@types/jest": "^29.5.1",
 "jest": "^29.5.0",
@@ -48,5 +55,5 @@ "ts-jest": "^29.1.0",
 "dependencies": {
-"@cspell/cspell-pipe": "^7.0.0-alpha.1",
-"@cspell/cspell-types": "^7.0.0-alpha.1",
-"commander": "^10.0.0",
+"@cspell/cspell-pipe": "^7.0.0-alpha.2",
+"@cspell/cspell-types": "^7.0.0-alpha.2",
+"commander": "^10.0.1",
 "gensequence": "^5.0.2",
@@ -58,3 +65,3 @@ "iconv-lite": "^0.6.3"
 },
-"gitHead": "626d1a01b413c1b6de0b386a5a264b78a10ba146"
+"gitHead": "a1b7c5daeef5afdb14d6444318f450b9fd9c035a"
 }
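The package.json changes make the module switch official: "type": "module" tells Node to treat every .js file in the package as an ES module, and the new exports map replaces "main" with an import-only entry point, so require('hunspell-reader') no longer resolves. A consumer-side sketch (the exported names come from dist/index.js above):

// From an ES module (or TypeScript under module "node16"/"nodenext"):
import { IterableHunspellReader } from 'hunspell-reader';

// From CommonJS, only a dynamic import() can load an import-only package:
// import('hunspell-reader').then(({ HunspellReader }) => { /* use it */ });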
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 2 instances in 1 package