Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@nlpjs/lang-ko

Package Overview
Dependencies
Maintainers
1
Versions
22
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nlpjs/lang-ko - npm Package Compare versions

Comparing version 4.2.1 to 4.3.0

6

package.json
{
"name": "@nlpjs/lang-ko",
"version": "4.2.1",
"version": "4.3.0",
"description": "Core",

@@ -28,5 +28,5 @@ "author": {

"dependencies": {
"@nlpjs/core": "^4.1.4"
"@nlpjs/core": "^4.3.0"
},
"gitHead": "96ade95b2be5b659a2271d975ca8acdc5b2ed19e"
"gitHead": "68b063504d4cc94d11de373516114befc7c4e648"
}

@@ -0,0 +0,0 @@ /*

@@ -0,0 +0,0 @@ /*

@@ -0,0 +0,0 @@ /*

@@ -110,3 +110,5 @@ /*

const cm = new ChunkMatch(m.index, m.index + m[0].length, m[0], pos);
if (chunksMatched.filter(c => cm.disjoint(c) === false).length === 0) {
if (
chunksMatched.filter((c) => cm.disjoint(c) === false).length === 0
) {
chunksMatched.push(cm);

@@ -126,3 +128,3 @@ matchedLen += m[0].length;

let segStart = 0;
const tokens = l.map(m => {
const tokens = l.map((m) => {
segStart = input.indexOf(m.text, segStart);

@@ -135,7 +137,7 @@ return new KoreanToken(m.text, m.pos, segStart, m.text.length);

function getChunks(input) {
return chunk(input).map(c => c.text);
return chunk(input).map((c) => c.text);
}
function getChunksByPos(input, pos) {
return chunk(input).filter(x => x.pos === pos);
return chunk(input).filter((x) => x.pos === pos);
}

@@ -142,0 +144,0 @@

@@ -60,3 +60,3 @@ /*

function buildCommon(onset, vowel) {
return ['ㅂ', 'ㅆ', 'ㄹ', 'ㄴ', 'ㅁ'].map(coda =>
return ['ㅂ', 'ㅆ', 'ㄹ', 'ㄴ', 'ㅁ'].map((coda) =>
composeHangul(onset, vowel, coda)

@@ -67,3 +67,3 @@ );

function buildNoPast(onset, vowel) {
return ['ㅂ', 'ㄹ', 'ㄴ', 'ㅁ'].map(coda =>
return ['ㅂ', 'ㄹ', 'ㄴ', 'ㅁ'].map((coda) =>
composeHangul(onset, vowel, coda)

@@ -86,3 +86,3 @@ );

...build(lastChar, [0, 6, 10, 12]),
...['ㅂ', 'ㅆ', 'ㄹ', 'ㄴ', 'ㅁ'].map(coda =>
...['ㅂ', 'ㅆ', 'ㄹ', 'ㄴ', 'ㅁ'].map((coda) =>
composeHangul('ㅎ', coda === 'ㅆ' ? 'ㅐ' : 'ㅏ', coda)

@@ -226,3 +226,3 @@ ),

buildCommon(lastOnset, lastVowel),
['ㅆ', 'ㄹ', 'ㅁ'].map(coda => composeHangul(lastOnset, 'ㅐ', coda)),
['ㅆ', 'ㄹ', 'ㅁ'].map((coda) => composeHangul(lastOnset, 'ㅐ', coda)),
[

@@ -269,5 +269,5 @@ composeHangul(lastOnset, 'ㅐ'),

);
irregularExpansion = conjugation.map(s => newInit + s);
irregularExpansion = conjugation.map((s) => newInit + s);
}
expanded.push(...expandedLast.map(s => init + s));
expanded.push(...expandedLast.map((s) => init + s));
expanded.push(...irregularExpansion);

@@ -274,0 +274,0 @@ }

@@ -0,0 +0,0 @@ /*

@@ -0,0 +0,0 @@ /*

@@ -62,3 +62,3 @@ /*

}
const decomposed = [...word].map(c => decomposeHangul(c));
const decomposed = [...word].map((c) => decomposeHangul(c));
const lastChar = decomposed[decomposed.length - 1];

@@ -65,0 +65,0 @@ if (!codaMap[lastChar.onset]) {

@@ -0,0 +0,0 @@ /*

@@ -0,0 +0,0 @@ /*

@@ -0,0 +0,0 @@ /*

@@ -0,0 +0,0 @@ /*

@@ -93,3 +93,3 @@ /*

tokenize(text) {
const tokens = text.split(/[\s,.!?;:([\]'"¡¿)/]+/).filter(x => x);
const tokens = text.split(/[\s,.!?;:([\]'"¡¿)/]+/).filter((x) => x);
const result = [];

@@ -165,4 +165,4 @@ for (let i = 0; i < tokens.length; i += 1) {

const tokens = tokenize(this.normalizer.normalize(newText))
.map(x => stemWord(this.prestem(x.trim())))
.filter(x => x);
.map((x) => stemWord(this.prestem(x.trim())))
.filter((x) => x);
const result = [];

@@ -195,4 +195,4 @@ for (let i = 0; i < tokens.length; i += 1) {

const tokens = tokenize(this.normalize(newText))
.map(x => stemWord(this.prestem(x.trim())))
.filter(x => x);
.map((x) => stemWord(this.prestem(x.trim())))
.filter((x) => x);
const result = [];

@@ -199,0 +199,0 @@ for (let i = 0; i < tokens.length; i += 1) {

@@ -0,0 +0,0 @@ /*

@@ -39,3 +39,3 @@ /*

clean(text) {
const tokens = text.split(/[\s,.!?;:([\]'"¡¿)/]+/).filter(x => x);
const tokens = text.split(/[\s,.!?;:([\]'"¡¿)/]+/).filter((x) => x);
const result = [];

@@ -64,4 +64,4 @@ for (let i = 0; i < tokens.length; i += 1) {

const tokens = tokenize(this.clean(text));
const trimmed = tokens.map(x => x.trim());
const filtered = trimmed.filter(x => x);
const trimmed = tokens.map((x) => x.trim());
const filtered = trimmed.filter((x) => x);
return filtered;

@@ -68,0 +68,0 @@ }

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc