node-nlp-typescript
Comparing version 0.1.37 to 0.1.38
@@ -9,2 +9,3 @@ import { Language } from './language';
 import { Recognizer, ConversationContext, MemoryConversationContext } from './recognizer';
-export { Language, NlpUtil, NlpManager, NlpExcelReader, XTableUtils, XTable, XDoc, removeEmojis, Evaluator, SpellCheck, Handlebars, ActionManager, NlgManager, NeuralNetwork, SentimentAnalyzer, SentimentManager, Recognizer, ConversationContext, MemoryConversationContext, };
+import { BrainNLU } from './nlu';
+export { Language, NlpUtil, NlpManager, NlpExcelReader, XTableUtils, XTable, XDoc, removeEmojis, Evaluator, SpellCheck, Handlebars, ActionManager, NlgManager, NeuralNetwork, SentimentAnalyzer, SentimentManager, Recognizer, ConversationContext, MemoryConversationContext, BrainNLU, };
@@ -25,3 +25,3 @@ "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.MemoryConversationContext = exports.ConversationContext = exports.Recognizer = exports.SentimentManager = exports.SentimentAnalyzer = exports.NlgManager = exports.ActionManager = exports.Handlebars = exports.SpellCheck = exports.Evaluator = exports.removeEmojis = exports.XDoc = exports.XTable = exports.XTableUtils = exports.NlpExcelReader = exports.NlpManager = exports.NlpUtil = exports.Language = void 0;
+exports.BrainNLU = exports.MemoryConversationContext = exports.ConversationContext = exports.Recognizer = exports.SentimentManager = exports.SentimentAnalyzer = exports.NlgManager = exports.ActionManager = exports.Handlebars = exports.SpellCheck = exports.Evaluator = exports.removeEmojis = exports.XDoc = exports.XTable = exports.XTableUtils = exports.NlpExcelReader = exports.NlpManager = exports.NlpUtil = exports.Language = void 0;
 const language_1 = require("./language");
@@ -52,1 +52,3 @@ Object.defineProperty(exports, "Language", { enumerable: true, get: function () { return language_1.Language; } });
 Object.defineProperty(exports, "MemoryConversationContext", { enumerable: true, get: function () { return recognizer_1.MemoryConversationContext; } });
+const nlu_1 = require("./nlu");
+Object.defineProperty(exports, "BrainNLU", { enumerable: true, get: function () { return nlu_1.BrainNLU; } });
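Version 0.1.38 re-exports BrainNLU from the package root alongside NlpManager and the other helpers. A minimal consumption sketch, assuming BrainNLU keeps the add / train / getBestClassification interface documented for the original node-nlp (those method names are an assumption; the diff only shows the export):

import { BrainNLU } from 'node-nlp-typescript';

async function classifyFarewell() {
  const nlu = new BrainNLU();
  // Label a few utterances, then train; method names assumed from upstream node-nlp.
  nlu.add('goodbye for now', 'greetings.bye');
  nlu.add('bye bye take care', 'greetings.bye');
  nlu.add('hello there', 'greetings.hello');
  await nlu.train();
  return nlu.getBestClassification('I have to say goodbye');
}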
@@ -11,3 +11,3 @@ "use strict";
 const language_1 = __importDefault(require("@nlpjs/language"));
-const lang_all_1 = __importDefault(require("@nlpjs/lang-all"));
+const lang_all_1 = require("@nlpjs/lang-all");
 const nlp_1 = require("@nlpjs/nlp");
@@ -35,3 +35,3 @@ const evaluator_1 = require("@nlpjs/evaluator");
 this.container.register('Language', language_1.default, false);
-this.container.use(lang_all_1.default);
+this.container.use(lang_all_1.LangAll);
 this.container.use(evaluator_1.Evaluator);
@@ -38,0 +38,0 @@ this.container.use(evaluator_1.Template);
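The change above only swaps how the @nlpjs/lang-all pack is registered into the manager's container; the public NlpManager surface is untouched. For orientation, a typical round trip mirrors the upstream nlp.js quickstart (the intents and answers below are illustrative only):

import { NlpManager } from 'node-nlp-typescript';

const manager = new NlpManager({ languages: ['en'] });
manager.addDocument('en', 'goodbye for now', 'greetings.bye');
manager.addDocument('en', 'bye bye take care', 'greetings.bye');
manager.addAnswer('en', 'greetings.bye', 'Till next time');

(async () => {
  await manager.train();
  const response = await manager.process('en', 'I should go now');
  console.log(response.answer); // 'Till next time'
})();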
@@ -24,8 +24,5 @@ "use strict";
 */
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 const core_loader_1 = require("@nlpjs/core-loader");
-const lang_all_1 = __importDefault(require("@nlpjs/lang-all"));
+const lang_all_1 = require("@nlpjs/lang-all");
 const cultures = {
@@ -114,3 +111,3 @@ ar: 'ar-ae',
 const name = `Stemmer${locale.slice(0, 1).toUpperCase()}${locale.slice(1)}`;
-const Stemmer = lang_all_1.default[name];
+const Stemmer = lang_all_1.LangAll[name];
 return Stemmer ? new Stemmer() : new core_loader_1.BaseStemmer();
@@ -123,3 +120,3 @@ }
 const name = `Tokenizer${locale.slice(0, 1).toUpperCase()}${locale.slice(1)}`;
-const TokenizerClass = lang_all_1.default[name];
+const TokenizerClass = lang_all_1.LangAll[name];
 return TokenizerClass
@@ -137,3 +134,3 @@ ? new TokenizerClass(undefined, true)
 (0, core_loader_1.containerBootstrap)({}, true, core_loader_1.defaultContainer);
-core_loader_1.defaultContainer.use(lang_all_1.default.LangAll);
+core_loader_1.defaultContainer.use(lang_all_1.LangAll.LangAll);
 exports.default = NlpUtil;
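The NlpUtil hunks keep the same lookup convention, building an export name such as StemmerEn or TokenizerEn from the locale, but resolve it on the named LangAll export instead of the module's default. A sketch of that convention in TypeScript, assuming BaseStemmer is exposed by @nlpjs/core-loader as the compiled code above implies:

import { LangAll } from '@nlpjs/lang-all';
import { BaseStemmer } from '@nlpjs/core-loader';

// Derive the export name from the locale (e.g. 'en' -> 'StemmerEn') and fall
// back to BaseStemmer when the language pack has no matching entry.
function stemmerFor(locale: string) {
  const name = `Stemmer${locale.slice(0, 1).toUpperCase()}${locale.slice(1)}`;
  const Stemmer = (LangAll as any)[name];
  return Stemmer ? new Stemmer() : new BaseStemmer();
}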
@@ -24,8 +24,5 @@ "use strict";
 */
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 const sentiment_1 = require("@nlpjs/sentiment");
-const lang_all_1 = __importDefault(require("@nlpjs/lang-all"));
+const lang_all_1 = require("@nlpjs/lang-all");
 const nlu_1 = require("@nlpjs/nlu");
@@ -35,3 +32,3 @@ class SentimentAnalyzer extends sentiment_1.SentimentAnalyzer {
 super(settings, container);
-this.container.use(lang_all_1.default);
+this.container.use(lang_all_1.LangAll);
 this.container.use(nlu_1.Nlu);
@@ -38,0 +35,0 @@ }
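As with NlpManager, only the container wiring of the sentiment analyzer changes; callers invoke it as before. A rough sketch, assuming the exported SentimentManager keeps the process(locale, phrase) signature of the original node-nlp (the signature and result shape are assumptions, not shown in this diff):

import { SentimentManager } from 'node-nlp-typescript';

const sentiment = new SentimentManager();

(async () => {
  // process(locale, phrase) assumed from upstream node-nlp; the result typically
  // carries a numeric score plus an overall polarity.
  const result = await sentiment.process('en', 'I really like this library');
  console.log(result);
})();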
 {
 "name": "node-nlp-typescript",
-"version": "0.1.37",
+"version": "0.1.38",
 "description": "nlp.js from axa-group in typescript 🚀. NLP library for building bots 🤖, with entity extraction, sentiment analysis, automatic language identification, and more. ",
@@ -5,0 +5,0 @@ "main": "dist/index.js",
@@ -36,2 +36,3 @@ /*
 } from './recognizer'
+import { BrainNLU } from './nlu'
@@ -58,3 +59,3 @@ export {
 MemoryConversationContext,
-// BrainNLU,
+BrainNLU,
 };
@@ -6,3 +6,3 @@ import fs from 'fs';
 import Language from '@nlpjs/language';
-import LangAll from '@nlpjs/lang-all';
+import { LangAll } from '@nlpjs/lang-all';
 import { Nlp } from '@nlpjs/nlp';
@@ -9,0 +9,0 @@ import { Evaluator, Template } from '@nlpjs/evaluator';
@@ -30,3 +30,3 @@ /*
 } from '@nlpjs/core-loader';
-import LangAll from '@nlpjs/lang-all';
+import { LangAll } from '@nlpjs/lang-all';
@@ -33,0 +33,0 @@ interface Cultures {
@@ -25,3 +25,3 @@ /*
 import { SentimentAnalyzer as SentimentAnalyzerBase } from '@nlpjs/sentiment';
-import LangAll from '@nlpjs/lang-all';
+import { LangAll } from '@nlpjs/lang-all';
 import { Nlu } from '@nlpjs/nlu';
@@ -28,0 +28,0 @@ import { Container } from '@nlpjs/core'
@@ -31,3 +31,3 @@ /*
 const LangAll: LangAll;
-export = LangAll;
+export { LangAll };
 }
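That switch from export = to a named export in the ambient declaration is what allows every consumer above to move from a default import to a destructured one. On the consumer side the change is simply:

// 0.1.37, matching the old `export =` declaration:
//   import LangAll from '@nlpjs/lang-all';
//   container.use(LangAll);

// 0.1.38, matching the named export declared above:
import { LangAll } from '@nlpjs/lang-all';
import { defaultContainer } from '@nlpjs/core-loader';

defaultContainer.use(LangAll); // same registration call, different import shape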
declare module '@nlpjs/nlu' {
  import { EventEmitter } from 'events';
  import { Container } from '@nlpjs/core';
  export class Nlu {
  constructor()
  export interface INluOptions {
    container?: Container;
    containerName?: string;
    autoSave?: boolean;
    autoLoad?: boolean;
    persist?: boolean;
    persistFilename?: string;
    persistDir?: string;
    persistInterval?: number;
    persistStateFilename?: string;
    persistStateDir?: string;
    persistStateInterval?: number;
    log?: boolean;
    minSamplesPerIntent?: number;
    trainByDomain?: boolean;
    languages?: string[];
    locale?: string;
  }
  export interface INluClassifyPayload {
    utterance: string;
    locale?: string;
    language?: string;
    domain?: string;
    timezone?: string;
    userId?: string;
    sessionId?: string;
    additional?: any;
  }
  export interface INluModel {
    lang?: string;
    lastUpdate?: string;
    minSamplesPerIntent?: number;
    trainByDomain?: boolean;
    intentThresholds?: { [name: string]: number };
    entitiesThresholds?: { [name: string]: number };
    utterances?: { [locale: string]: { [intent: string]: string[] } };
    domains?: { [name: string]: { [locale: string]: { [intent: string]: string[] } } };
    entities?: {
      [locale: string]: {
        [name: string]: {
          type: string;
          values: { [value: string]: any };
        };
      };
    };
    regex?: { [name: string]: { [locale: string]: string } };
    stems?: { [locale: string]: { [value: string]: string } };
  }
  export class Nlu extends EventEmitter {
    constructor();
    container: Container;
    model?: INluModel;
    containerName?: string;
    locale: string;
    languages: string[];
    settings: INluOptions;
    stopWords: Set<string>;
    stemmers: {
      [locale: string]: (str: string) => string;
    };
    classifiers: {
      [locale: string]: {
        [name: string]: any;
      };
    };
    load(): Promise<void>;
    process(payload: INluClassifyPayload): Promise<INluModel>;
    train(): Promise<void>;
    save(): Promise<void>;
    export(): INluModel;
  }
  export interface NluNeuralSettings {
    locale?: string;
    log?: boolean;
    useNoneFeature?: boolean;
    noneValue?: number;
    useNeural?: boolean;
    stemming?: boolean;
    useRegExpTokenize?: boolean;
    useLemma?: boolean;
    minScore?: number;
    ner?: any;
    skipStopWords?: boolean;
    pipeline?: any;
  }
  export class NluNeural {
    constructor(settings?: NluNeuralSettings);
    settings: NluNeuralSettings;
    train(corpus: any[], settings?: NluNeuralSettings): Promise<void>;
    process(utterance: string, context?: any): Promise<{
      classifications: Array<{
        intent: string;
        score: number;
      }>;
    }>;
  }
  export class NluNeuralManager {
    constructor(settings?: NluNeuralSettings);
    nlu: NluNeural;
    container: any;
  }
  export function register(container: any, options?: NluNeuralSettings): void;
}
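Taken together, these declarations describe the surface this package expects from @nlpjs/nlu. A small sketch written only against the types above; since NluNeural.train is declared to take an any[] corpus, the sample entry shape is an assumption for illustration:

import { NluNeural } from '@nlpjs/nlu';

async function demo() {
  const nlu = new NluNeural({ locale: 'en', log: false });
  // The corpus is typed as any[] above; this intent/utterances shape is assumed.
  await nlu.train([
    { intent: 'greetings.bye', utterances: ['goodbye for now', 'bye bye'] },
    { intent: 'greetings.hello', utterances: ['hello', 'hi there'] },
  ]);
  const { classifications } = await nlu.process('I have to go');
  return classifications[0]; // { intent, score } per the declared return type
}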
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package