wink-nlp
Advanced tools
Comparing version 1.5.0 to 1.6.0
@@ -1,1 +0,1 @@ | ||
{"processes":{"3eedfa72-8430-4e93-8f3f-45a399f32cc5":{"parent":null,"children":[]}},"files":{"/Users/neilsbohr/dev/winkjs/wink-nlp/src/wink-nlp.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/dd-wrapper.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/constants.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/doc-v2.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/contained-entities.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/locate.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/get-parent-item.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/search.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-get-item.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-get-item.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-each.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-each.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-filter.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-filter.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-token-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/its.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/sort4FT.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/allowed.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/as.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink
-nlp/src/contained-markings.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-tokens-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-tokens-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-entity-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-entities-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-entities-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-sentence-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-sentences-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-document-out.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/print-tokens.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/cache.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/tokenizer.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/recursive-tokenizer.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/compile-trex.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/tokens-mappers.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/examples-compiler.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/automaton.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/compose-patterns.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/helper.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/utilities/bm25-vectorizer.j
s":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/utilities/allowed.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"],"/Users/neilsbohr/dev/winkjs/wink-nlp/utilities/bow-cosine-similarity.js":["3eedfa72-8430-4e93-8f3f-45a399f32cc5"]},"externalIds":{}} | ||
{"processes":{"bc1c0c47-ad9d-4dd3-8756-e3ba367fc328":{"parent":null,"children":[]}},"files":{"/Users/neilsbohr/dev/winkjs/wink-nlp/src/wink-nlp.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/dd-wrapper.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/constants.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/doc-v2.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/contained-entities.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/locate.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/get-parent-item.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/search.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-get-item.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-get-item.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-each.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-each.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-filter.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-filter.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-token-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/its.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/sort4FT.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/allowed.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/as.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink
-nlp/src/contained-markings.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-tokens-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-tokens-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-entity-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-entities-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/sel-entities-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-sentence-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/col-sentences-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/itm-document-out.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/api/print-tokens.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/cache.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/tokenizer.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/recursive-tokenizer.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/compile-trex.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/tokens-mappers.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/examples-compiler.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/automaton.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/compose-patterns.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/src/helper.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/utilities/bm25-vectorizer.j
s":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/utilities/allowed.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"],"/Users/neilsbohr/dev/winkjs/wink-nlp/utilities/bow-cosine-similarity.js":["bc1c0c47-ad9d-4dd3-8756-e3ba367fc328"]},"externalIds":{}} |
@@ -0,1 +1,8 @@ | ||
# [Enabling configurable annotation pipeline](https://github.com/winkjs/wink-nlp/releases/tag/1.6.0) | ||
## Version 1.6.0 June 27, 2021 | ||
### ✨ Features | ||
- No need to run the entire annotation pipeline, now you can select whatever you want or just even run tokenization by specifying an empty pipe. 🤩👏 | ||
# [Operational update](https://github.com/winkjs/wink-nlp/releases/tag/1.5.0) | ||
@@ -2,0 +9,0 @@ ## Version 1.5.0 June 22, 2021 |
{ | ||
"name": "wink-nlp", | ||
"version": "1.5.0", | ||
"version": "1.6.0", | ||
"description": "Developer friendly NLP ✨", | ||
@@ -5,0 +5,0 @@ "keywords": [ |
@@ -59,7 +59,7 @@ # winkNLP | ||
#### How to install for Web Browser | ||
### How to install for Web Browser | ||
If you're using winkNLP in the browser use the [wink-eng-lite-web-model](https://www.npmjs.com/package/wink-eng-lite-web-model) instead. Learn about its installation and usage in our [guide to using winkNLP in the browser](https://winkjs.org/wink-nlp/how-to-run-wink-nlp-in-browser.html). | ||
## Getting Started | ||
The "Hello World!" in winkNLP is given below. As the next step, we recommend a dive into [winkNLP's concepts](https://winkjs.org/wink-nlp/getting-started.html). | ||
The "Hello World!" in winkNLP is given below: | ||
@@ -97,4 +97,6 @@ ```javascript | ||
``` | ||
Experiment with the above code on [RunKit](https://npm.runkit.com/wink-nlp). | ||
Try a sample code at [RunKit](https://npm.runkit.com/wink-nlp) or head to [showcases](https://winkjs.org/showcase.html) to learn from live examples: | ||
### Explore Further | ||
Dive into [winkNLP's concepts](https://winkjs.org/wink-nlp/getting-started.html) or head to **[winkNLP recipes](https://observablehq.com/collection/@winkjs/winknlp-recipes)** for common NLP tasks or just explore live [showcases](https://winkjs.org/showcase.html) to learn: | ||
@@ -110,2 +112,4 @@ #### [Wikipedia Timeline](https://winkjs.org/showcase-timeline/) ⏳ | ||
## Speed & Accuracy | ||
@@ -112,0 +116,0 @@ The [winkNLP](https://winkjs.org/wink-nlp/) processes raw text at **~525,000 tokens per second** with its default language model — [wink-eng-lite-model](https://github.com/winkjs/wink-eng-lite-model), when [benchmarked](https://github.com/bestiejs/benchmark.js) using "Ch 13 of Ulysses by James Joyce" on a 2.2 GHz Intel Core i7 machine with 16GB RAM. The processing included the entire NLP pipeline — tokenization, sentence boundary detection, negation handling, sentiment analysis, part-of-speech tagging, and named entity extraction. This speed is way ahead of the prevailing speed benchmarks. |
@@ -63,2 +63,3 @@ // wink-nlp | ||
* @param {object} theModel language model. | ||
* @param {string[]} pipe of nlp annotations. | ||
* @returns {object} containing set of API methods for natural language processing. | ||
@@ -69,3 +70,3 @@ * @example | ||
*/ | ||
var nlp = function ( theModel ) { | ||
var nlp = function ( theModel, pipe ) { | ||
@@ -113,2 +114,14 @@ var methods = Object.create( null ); | ||
// Annotation stuff. | ||
var validAnnotations = Object.create( null ); | ||
validAnnotations.sbd = true; | ||
validAnnotations.negation = true; | ||
validAnnotations.sentiment = true; | ||
validAnnotations.pos = true; | ||
validAnnotations.ner = true; | ||
validAnnotations.cer = true; | ||
// Current pipe. | ||
var currPipe = Object.create( null ); | ||
var onlyTokenization = true; | ||
// Private methods. | ||
@@ -235,61 +248,81 @@ | ||
// Map tokens for automata. | ||
var tokens4Automata = mapRawTokens2UIdOfNormal( rdd ); | ||
// Sentence Boundary Detection. | ||
// Set first `Pattern Swap (x)` as `null`. | ||
var px = null; | ||
for ( let i = 0; i < sbdAutomata.length; i += 1 ) { | ||
sbdAutomata[ i ].setPatternSwap( px ); | ||
// For SBD, all tokens are required to extract preceding spaces. | ||
px = sbdAutomata[ i ].recognize( tokens4Automata, sbdTransformers[ i ], rdd.tokens ); | ||
// Map tokens for automata if there are other annotations to be performed. | ||
var tokens4Automata = ( onlyTokenization ) ? null : mapRawTokens2UIdOfNormal( rdd ); | ||
var px; | ||
if ( currPipe.sbd ) { | ||
// Sentence Boundary Detection. | ||
// Set first `Pattern Swap (x)` as `null`. | ||
px = null; | ||
for ( let i = 0; i < sbdAutomata.length; i += 1 ) { | ||
sbdAutomata[ i ].setPatternSwap( px ); | ||
// For SBD, all tokens are required to extract preceding spaces. | ||
px = sbdAutomata[ i ].recognize( tokens4Automata, sbdTransformers[ i ], rdd.tokens ); | ||
} | ||
// The structure of sentence is:<br/> | ||
// `[ start, end, negationFlag, sentimentScore ]` | ||
sbdSetter( px, rdd ); | ||
// Compute number of sentences! | ||
rdd.numOfSentences = rdd.sentences.length; | ||
} else { | ||
// Setup default sentence as entire document! | ||
rdd.numOfSentences = 1; | ||
rdd.sentences = [ [ 0, ( rdd.numOfTokens - 1 ), 0, 0 ] ]; | ||
} | ||
// The structure of sentence is:<br/> | ||
// `[ start, end, negationFlag, sentimentScore ]` | ||
sbdSetter( px, rdd ); | ||
// Compute number of sentences! | ||
rdd.numOfSentences = rdd.sentences.length; | ||
// Named entity detection. | ||
px = null; | ||
for ( let i = 0; i < nerAutomata.length; i += 1 ) { | ||
nerAutomata[ i ].setPatternSwap( px ); | ||
px = nerAutomata[ i ].recognize( tokens4Automata, nerTransformers[ i ] ); | ||
if ( currPipe.ner ) { | ||
// Named entity detection. | ||
px = null; | ||
for ( let i = 0; i < nerAutomata.length; i += 1 ) { | ||
nerAutomata[ i ].setPatternSwap( px ); | ||
px = nerAutomata[ i ].recognize( tokens4Automata, nerTransformers[ i ] ); | ||
} | ||
// Entities — stored as array of `[ start, end, entity type ].` | ||
// There is no setter for entities as no transformation is needed. | ||
rdd.entities = px; | ||
} else { | ||
rdd.entities = []; | ||
} | ||
// Entities — stored as array of `[ start, end, entity type ].` | ||
// There is no setter for entities as no transformation is needed. | ||
rdd.entities = px; | ||
// Negation | ||
px = null; | ||
px = negAutomata.recognize( tokens4Automata ); | ||
negSetter( px, rdd, constants, search ); | ||
if ( currPipe.negation ) { | ||
// Negation | ||
px = null; | ||
px = negAutomata.recognize( tokens4Automata ); | ||
negSetter( px, rdd, constants, search ); | ||
} | ||
// Sentiment Analysis | ||
px = null; | ||
px = saAutomata.recognize( tokens4Automata ); | ||
saSetter( px, rdd, constants, locate ); | ||
// PoS Tagging | ||
const posTags = mapRawTokens2UIdOfDefaultPOS( rdd ); | ||
px = null; | ||
for ( let i = 0; i < posAutomata.length; i += 1 ) { | ||
px = posAutomata[ i ].recognize( posTags, posTransformers[ 0 ], rdd.tokens ); | ||
posUpdater( px, cache, posTags, tokens4Automata ); | ||
if ( currPipe.sentiment ) { | ||
// Sentiment Analysis | ||
px = null; | ||
px = saAutomata.recognize( tokens4Automata ); | ||
saSetter( px, rdd, constants, locate ); | ||
} | ||
posSetter( rdd, posTags, tkSize, constants.bits4lemma ); | ||
// Patterns | ||
px = null; | ||
if ( cerAutomata !== undefined && cerLearnings > 0 ) { | ||
cerConfig.rdd = rdd; | ||
cerConfig.preserve = cerPreserve; | ||
cerConfig.constants = constants; | ||
if ( cerConfig.useEntity ) cerAutomata.setPatternSwap( rdd.entities ); | ||
px = cerAutomata.recognize( tokens4Automata, cerTransformer, cerConfig ); | ||
if ( currPipe.pos ) { | ||
// PoS Tagging | ||
const posTags = mapRawTokens2UIdOfDefaultPOS( rdd ); | ||
px = null; | ||
for ( let i = 0; i < posAutomata.length; i += 1 ) { | ||
px = posAutomata[ i ].recognize( posTags, posTransformers[ 0 ], rdd.tokens ); | ||
posUpdater( px, cache, posTags, tokens4Automata ); | ||
} | ||
posSetter( rdd, posTags, tkSize, constants.bits4lemma ); | ||
} | ||
// If there are no custom entities, then `px` will be `null`; in such a case | ||
// set `customEntities` to an empty array. | ||
rdd.customEntities = px || []; | ||
if ( currPipe.cer ) { | ||
// Patterns | ||
px = null; | ||
if ( cerAutomata !== undefined && cerLearnings > 0 ) { | ||
cerConfig.rdd = rdd; | ||
cerConfig.preserve = cerPreserve; | ||
cerConfig.constants = constants; | ||
if ( cerConfig.useEntity ) cerAutomata.setPatternSwap( rdd.entities ); | ||
px = cerAutomata.recognize( tokens4Automata, cerTransformer, cerConfig ); | ||
} | ||
// If there are no custom entities, then `px` will be `null`; in such a case | ||
// set `customEntities` to an empty array. | ||
rdd.customEntities = px || []; | ||
} else rdd.customEntities = []; | ||
// Word Vector | ||
@@ -369,2 +402,11 @@ // if ( theModel.wordVectors !== undefined ) { | ||
const tempPipe = ( pipe === undefined ) ? Object.keys( validAnnotations ) : pipe; | ||
if ( helper.isArray( tempPipe ) ) { | ||
tempPipe.forEach( ( at ) => { | ||
if ( !validAnnotations[ at ] ) throw Error( `wink-nlp: invalid pipe annotation "${at}" found.` ); | ||
currPipe[ at ] = true; | ||
onlyTokenization = false; | ||
} ); | ||
} else throw Error( `wink-nlp: invalid pipe, it must be an array instead found a "${typeof pipe}".` ); | ||
// Load the model. | ||
@@ -371,0 +413,0 @@ load(); |
516535
5367
150