Comparing version 0.0.2 to 0.0.3
@@ -1,283 +0,9 @@ | ||
'use strict'; | ||
// Unwrap a transpiled ES-module namespace object: return its `default`
// export when present, otherwise the value itself.
function _interopDefault(ex) {
    if (ex && (typeof ex === 'object') && 'default' in ex) {
        return ex['default'];
    }
    return ex;
}
var fetch = _interopDefault(require('node-fetch')); | ||
var url = _interopDefault(require('url')); | ||
/**
 * Send a JSON request to the spaCy REST API and parse the JSON response.
 *
 * @param {String} api - Base URL of the REST API.
 * @param {String} endpoint - Endpoint path, e.g. '/similarity'.
 * @param {Object} opts - Payload, serialized to JSON as the request body.
 * @param {String} [method='POST'] - HTTP method.
 * @returns {Promise<Object|undefined>} The parsed JSON response, or
 *     undefined if the request or the JSON parsing failed.
 */
async function makeRequest(api, endpoint, opts, method = 'POST') {
    const headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json'
    };
    const credentials = 'same-origin';
    const body = JSON.stringify(opts);
    // NOTE(review): url.resolve is a legacy Node API; kept because the file
    // requires `url` above and new URL(endpoint, api) resolves paths
    // slightly differently.
    const apiUrl = url.resolve(api, endpoint);
    try {
        const res = await fetch(apiUrl, {
            method,
            headers,
            credentials,
            body
        });
        return await res.json();
    } catch (err) {
        // Previously the error object was discarded and logged via
        // console.log; surface it on stderr instead. Callers still receive
        // undefined on failure (existing contract).
        console.error(`Error fetching data from API: ${api}`, err);
    }
}
/**
 * Ask the REST API for the similarity of two texts under a given model.
 *
 * @param {String} api - Base URL of the REST API.
 * @param {String} model - Name of the model to use.
 * @param {String} text1 - First text to compare.
 * @param {String} text2 - Second text to compare.
 * @returns {Promise<Number|undefined>} The similarity score, or undefined
 *     if the request failed (makeRequest resolves to undefined on error).
 */
async function getSimilarity(api, model, text1, text2) {
    const json = await makeRequest(api, '/similarity', {
        model,
        text1,
        text2
    });
    // Guard: makeRequest swallows network errors and resolves to undefined;
    // reading .similarity off it would throw an opaque TypeError.
    return json ? json.similarity : undefined;
}
/**
 * A parsed document wrapping the JSON response of the spaCy REST API.
 * Tokens are accessible by numeric index (doc[0]) and by iteration.
 */
class Doc {
    /**
     * @param {Array<String>} words - The token texts.
     * @param {Array<Boolean>} spaces - Whether each token is trailed by whitespace.
     * @param {Object} [attrs] - Raw attributes from the API response
     *     (doc, tokens, ents, sents, chunks, model, api).
     */
    constructor(words, spaces, attrs = {}) {
        this._doc = attrs.doc || {};
        this._tokens = attrs.tokens || [];
        this._ents = attrs.ents || [];
        this._sents = attrs.sents || [];
        this._chunks = attrs.chunks || [];
        this._model = attrs.model;
        this._api = attrs.api;
        this.tokens = words.map((word, i) => new Token(this, word, spaces[i], this._tokens[i]));
        // Mirror the tokens onto numeric properties so doc[i] works.
        this.tokens.forEach((token, i) => {
            this[i] = token;
        });
        this.cats = this._doc.cats;
        this.isTagged = this._doc.is_tagged;
        this.isParsed = this._doc.is_parsed;
        this.isSentenced = this._doc.is_sentenced;
    }

    /** Console inspection hook; mirrors toString(). */
    inspect() {
        return this.text;
    }

    /** The verbatim document text, each token followed by its whitespace. */
    get text() {
        return this.tokens.map((token) => token.textWithWs).join('');
    }

    /** Number of tokens in the document. */
    get length() {
        return this.tokens.length;
    }

    /** Named entities as labelled Span objects. */
    get ents() {
        return this._ents.map(({ start, end, label }) => new Span(this, start, end, label));
    }

    /** Sentences as Span objects. */
    get sents() {
        return this._sents.map(({ start, end }) => new Span(this, start, end));
    }

    /** Base noun phrases as Span objects. */
    get nounChunks() {
        return this._chunks.map(({ start, end }) => new Span(this, start, end));
    }

    /** Iterate over the tokens in document order. */
    *[Symbol.iterator]() {
        yield* this.tokens;
    }

    toString() {
        return this.text;
    }

    /**
     * Apply a function to every token and collect the results.
     * @param {Function} func - Called once per Token.
     * @returns {Array} The mapped values.
     */
    map(func) {
        const results = [];
        for (const token of this) {
            results.push(func(token));
        }
        return results;
    }

    /**
     * Get a Span covering the tokens in [start, end).
     * @param {Number} start - Index of the first token.
     * @param {Number} end - Index after the last token.
     * @returns {Span}
     */
    slice(start, end) {
        return new Span(this, start, end);
    }

    /**
     * Request the semantic similarity between this document's text and
     * another object's text (Doc, Span or Token) via the REST API.
     * @param {Object} obj - Any object with a `text` property.
     * @returns {Promise<Number>}
     */
    async similarity(obj) {
        return await getSimilarity(this._api, this._model, this.text, obj.text);
    }
}
/**
 * A slice of a Doc: the tokens in [start, end), optionally carrying a
 * label (e.g. an entity type). Tokens are accessible by numeric index
 * (span[0]) and by iteration.
 */
class Span {
    /**
     * @param {Doc} doc - The parent document (any iterable of tokens works).
     * @param {Number} start - Index of the first token in the span.
     * @param {Number} end - Index after the last token in the span.
     * @param {String} [label] - Optional label, e.g. an entity type.
     */
    constructor(doc, start, end, label) {
        this.doc = doc;
        this.start = start;
        this.end = end;
        this.label = label;
        this.tokens = [...this.doc].slice(this.start, this.end);
        for (let i = 0; i < this.tokens.length; i++) {
            // BUG FIX: was `this[i] = this.tokens[0]`, which made every
            // numeric index alias the first token of the span.
            this[i] = this.tokens[i];
        }
    }

    /** The verbatim span text, each token followed by its whitespace. */
    get text() {
        let text = '';
        for (let token of this.tokens) {
            text += token.textWithWs;
        }
        return text;
    }

    /** Number of tokens in the span. */
    get length() {
        return this.tokens.length;
    }

    /** Iterate over the span's tokens in order. */
    *[Symbol.iterator]() {
        let i = 0;
        while (this.tokens[i] !== undefined) {
            yield this.tokens[i];
            ++i;
        }
    }

    /**
     * Get a sub-span of the tokens in [start, end), relative to this span.
     * @returns {Span}
     */
    slice(start, end) {
        return new Span(this, start, end);
    }

    toString() {
        return this.text;
    }

    /** Console inspection hook; mirrors toString(). */
    inspect() {
        return this.text;
    }

    /**
     * Request the semantic similarity between this span's text and another
     * object's text via the parent document's REST API settings.
     * @param {Object} obj - Any object with a `text` property.
     * @returns {Promise<Number>}
     */
    async similarity(obj) {
        return await getSimilarity(this.doc._api, this.doc._model, this.text, obj.text);
    }
}
/**
 * A single token of a Doc, exposing the linguistic attributes returned by
 * the spaCy REST API under camelCase names.
 */
class Token {
    /**
     * @param {Doc} doc - The parent document.
     * @param {String} word - The verbatim token text.
     * @param {Boolean} space - Whether the token is followed by whitespace.
     * @param {Object} [attrs] - Raw token attributes from the API response.
     */
    constructor(doc, word, space, attrs = {}) {
        this.doc = doc;
        this.whitespace = space ? ' ' : '';
        this.text = word;
        this.textWithWs = this.text + this.whitespace;
        this.orth = attrs.orth;
        this.i = attrs.i;
        this.entType = attrs.ent_type;
        this.entIob = attrs.ent_iob;
        this.lemma = attrs.lemma;
        this.norm = attrs.norm;
        this.lower = attrs.lower;
        // FIX: was a single comma-operator statement
        // (`this.shape = attrs.shape, this.prefix = attrs.prefix;`);
        // split into two ordinary assignments.
        this.shape = attrs.shape;
        this.prefix = attrs.prefix;
        this.suffix = attrs.suffix;
        this.pos = attrs.pos;
        this.tag = attrs.tag;
        this.dep = attrs.dep;
        this.lang = attrs.lang;
        this.isAlpha = attrs.is_alpha;
        this.isAscii = attrs.is_ascii;
        this.isDigit = attrs.is_digit;
        this.isLower = attrs.is_lower;
        this.isUpper = attrs.is_upper;
        this.isTitle = attrs.is_title;
        this.isPunct = attrs.is_punct;
        this.isLeftPunct = attrs.is_left_punct;
        this.isRightPunct = attrs.is_right_punct;
        this.isSpace = attrs.is_space;
        this.isBracket = attrs.is_bracket;
        this.isCurrency = attrs.is_currency;
        this.likeUrl = attrs.like_url;
        this.likeNum = attrs.like_num;
        this.likeEmail = attrs.like_email;
        this.isOov = attrs.is_oov;
        this.isStop = attrs.is_stop;
        this.isSentStart = attrs.is_sent_start;
        this._head = attrs.head;
    }

    /** The syntactic head: the token at index `_head` of the parent doc. */
    get head() {
        return this.doc[this._head];
    }

    toString() {
        return this.text;
    }

    /** Console inspection hook; mirrors toString(). */
    inspect() {
        return this.text;
    }

    /**
     * Request the semantic similarity between this token's text and another
     * object's text via the parent document's REST API settings.
     * @param {Object} obj - Any object with a `text` property.
     * @returns {Promise<Number>}
     */
    async similarity(obj) {
        return await getSimilarity(this.doc._api, this.doc._model, this.text, obj.text);
    }
}
/**
 * Sets up the `nlp` object. Note: `new Language(model, api)` intentionally
 * returns a FUNCTION, not the instance, so the result can be called
 * directly — `const doc = await nlp(text)` — mirroring spaCy's Python API.
 */
class Language {
    /**
     * @param {String} model - Name of the model the API should use.
     * @param {String} [api='http://localhost:8080'] - Base URL of the REST API.
     * @returns {Function} Async function mapping a text string to a Doc.
     */
    constructor(model, api = 'http://localhost:8080') {
        // An arrow function keeps `this` bound to this instance, so no
        // `const self = this` capture is needed.
        return async (text) => {
            const { words, spaces, attrs } = await this.makeDoc(model, text, api);
            return new Doc(words, spaces, attrs);
        };
    }

    /**
     * Ask the REST API to parse `text` and unpack the response into the
     * pieces the Doc constructor expects.
     * @param {String} model - Model name forwarded to the API.
     * @param {String} text - Text to parse.
     * @param {String} api - Base URL of the REST API.
     * @returns {Promise<{words: Array<String>, spaces: Array<Boolean>, attrs: Object}>}
     */
    async makeDoc(model, text, api) {
        const json = await makeRequest(api, 'parse', { model, text });
        const words = json.tokens.map((token) => token.text);
        const spaces = json.tokens.map((token) => Boolean(token.whitespace));
        const attrs = Object.assign({}, json, { api });
        return { words, spaces, attrs };
    }
}
// Public entry point: `spacy.load(model, api)` yields a callable nlp object.
var index = {
    /**
     * "Load" a model. Only configures the REST client; the model itself
     * lives behind the API.
     * @param {String} model - Model name, e.g. 'en_core_web_sm'.
     * @param {String} [api] - Alternative REST API URL.
     * @returns {Function} The callable nlp object (see Language).
     */
    load(model, api) {
        return new Language(model, api);
    }
};

module.exports = index;
parcelRequire=function(e,r,n,t){var i="function"==typeof parcelRequire&&parcelRequire,o="function"==typeof require&&require;function u(n,t){if(!r[n]){if(!e[n]){var f="function"==typeof parcelRequire&&parcelRequire;if(!t&&f)return f(n,!0);if(i)return i(n,!0);if(o&&"string"==typeof n)return o(n);var c=new Error("Cannot find module '"+n+"'");throw c.code="MODULE_NOT_FOUND",c}p.resolve=function(r){return e[n][1][r]||r},p.cache={};var l=r[n]=new u.Module(n);e[n][0].call(l.exports,p,l,l.exports,this)}return r[n].exports;function p(e){return u(p.resolve(e))}}u.isParcelRequire=!0,u.Module=function(e){this.id=e,this.bundle=u,this.exports={}},u.modules=e,u.cache=r,u.parent=i,u.register=function(r,n){e[r]=[function(e,r){r.exports=n},{}]};for(var f=0;f<n.length;f++)u(n[f]);if(n.length){var c=u(n[n.length-1]);"object"==typeof exports&&"undefined"!=typeof module?module.exports=c:"function"==typeof define&&define.amd?define(function(){return c}):t&&(this[t]=c)}return u}({"Y/Oq":[function(require,module,exports) { | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.makeRequest=i,exports.getSimilarity=o;var e=r(require("node-fetch")),t=r(require("url"));function r(e){return e&&e.__esModule?e:{default:e}}async function i(r,i,o,a="POST"){const n={Accept:"application/json","Content-Type":"application/json"},s=JSON.stringify(o),c=t.default.resolve(r,i);try{const t=await(0,e.default)(c,{method:a,headers:n,credentials:"same-origin",body:s});return await t.json()}catch(l){console.log(`Error fetching data from API: ${r}`)}}async function o(e,t,r,o){return(await i(e,"/similarity",{model:t,text1:r,text2:o})).similarity} | ||
},{}],"nJFl":[function(require,module,exports) { | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Token=exports.Span=exports.Doc=void 0;var t=require("./util");class s{constructor(t,s,i={}){this._doc=i.doc||{},this._tokens=i.tokens||[],this._ents=i.ents||[],this._sents=i.sents||[],this._chunks=i.chunks||[],this._model=i.model,this._api=i.api,this.tokens=t.map((t,i)=>new e(this,t,s[i],this._tokens[i]));for(let e=0;e<this.tokens.length;e++)this[e]=this.tokens[e];this.cats=this._doc.cats,this.isTagged=this._doc.is_tagged,this.isParsed=this._doc.is_parsed,this.isSentenced=this._doc.is_sentenced}inspect(){return this.text}get text(){let t="";for(let s of this.tokens)t+=s.textWithWs;return t}get length(){return this.tokens.length}get ents(){return this._ents.map(({start:t,end:s,label:e})=>new i(this,t,s,e))}get sents(){return this._sents.map(({start:t,end:s})=>new i(this,t,s))}get nounChunks(){return this._chunks.map(({start:t,end:s})=>new i(this,t,s))}*[Symbol.iterator](){let t=0;for(;void 0!==this.tokens[t];)yield this.tokens[t],++t}toString(){return this.text}map(t){let s=[];for(let i of this)s.push(t(i));return s}slice(t,s){return new i(this,t,s)}async similarity(s){return await(0,t.getSimilarity)(this._api,this._model,this.text,s.text)}}exports.Doc=s;class i{constructor(t,s,i,e){this.doc=t,this.start=s,this.end=i,this.label=e,this.tokens=[...this.doc].slice(this.start,this.end);for(let h=0;h<this.tokens.length;h++)this[h]=this.tokens[0]}get text(){let t="";for(let s of this.tokens)t+=s.textWithWs;return t}get length(){return this.tokens.length}*[Symbol.iterator](){let t=0;for(;void 0!==this.tokens[t];)yield this.tokens[t],++t}slice(t,s){return new i(this,t,s)}toString(){return this.text}inspect(){return this.text}async similarity(s){return await(0,t.getSimilarity)(this.doc._api,this.doc._model,this.text,s.text)}}exports.Span=i;class e{constructor(t,s,i,e={}){this.doc=t,this.whitespace=i?" 
":"",this.text=s,this.textWithWs=this.text+this.whitespace,this.orth=e.orth,this.i=e.i,this.entType=e.ent_type,this.entIob=e.ent_iob,this.lemma=e.lemma,this.norm=e.norm,this.lower=e.lower,this.shape=e.shape,this.prefix=e.prefix,this.suffix=e.suffix,this.pos=e.pos,this.tag=e.tag,this.dep=e.dep,this.lang=e.lang,this.isAlpha=e.is_alpha,this.isAscii=e.is_ascii,this.isDigit=e.is_digit,this.isLower=e.is_lower,this.isUpper=e.is_upper,this.isTitle=e.is_title,this.isPunct=e.is_punct,this.isLeftPunct=e.is_left_punct,this.isRightPunct=e.is_right_punct,this.isSpace=e.is_space,this.isBracket=e.is_bracket,this.isCurrency=e.is_currency,this.likeUrl=e.like_url,this.likeNum=e.like_num,this.likeEmail=e.like_email,this.isOov=e.is_oov,this.isStop=e.is_stop,this.isSentStart=e.is_sent_start,this._head=e.head}get head(){return this.doc[this._head]}toString(){return this.text}inspect(){return this.text}async similarity(s){return await(0,t.getSimilarity)(this.doc._api,this.doc._model,this.text,s.text)}}exports.Token=e; | ||
},{"./util":"Y/Oq"}],"hk5u":[function(require,module,exports) { | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=void 0;var e=require("./tokens"),t=require("./util");class s{constructor(t,s="http://localhost:8080"){const a=this;return async function(o){const{words:r,spaces:c,attrs:n}=await a.makeDoc(t,o,s);return new e.Doc(r,c,n)}}async makeDoc(e,s,a){const o=await(0,t.makeRequest)(a,"parse",{model:e,text:s});return{words:o.tokens.map(({text:e})=>e),spaces:o.tokens.map(({whitespace:e})=>Boolean(e)),attrs:Object.assign({},o,{api:a})}}}exports.default=s; | ||
},{"./tokens":"nJFl","./util":"Y/Oq"}],"Focm":[function(require,module,exports) { | ||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),Object.defineProperty(exports,"Doc",{enumerable:!0,get:function(){return r.Doc}}),Object.defineProperty(exports,"Token",{enumerable:!0,get:function(){return r.Token}}),Object.defineProperty(exports,"Span",{enumerable:!0,get:function(){return r.Span}}),exports.default=void 0;var e=t(require("./language")),r=require("./tokens");function t(e){return e&&e.__esModule?e:{default:e}}var n={load:function(r,t){return new e.default(r,t)}};exports.default=n; | ||
},{"./language":"hk5u","./tokens":"nJFl"}]},{},["Focm"], null) |
{ | ||
"name": "spacy", | ||
"version": "0.0.2", | ||
"version": "0.0.3", | ||
"description": "JavaScript API for spaCy with Python REST API", | ||
"main": "dist/index.js", | ||
"scripts": { | ||
"test": "echo \"Error: no test specified\" && exit 1", | ||
"build": "rollup -c rollup.config.js", | ||
"dev": "rollup -c rollup.config.js --watch" | ||
"test": "jest", | ||
"build": "parcel build src/index.js --target node --no-source-maps", | ||
"dev": "parcel src/index.js --target node --no-source-maps", | ||
"package": "npm run build && npm pack" | ||
}, | ||
@@ -29,8 +30,11 @@ "author": "Ines Montani", | ||
"devDependencies": { | ||
"@babel/cli": "^7.1.2", | ||
"@babel/core": "^7.1.2", | ||
"@babel/preset-env": "^7.1.0", | ||
"babel-cli": "^6.26.0", | ||
"rollup": "^0.66.6", | ||
"rollup-plugin-babel": "^4.0.3" | ||
"babel-core": "^7.0.0-bridge.0", | ||
"babel-plugin-add-module-exports": "^1.0.0", | ||
"jest": "^23.6.0", | ||
"parcel-bundler": "^1.10.3", | ||
"regenerator-runtime": "^0.12.1" | ||
} | ||
} |
@@ -45,8 +45,2 @@ <a href="https://explosion.ai"><img src="https://explosion.ai/assets/img/logo.svg" width="125" height="125" align="right" /></a> | ||
Alternatively, you can also include the `.js` file: | ||
```html | ||
<script src="https://unpkg.com/spacy@0.0.1/dist/index.js"></script> | ||
``` | ||
### Setting up the Python server | ||
@@ -89,3 +83,3 @@ | ||
"Load" a spaCy model. This method mostly exists for consistency with the Python | ||
API. It mostly sets up the REST API and `nlp` object, but doesn't actually load | ||
API. It sets up the REST API and `nlp` object, but doesn't actually load | ||
anything, since the models are already available via the REST API. | ||
@@ -100,3 +94,3 @@ | ||
| `model` | String | Name of model to load, e.g. `'en_core_web_sm'`. Needs to be available via the REST API. | | ||
| `api` | String | Alternative URL of REST API. Defaults to `http://localhost:8080`. | | ||
| `api` | String | Alternative URL of REST API. Defaults to `http://0.0.0.0:8080`. | | ||
| **RETURNS** | [`Language`](src/language.js) | The `nlp` object. | | ||
@@ -137,3 +131,3 @@ | ||
```javascript | ||
import { Doc } from 'spacy/tokens'; | ||
import { Doc } from 'spacy'; | ||
@@ -195,3 +189,3 @@ const words = ['Hello', 'world', '!']; | ||
```javascript | ||
import { Doc, Span } from 'spacy/tokens'; | ||
import { Doc, Span } from 'spacy'; | ||
@@ -198,0 +192,0 @@ const doc = Doc(['Hello', 'world', '!'], [true, false, false]); |
import Language from './language' | ||
export { Doc, Token, Span } from './tokens' | ||
@@ -3,0 +4,0 @@ export default { |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries, however packages on npm should not minify code.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
No tests
Quality: Package does not have any tests. This is a strong signal of a poorly maintained or low quality package.
Found 1 instance in 1 package
1
30776
8
229
275
1