protodef-yaml
Comparing version 1.2.5 to 1.3.0
@@ -0,0 +0,0 @@ #!/usr/bin/env node
module.exports = require('./generators/protodef')
@@ -0,0 +0,0 @@ const showdown = require('showdown')
@@ -1,1 +0,408 @@
const fs=globalThis.window?null:require("fs"),Path=globalThis.window?null:require("path"),yaml=require("js-yaml"),log=()=>{};function getIndentation(t){let e=0;for(const n of t){if(" "!==n)break;e++}return e}function pad(t,e){let n="";for(let e=0;e<t;e++)n+=" ";return n+e}function toYAML(t,e=!0,n=!1){const i={};"string"==typeof t?i.main=fs.readFileSync(t,"utf8"):Object.assign(i,t);const s=[];function r(t,e){if(t.includes('"')||e.includes("[[")||e.includes("[{")||e.includes("{")||e.includes("}"))return!0;if(!e.includes("[]")&&e.includes("["))return!0;if(!e.includes("[]")&&e.includes("]"))return!0;for(const e of["[","]","{","}"])if(t.includes(e))return!0;return!1}let o=i.main;const a=(o=o.replace(/\t/g," ")).split("\n");let c=!1;function l(){let o=!1;for(let l=0;l<a.length;l++){let[p,f]=a[l].trim().split(":",2);const u=getIndentation(a[l]),y=getIndentation(a[l+1]||"");if(p.startsWith("#")){if(!p.startsWith("# ")&&"#"!==p||!c||!n)continue;{p="!comment,"+l,f=a[l].replace("#","");const t=a[l-1];getIndentation(a[l-1]||"");t.trim().startsWith("'!comment"),a[l]=pad(u,p+": |\n"),a[l]+=pad(u+3,f)}}if(p=p.trim(),f=f?f.trim():"","_"==p&&(p="__"+l),p)if(p.startsWith("!")){if(p.startsWith("!StartDocs")&&(c=!0),p.startsWith("!EndDocs")&&(c=!1),"!import"==p&&!s.includes(f)&&e){if(o)throw Error("Incorrectly placed import, place it ontop of the file");s.push(f);let e=i[f];if(e||"string"!=typeof t){if(!e)throw new Error("Import file not found: "+f)}else console.log("Path",t,Path.dirname(t)),e=fs.readFileSync(Path.dirname(t)+"/"+f,"utf-8");return e=e.replace(/\t/g," "),a.splice(l,0,...e.split("\n")),!0}let n=p.replace("!","'!");("!import"==p||p.includes("Docs"))&&(n+=","+l),n+="'",a[l]=a[l].replace(p,n)}else if(!r(p,f))if(a[l],log("i",l,f),log(u,y,y>u,f.trim()),y>u)if(o=!0,f.includes("[]")){const[t,e]=f.split("[]");if(t)throw Error("Array type cannot be both container and "+t+" at "+f);a[l]=pad(u,`"%array,${p},${t},${e}":`)}else if(f.includes("=>")){const t=f.replace("=>","").trim();if(a[l]=pad(u,`"%map,${p},${t}":`),n){let t=0;for(let e=l+1;e<a.length;e++)if(a[e].startsWith("- ".padStart(y+2)))a[e]=a[e].replace("- ",t+++": ");else if(!a[e].trim().startsWith("#"))break}}else if(f.includes("?"))f=f.replace("?","").trim(),a[l]=pad(u,`"%switch,${p},${f}":`);else{if(f&&!f.startsWith("#"))throw console.log("at ",a[l-1]),console.log("AT ",a[l]),console.log("at ",a[l+1]),console.log(f),Error(`Unexpected child block at line ${l}`);p.startsWith("if")||(a[l]=pad(u,`"%container,${p},${f}":`))}else if(f.includes("[]")){const[t,e]=f.split("[]");a[l]=pad(u,`"%array,${p},${t},${e}":`)}else if(isNaN(p.replace(/'/g,""))){if(f.includes("=>")){const[t,e]=f.split("=>");a[l]=pad(u,`"%map,${p},${t.trim()},${e.trim()}":`)}}else{const t=p.replace(/'/g,"");a[l]=pad(u,`'%n,${parseInt(t)}': ${f}`)}}}for(;l();)console.info("Importing",s[s.length-1]);return log(a),a}function parseYAML(t){try{let e;return yaml.loadAll(t.join("\n"),t=>e=t),e}catch(t){throw t instanceof yaml.YAMLException&&delete t.mark,t}}function transform(t){const e=[];function n(t,e,n,s,r){if(log("OBJ",t),n.startsWith("$")&&(s=n.slice(1),n=void 0),"string"==typeof t)e?r.push({name:e,type:["array",{countType:n,count:s,type:t}]}):r.push("array",{countType:n,count:s,type:t});else{const o=Object.keys(t).filter(t=>!t.startsWith("!")),a=o.length,c=o[0];if(1==a&&(c.startsWith("%array")||c.startsWith("%switch")))if(e){const 
o={countType:n,count:s,type:[]};r.push({name:e,type:["array",o]}),i(t,o.type),!o.type[0].name||o.type[0].name.startsWith("__")?o.type=o.type[0].type:o.type=["container",[o.type[0]]]}else{const e={countType:n,count:s,type:[]};r.push("array",e),i(t,e.type),!e.type[0].name||e.type[0].name.startsWith("__")?e.type=e.type[0].type:e.type=["container",[e.type[0]]]}else if(e){const o={countType:n,count:s,type:["container",[]]};r.push({name:e,type:["array",o]}),i(t,o.type[1])}else{const e={countType:n,count:s,type:["container",[]]};r.push("array",e),i(t,e.type[1])}}}function i(e,s){s=s||[];for(const o in e){let a=e[o];if(!o.startsWith("!")){if("object"==typeof a){if(o.startsWith("%")){const c=o.split(",");if(o.startsWith("%map")){const e={},[,n,i,r]=c;if(!i)continue;a=a||t["%map,"+r+","];for(const t in a){if(t.startsWith("!"))continue;e[t.startsWith("%")?t.split(",")[1]:t]=a[t]}s.push({name:n,type:["mapper",{type:i,mappings:e}]})}else if(o.startsWith("%switch")){let[,t,e]=c;const o={};let l,p=[];for(const t in a){const e=a[t],s=t.startsWith("%")?t.split(",")[1]:t;if(t.startsWith("%array")){const[,i,s,a]=t.split(","),c=i.replace("if ","").split(" or ");for(var r of c)o[r=r.trim()]="string"==typeof e?e:[],n(e||s,null,a,void 0,o[r]),"default"==r&&(p=o[r],delete o[r])}else if(s.startsWith("if")){const n=s.replace("if ","").split(" or ");for(var r of n)o[r=r.trim()]="string"==typeof e?e:["container",[]],"object"==typeof e?t.startsWith("%switch")?(i({[t]:e},o[r][1]),o[r]=o[r][1][0].type):i(e,o[r][1]):e.startsWith("[")&&(o[r]=JSON.parse(e))}else s.startsWith("default")&&(p=[],"object"==typeof e?(p=["container",[]],t.startsWith("%switch")?(i({[t]:e},p[1]),p=p[1][0].type):i(e,p[1])):(p=e,e.startsWith("[")&&(p=JSON.parse(e))))}t.startsWith("__")&&(t=void 0,l=!0),s.push({name:t,anon:l,type:["switch",{compareTo:e.replace("?",""),fields:o,default:p.length?p:"void"}]})}else if(o.startsWith("%array")){const[,t,i,r]=c;if(log(a,typeof a,e),i&&a&&"object"==typeof a)throw Error("Array has a type and body: "+t);n(a||i,t,r,void 0,s)}else if(o.startsWith("%container")){const[,t]=c,e=t.startsWith("__")?void 0:t;let n;e||(n=!0),s.push({name:e,anon:n,type:["container",[]]}),i(a,s[s.length-1].type[1])}}}else if("string"==typeof a){if(o.startsWith("!"))continue;a.startsWith("[")&&(a=JSON.parse(a)),s.push({name:o,type:a})}log(o,typeof a)}}}i(t,e);for(const n in t){const i=t[n];"object"!=typeof i||n.startsWith("%")||(log("pushing ext",{name:n,type:i}),e.push({name:n,type:i}))}return e}function getName(t){return t.startsWith("%")?t.split(",")[1]:t}function formFinal(t,e){const n={};for(const e of t)n[e.name]=e.type;return n}function getIntermediate(t,e,n=!1){return parseYAML(toYAML(t,n,e))}function compile(t,e){const n=formFinal(transform(parseYAML(toYAML(t))));return"string"==typeof e&&fs.writeFileSync(e,JSON.stringify(n,null,2)),n}module.exports={compile:compile,parse:getIntermediate}; | ||
const fs = globalThis.window ? null : require('fs')
const Path = globalThis.window ? null : require('path')
const yaml = require('js-yaml')
const log = () => { }
function getIndentation(line) {
  // log(line)
  let ind = 0
  for (const c of line) {
    if (c === ' ') ind++
    else break
  }
  return ind
}
function pad(indentation, line) {
  let ret = ''
  for (let i = 0; i < indentation; i++) ret += ' '
  return ret + line
}
// Rewrite the YAML-like source into valid YAML that js-yaml can load
function toYAML(input, followImports = true, document = false) {
  const files = {}
  if (typeof input === 'string') {
    files.main = fs.readFileSync(input, 'utf8')
  } else {
    Object.assign(files, input)
  }
  const imported = []
  // Heuristic: keys/values containing JSON brackets are treated as inline JSON
  // and passed through untouched for js-yaml to handle
  function checkIfJson(key, val) {
    if (key.includes('"') || val.includes('[[') || val.includes('[{') || val.includes('{') || val.includes('}')) return true
    if (!val.includes('[]') && val.includes('[')) return true
    if (!val.includes('[]') && val.includes(']')) return true
    for (const c of ['[', ']', '{', '}']) if (key.includes(c)) return true
    return false
  }
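  // e.g. a value written as inline JSON, such as `someField: ["container", [ ... ]]`,
  // would be skipped here (the field name is illustrative, not from the package's fixtures)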
  function validateKey(line, key) { } // placeholder; currently a no-op
  let data = files.main
  data = data.replace(/\t/g, ' ')
  const lines = data.split('\n')
  let startedDocumenting = false
  function pars() {
    let modified = false
    for (let i = 0; i < lines.length; i++) {
      let [key, val] = lines[i].trim().split(':', 2)
      const thisLevel = getIndentation(lines[i])
      const nextLevel = getIndentation(lines[i + 1] || '')
      if (key.startsWith('#')) {
        if ((key.startsWith('# ') || key === '#') && startedDocumenting && document) { // Convert the YAML comments to entries
          key = '!comment,' + i
          val = lines[i].replace('#', '')
          const lastLine = lines[i - 1]
          const lastLevel = getIndentation(lines[i - 1] || '')
          if (lastLine.trim().startsWith('\'!comment') && lastLevel == thisLevel && false) { // Truncate multi-lines (branch disabled by the trailing `&& false`)
            lines[i - 1] += '\n' + pad(thisLevel + 3, val)
            lines[i] = ''
          } else {
            lines[i] = pad(thisLevel, key + ': |\n')
            lines[i] += pad(thisLevel + 3, val)
          }
        } else {
          continue
        }
      }
      key = key.trim(); val = val ? val.trim() : ''
      if (key == '_') key = '__' + i
      if (!key) continue
      if (key.startsWith('!')) {
        if (key.startsWith('!StartDocs')) startedDocumenting = true
        if (key.startsWith('!EndDocs')) startedDocumenting = false
        if (key == '!import' && !imported.includes(val) && followImports) {
          if (modified) {
            throw Error('Incorrectly placed import, place it at the top of the file')
          }
          imported.push(val)
          let imp = files[val]
          if (!imp && typeof input === 'string') {
            console.log('Path', input, Path.dirname(input))
            imp = fs.readFileSync(Path.dirname(input) + '/' + val, 'utf-8')
          } else if (!imp) {
            throw new Error('Import file not found: ' + val)
          }
          imp = imp.replace(/\t/g, ' ')
          lines.splice(i, 0, ...imp.split('\n'))
          return true
        }
        let nkey = key.replace('!', "'!")
        if (key == '!import' || key.includes('Docs')) nkey += ',' + i
        nkey += "'"
        lines[i] = lines[i].replace(key, nkey)
        continue
      }
      if (checkIfJson(key, val)) {
        // console.debug('Ignoring JSON', lines[i])
        continue
      }
      validateKey(lines[i], key)
      log('i', i, val)
      log(thisLevel, nextLevel, nextLevel > thisLevel, val.trim())
      const isParent = nextLevel > thisLevel
      if (isParent) {
        modified = true
        // console.info(lines[i])
        if (val.includes('[]')) {
          const [type, countType] = val.split('[]')
          if (type) throw Error('Array type cannot be both container and ' + type + ' at ' + val)
          lines[i] = pad(thisLevel, `"%array,${key},${type},${countType}":`)
        } else if (val.includes('=>')) {
          const type = val.replace('=>', '').trim()
          lines[i] = pad(thisLevel, `"%map,${key},${type}":`)
          if (document) { // we need index numbers for the docs
            let autoIncrementPos = 0
            for (let j = i + 1; j < lines.length; j++) {
              if (lines[j].startsWith('- '.padStart(nextLevel + 2))) {
                lines[j] = lines[j].replace('- ', autoIncrementPos++ + ': ')
              } else if (!lines[j].trim().startsWith('#')) {
                break
              }
            }
          }
        } else if (val.includes('?')) {
          val = val.replace('?', '').trim()
          lines[i] = pad(thisLevel, `"%switch,${key},${val}":`)
        } else if (val && !val.startsWith('#')) {
          console.log('at ', lines[i - 1])
          console.log('AT ', lines[i])
          console.log('at ', lines[i + 1])
          console.log(val)
          throw Error(`Unexpected child block at line ${i}`)
        } else if (!key.startsWith('if')) {
          lines[i] = pad(thisLevel, `"%container,${key},${val}":`)
        }
      } else {
        if (val.includes('[]')) {
          const [type, countType] = val.split('[]')
          lines[i] = pad(thisLevel, `"%array,${key},${type},${countType}":`)
        } else if (!isNaN(key.replace(/'/g, ''))) {
          // Integer-like object keys are enumerated in ascending numeric order ahead of
          // string keys in JS, so wrap them as '%n,NUMBER' to preserve the source order
          const num = key.replace(/'/g, '')
          lines[i] = pad(thisLevel, `'%n,${parseInt(num)}': ${val}`)
        } else if (val.includes('=>')) {
          const [sizeType, valueType] = val.split('=>')
          lines[i] = pad(thisLevel, `"%map,${key},${sizeType.trim()},${valueType.trim()}":`)
        }
      }
    }
  }
  while (pars()) { console.info('Importing', imported[imported.length - 1]) }
  log(lines)
  // fs.writeFileSync(__dirname + '/inter.yaml', lines.join('\n'))
  return lines
}
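// A rough sketch of the rewriting pars() performs (field and type names below are
// made up for illustration, not taken from the package's own fixtures):
//   entries: []varint            ->  "%array,entries,,varint":            (array whose element is the child block)
//   positions: position[]varint  ->  "%array,positions,position,varint":  (leaf array of a named type)
//   state: varint =>             ->  "%map,state,varint":                 (mapper keyed by varint)
//   body: params ?               ->  "%switch,body,params":               (switch comparing against params)
//   packet:                      ->  "%container,packet,":                (plain container)
//   0: handshaking               ->  '%n,0': handshaking                  (wrapped numeric key)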
function parseYAML(lines) {
  try {
    let ret
    yaml.loadAll(lines.join('\n'), d => ret = d)
    return ret
  } catch (e) {
    if (e instanceof yaml.YAMLException) {
      delete e.mark // remove logging spam
    }
    throw e
  }
}
function transform(json) {
  // console.log(json)
  const ctx = []
  function visitArray(obj, name, countType, count, ctx) {
    log('OBJ', obj)
    if (countType.startsWith('$')) {
      count = countType.slice(1)
      countType = undefined
    }
    if (typeof obj === 'string') {
      if (name) {
        ctx.push({ name, type: ['array', { countType, count, type: obj }] })
      } else {
        ctx.push('array', { countType, count, type: obj })
      }
    } else {
      const k = Object.keys(obj).filter(k => !k.startsWith('!'))
      const len = k.length
      const first = k[0]
      // Try to inline switch/array inside an array if only 1 item inside
      // log('F', first, name, Object.keys(obj), first.startsWith('%array'))
      if (len == 1 && (first.startsWith('%array') || first.startsWith('%switch'))) { // remove container nested array
        if (name) {
          const a = { countType, count, type: [] }
          ctx.push({ name, type: ['array', a] })
          trans(obj, a.type)
          // log('atn0-------',name,a.type)
          if (!a.type[0].name || a.type[0].name.startsWith('__')) a.type = a.type[0].type
          else a.type = ['container', [a.type[0]]]
        } else {
          const a = { countType, count, type: [] }
          ctx.push('array', a)
          trans(obj, a.type)
          // log('atn1',a.type)
          if (!a.type[0].name || a.type[0].name.startsWith('__')) a.type = a.type[0].type
          else a.type = ['container', [a.type[0]]]
        }
      } else {
        if (name) {
          const a = { countType, count, type: ['container', []] }
          ctx.push({ name, type: ['array', a] })
          trans(obj, a.type[1])
        } else {
          const a = { countType, count, type: ['container', []] }
          ctx.push('array', a)
          trans(obj, a.type[1])
        }
      }
    }
  }
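  // Example (illustrative names): an intermediate key "%array,slots,slot,$count"
  // reaches visitArray with countType '$count', which becomes a field reference:
  //   { name: 'slots', type: ['array', { countType: undefined, count: 'count', type: 'slot' }] }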
  function trans(obj, ctx) {
    ctx = ctx || []
    for (const key in obj) {
      let val = obj[key]
      if (key.startsWith('!')) continue
      if (typeof val === 'object') {
        if (key.startsWith('%')) {
          const args = key.split(',')
          if (key.startsWith('%map')) {
            const mappings = {}
            const [, name, mappingType, valueType] = args
            if (!mappingType) continue
            val = val || json['%map,' + valueType + ',']
            for (const i in val) {
              if (i.startsWith('!')) continue
              const _i = i.startsWith('%') ? i.split(',')[1] : i
              mappings[_i] = val[i] // Ignore comments + encapsulated numbers
            }
            ctx.push({
              name,
              type: [
                'mapper',
                {
                  type: mappingType,
                  mappings
                }
              ]
            })
          } else if (key.startsWith('%switch')) {
            let [, name, cmp] = args
            const as = {}
            let def = []
            for (const _key in val) {
              const _val = val[_key]
              const _keyName = _key.startsWith('%') ? _key.split(',')[1] : _key
              if (_key.startsWith('%array')) {
                const [, name, type, countType] = _key.split(',')
                const tokens = name.replace('if ', '').split(' or ')
                for (let token of tokens) {
                  token = token.trim()
                  as[token] = typeof _val === 'string' ? _val : []
                  // if (typeof _val == 'object') trans(_val, as[token])
                  visitArray(_val || type, null, countType, undefined, as[token])
                  if (token == 'default') {
                    def = as[token]
                    delete as[token]
                  }
                }
              } else if (_keyName.startsWith('if')) {
                const tokens = _keyName.replace('if ', '').split(' or ')
                for (let token of tokens) {
                  token = token.trim()
                  as[token] = typeof _val === 'string' ? _val : ['container', []]
                  if (typeof _val === 'object') {
                    if (_key.startsWith('%switch')) {
                      trans({ [_key]: _val }, as[token][1])
                      as[token] = as[token][1][0].type
                    } else {
                      trans(_val, as[token][1])
                    }
                  } else {
                    if (_val.startsWith('[')) {
                      as[token] = JSON.parse(_val)
                    }
                  }
                }
              } else if (_keyName.startsWith('default')) {
                def = []
                if (typeof _val === 'object') {
                  def = ['container', []]
                  if (_key.startsWith('%switch')) {
                    trans({ [_key]: _val }, def[1])
                    def = def[1][0].type
                  } else {
                    trans(_val, def[1])
                  }
                } else {
                  def = _val
                  if (_val.startsWith('[')) {
                    def = JSON.parse(_val)
                  }
                }
              }
            }
            let anon
            if (name.startsWith('__')) { name = undefined; anon = true }
            ctx.push({
              name,
              anon,
              type: [
                'switch',
                {
                  compareTo: cmp.replace('?', ''),
                  fields: as,
                  default: def.length ? def : 'void'
                }
              ]
            })
          } else if (key.startsWith('%array')) {
            const [, name, type, countType] = args
            log(val, typeof val, obj)
            if (type && val && typeof val === 'object') throw Error('Array has a type and body: ' + name)
            visitArray(val || type, name, countType, undefined, ctx)
          } else if (key.startsWith('%container')) {
            const [, cname] = args
            const name = cname.startsWith('__') ? undefined : cname
            let anon
            if (!name) anon = true
            ctx.push({ name, anon, type: ['container', []] })
            trans(val, ctx[ctx.length - 1].type[1])
          }
        } else {
          // log(ctx)
          // Probably JSON, leave as is
        }
      } else if (typeof val === 'string') {
        if (key.startsWith('!')) continue
        if (val.startsWith('[')) {
          val = JSON.parse(val)
        }
        ctx.push({ name: key, type: val })
      }
      log(key, typeof val)
    }
  }
  trans(json, ctx)
  // add in top-level entries that were left as plain JSON (not preprocessed into % keys)
  for (const key in json) {
    const val = json[key]
    if (typeof val === 'object' && !key.startsWith('%')) {
      log('pushing ext', { name: key, type: val })
      ctx.push({ name: key, type: val })
    }
  }
  // log('ctx', JSON.stringify(ctx, null, 2))
  // fs.writeFileSync(outFile || 'compiled_proto.json', JSON.stringify(ctx, null, 2))
  return ctx
}
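// Sketch of the shape transform() produces for the intermediate keys shown earlier
// (field/type names are illustrative; undefined fields omitted):
//   { '%container,packet,': { name: 'varint' } }
//     -> [{ name: 'packet', type: ['container', [{ name: 'name', type: 'varint' }]] }]
//   { '%array,entries,,varint': { id: 'varint' } }
//     -> [{ name: 'entries', type: ['array', { countType: 'varint',
//            type: ['container', [{ name: 'id', type: 'varint' }]] }] }]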
function getName(_key) { // note: not referenced elsewhere in this file
  if (_key.startsWith('%')) {
    return _key.split(',')[1]
  }
  return _key
}
function formFinal(inp, out) {
  const ret = {}
  for (const entry of inp) {
    ret[entry.name] = entry.type
  }
  // fs.writeFileSync('./compiled_proto.json', JSON.stringify(ret, null, 2))
  return ret
}
function getIntermediate(input, includeComments, followImports = false) {
  return parseYAML(toYAML(input, followImports, includeComments))
}
function compile(input, output) {
  const ret = formFinal(transform(parseYAML(toYAML(input))))
  if (typeof output === 'string') fs.writeFileSync(output, JSON.stringify(ret, null, 2))
  return ret
}
module.exports = { compile, parse: getIntermediate }
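For reference, a minimal usage sketch of the exported API (the file names below are hypothetical, not shipped with the package):

const { compile, parse } = require('protodef-yaml')
// Compile a YAML-like definition into a ProtoDef JSON schema; the second
// argument (an output path) is optional and only triggers a JSON dump to disk
const schema = compile('protocol.yaml', 'protocol.json')
// Or get the intermediate parsed form: parse(input, includeComments, followImports)
const intermediate = parse('protocol.yaml', true, true)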
@@ -0,0 +0,0 @@ ## 1.2
@@ -0,0 +0,0 @@ const { compile, parse } = require('./compiler')
{
  "name": "protodef-yaml",
  "version": "1.2.5",
  "version": "1.3.0",
  "description": "Transforms YAML-like syntax to ProtoDef JSON schema",
@@ -5,0 +5,0 @@ "main": "index.js",
@@ -0,0 +0,0 @@ const { parse, compile, genHTML } = require('../index')
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
No README
Quality: Package does not have a README. This may indicate a failed publish or a low-quality package.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system and could potentially read sensitive data.
Found 1 instance in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries; however, packages on npm should not minify code.
Found 1 instance in 1 package