codemirror-graphql
Advanced tools
Comparing version 0.2.2 to 0.3.0
145
hint.js
@@ -22,2 +22,10 @@ /** | ||
var _utilsForEachState = require('./utils/forEachState'); | ||
var _utilsForEachState2 = _interopRequireDefault(_utilsForEachState); | ||
var _utilsHintList = require('./utils/hintList'); | ||
var _utilsHintList2 = _interopRequireDefault(_utilsHintList); | ||
require('./mode'); | ||
@@ -62,3 +70,3 @@ | ||
if (kind === 'Document') { | ||
return hintList(editor, options, cur, token, [{ text: 'query' }, { text: 'mutation' }, { text: 'subscription' }, { text: 'fragment' }, { text: '{' }]); | ||
return _utilsHintList2['default'](editor, options, cur, token, [{ text: 'query' }, { text: 'mutation' }, { text: 'subscription' }, { text: 'fragment' }, { text: '{' }]); | ||
} | ||
@@ -84,3 +92,3 @@ | ||
} | ||
return hintList(editor, options, cur, token, fields.map(function (field) { | ||
return _utilsHintList2['default'](editor, options, cur, token, fields.map(function (field) { | ||
return { | ||
@@ -99,3 +107,3 @@ text: field.name, | ||
if (argDefs) { | ||
return hintList(editor, options, cur, token, argDefs.map(function (argDef) { | ||
return _utilsHintList2['default'](editor, options, cur, token, argDefs.map(function (argDef) { | ||
return { | ||
@@ -116,3 +124,3 @@ text: argDef.name, | ||
}); | ||
return hintList(editor, options, cur, token, objectFields.map(function (field) { | ||
return _utilsHintList2['default'](editor, options, cur, token, objectFields.map(function (field) { | ||
return { | ||
@@ -135,3 +143,3 @@ text: field.name, | ||
}); | ||
return hintList(editor, options, cur, token, values.map(function (value) { | ||
return _utilsHintList2['default'](editor, options, cur, token, values.map(function (value) { | ||
return { | ||
@@ -144,3 +152,3 @@ text: value.name, | ||
} else if (namedInputType === _graphql.GraphQLBoolean) { | ||
return hintList(editor, options, cur, token, [{ text: 'true', type: _graphql.GraphQLBoolean, description: 'Not false.' }, { text: 'false', type: _graphql.GraphQLBoolean, description: 'Not true.' }]); | ||
return _utilsHintList2['default'](editor, options, cur, token, [{ text: 'true', type: _graphql.GraphQLBoolean, description: 'Not false.' }, { text: 'false', type: _graphql.GraphQLBoolean, description: 'Not true.' }]); | ||
} | ||
@@ -162,3 +170,3 @@ } | ||
} | ||
return hintList(editor, options, cur, token, possibleTypes.map(function (type) { | ||
return _utilsHintList2['default'](editor, options, cur, token, possibleTypes.map(function (type) { | ||
return { | ||
@@ -191,3 +199,3 @@ text: type.name, | ||
return { | ||
v: hintList(editor, options, cur, token, relevantFrags.map(function (frag) { | ||
v: _utilsHintList2['default'](editor, options, cur, token, relevantFrags.map(function (frag) { | ||
return { | ||
@@ -211,3 +219,3 @@ text: frag.name.value, | ||
}).filter(_graphql.isInputType); | ||
return hintList(editor, options, cur, token, inputTypes.map(function (type) { | ||
return _utilsHintList2['default'](editor, options, cur, token, inputTypes.map(function (type) { | ||
return { | ||
@@ -225,3 +233,3 @@ text: type.name, | ||
}); | ||
return hintList(editor, options, cur, token, directives.map(function (directive) { | ||
return _utilsHintList2['default'](editor, options, cur, token, directives.map(function (directive) { | ||
return { | ||
@@ -249,3 +257,3 @@ text: directive.name, | ||
forEachState(tokenState, function (state) { | ||
_utilsForEachState2['default'](tokenState, function (state) { | ||
switch (state.kind) { | ||
@@ -316,3 +324,3 @@ case 'Query': | ||
forEachState(tokenState, function (state) { | ||
_utilsForEachState2['default'](tokenState, function (state) { | ||
switch (state.kind) { | ||
@@ -332,15 +340,2 @@ case 'Query': | ||
// Utility for iterating through a state stack bottom-up.
function forEachState(stack, fn) {
  // Walk the prevState chain from the top of the stack down, collecting
  // each state along the way...
  var collected = [];
  for (var state = stack; state && state.kind; state = state.prevState) {
    collected.push(state);
  }
  // ...then replay the collected states in reverse, i.e. bottom-up order.
  for (var i = collected.length - 1; i >= 0; i--) {
    fn(collected[i]);
  }
}
// Finds all fragment definition ASTs in a source. | ||
@@ -399,102 +394,2 @@ function getFragmentDefinitions(editor) { | ||
} | ||
} | ||
// Create the expected CodeMirror hint response for a list of suggestions
// and the token currently at the cursor. Returns undefined when there is
// nothing to suggest.
function hintList(editor, options, cursor, token, list) {
  var typed = normalizeText(token.string);
  var hints = filterAndSortList(list, typed);
  if (!hints) {
    return;
  }
  // Replace from the start of the token only when the cursor sits on a
  // word-like token; otherwise insert at the cursor position (token end).
  var onWord = token.type !== null && /\w/.test(token.string[0]);
  var tokenStart = onWord ? token.start : token.end;
  var results = {
    list: hints,
    from: _codemirror2['default'].Pos(cursor.line, tokenStart),
    to: _codemirror2['default'].Pos(cursor.line, token.end)
  };
  // Notify listeners (e.g. GraphiQL's UI) that a completion list exists.
  _codemirror2['default'].signal(editor, 'hasCompletion', editor, results, token);
  return results;
}
// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list: entries within proximity 2 of the typed text,
// closest (then shortest) first. Falls back to the full list when the text
// is empty or nothing matches closely.
function filterAndSortList(list, text) {
  if (!text) {
    return list;
  }
  var byProximity = list
    .map(function (entry) {
      return {
        proximity: getProximity(normalizeText(entry.text), text),
        entry: entry
      };
    })
    .filter(function (scored) {
      return scored.proximity <= 2;
    })
    .sort(function (left, right) {
      return (
        left.proximity - right.proximity ||
        left.entry.text.length - right.entry.text.length
      );
    })
    .map(function (scored) {
      return scored.entry;
    });
  // If filtering removed everything, show the unfiltered list rather than
  // an empty popup.
  return byProximity.length > 0 ? byProximity : list;
}
// Lower-case the text and strip every non-word character so comparisons
// ignore case and punctuation.
function normalizeText(text) {
  var lowered = text.toLowerCase();
  return lowered.replace(/\W/g, '');
}
// Determine a numeric proximity for a suggestion based on current text;
// lower values mean a closer match.
function getProximity(suggestion, text) {
  // Start with the lexical (edit) distance.
  var proximity = lexicalDistance(text, suggestion);
  var lengthGap = suggestion.length - text.length;
  if (lengthGap > 0) {
    // Do not penalize suggestions merely for being longer than the input...
    proximity -= lengthGap - 1;
    // ...but nudge down suggestions that do not start with the typed phrase.
    if (suggestion.indexOf(text) !== 0) {
      proximity += 0.5;
    }
  }
  return proximity;
}
/**
 * Computes the lexical distance between strings A and B.
 *
 * The "distance" between two strings is given by counting the minimum number
 * of edits needed to transform string A into string B. An edit can be an
 * insertion, deletion, or substitution of a single character, or a swap of two
 * adjacent characters.
 *
 * This distance can be useful for detecting typos in input or sorting
 * suggestions by similarity.
 *
 * @param {string} a
 * @param {string} b
 * @return {int} distance in number of edits
 */
function lexicalDistance(a, b) {
  var aLength = a.length;
  var bLength = b.length;
  // d[i][j] holds the distance between the first i characters of `a` and
  // the first j characters of `b`.
  var d = [];
  var i;
  var j;
  for (i = 0; i <= aLength; i++) {
    d[i] = [i];
  }
  for (j = 1; j <= bLength; j++) {
    d[0][j] = j;
  }
  for (i = 1; i <= aLength; i++) {
    for (j = 1; j <= bLength; j++) {
      var cost = a[i - 1] === b[j - 1] ? 0 : 1;
      var deletion = d[i - 1][j] + 1;
      var insertion = d[i][j - 1] + 1;
      var substitution = d[i - 1][j - 1] + cost;
      var best = Math.min(deletion, insertion, substitution);
      // Damerau extension: a swap of two adjacent characters is one edit.
      if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
        best = Math.min(best, d[i - 2][j - 2] + cost);
      }
      d[i][j] = best;
    }
  }
  return d[aLength][bLength];
}
356
mode.js
@@ -18,2 +18,6 @@ /** | ||
var _utilsOnlineParser = require('./utils/onlineParser'); | ||
var _utilsOnlineParser2 = _interopRequireDefault(_utilsOnlineParser); | ||
/** | ||
@@ -40,11 +44,15 @@ * The GraphQL mode is defined as a tokenizer along with a list of rules, each | ||
_codemirror2['default'].defineMode('graphql', function (config) { | ||
var parser = _utilsOnlineParser2['default']({ | ||
eatWhitespace: function eatWhitespace(stream) { | ||
return stream.eatWhile(isIgnored); | ||
}, | ||
LexRules: LexRules, | ||
ParseRules: ParseRules | ||
}); | ||
return { | ||
config: config, | ||
token: getToken, | ||
startState: parser.startState, | ||
token: parser.getToken, | ||
indent: indent, | ||
startState: function startState() { | ||
var initialState = { level: 0 }; | ||
pushRule(initialState, 'Document'); | ||
return initialState; | ||
}, | ||
electricInput: /^\s*[})\]]/, | ||
@@ -60,94 +68,6 @@ fold: 'brace', | ||
function getToken(stream, state) { | ||
if (state.needsAdvance) { | ||
state.needsAdvance = false; | ||
advanceRule(state); | ||
} | ||
var isIgnored = function isIgnored(ch) { | ||
return ch === ' ' || ch === '\t' || ch === ',' || ch === '\n' || ch === '\r' || ch === ''; | ||
}; | ||
// Remember initial indentation | ||
if (stream.sol()) { | ||
state.indentLevel = Math.floor(stream.indentation() / this.config.tabSize); | ||
} | ||
// Consume spaces and ignored characters | ||
if (stream.eatSpace() || stream.eatWhile(',')) { | ||
return null; | ||
} | ||
// Tokenize line comment | ||
if (stream.match(this.lineComment)) { | ||
stream.skipToEnd(); | ||
return 'comment'; | ||
} | ||
// Lex a token from the stream | ||
var token = lex(stream); | ||
// If there's no matching token, skip ahead. | ||
if (!token) { | ||
stream.match(/\w+|./); | ||
return 'invalidchar'; | ||
} | ||
// Save state before continuing. | ||
saveState(state); | ||
// Handle changes in expected indentation level | ||
if (token.kind === 'Punctuation') { | ||
if (/^[{([]/.test(token.value)) { | ||
// Push on the stack of levels one level deeper than the current level. | ||
state.levels = (state.levels || []).concat(state.indentLevel + 1); | ||
} else if (/^[})\]]/.test(token.value)) { | ||
// Pop from the stack of levels. | ||
// If the top of the stack is lower than the current level, lower the | ||
// current level to match. | ||
var levels = state.levels = (state.levels || []).slice(0, -1); | ||
if (levels.length > 0 && levels[levels.length - 1] < state.indentLevel) { | ||
state.indentLevel = levels[levels.length - 1]; | ||
} | ||
} | ||
} | ||
while (state.rule) { | ||
// If this is a forking rule, determine what rule to use based on | ||
// the current token, otherwise expect based on the current step. | ||
var expected = typeof state.rule === 'function' ? state.step === 0 ? state.rule(token, stream) : null : state.rule[state.step]; | ||
if (expected) { | ||
// Un-wrap optional/list ParseRules. | ||
if (expected.ofRule) { | ||
expected = expected.ofRule; | ||
} | ||
// A string represents a Rule | ||
if (typeof expected === 'string') { | ||
pushRule(state, expected); | ||
continue; | ||
} | ||
// Otherwise, match a Terminal. | ||
if (expected.match && expected.match(token)) { | ||
if (expected.update) { | ||
expected.update(state, token); | ||
} | ||
// If this token was a punctuator, advance the parse rule, otherwise | ||
// mark the state to be advanced before the next token. This ensures | ||
// that tokens which can be appended to keep the appropriate state. | ||
if (token.kind === 'Punctuation') { | ||
advanceRule(state); | ||
} else { | ||
state.needsAdvance = true; | ||
} | ||
return expected.style; | ||
} | ||
} | ||
unsuccessful(state); | ||
} | ||
// The parser does not know how to interpret this token, do not affect state. | ||
restoreState(state); | ||
return 'invalidchar'; | ||
} | ||
function indent(state, textAfter) { | ||
@@ -161,156 +81,2 @@ var levels = state.levels; | ||
// Shallow-copy every enumerable own key of `from` onto `to`; returns `to`.
function assign(to, from) {
  Object.keys(from).forEach(function (key) {
    to[key] = from[key];
  });
  return to;
}
// Module-level scratch object used to snapshot parser state across a
// single token.
var stateCache = {};
// Copy the cached snapshot back onto the live state.
function restoreState(state) {
  assign(state, stateCache);
}
// Snapshot the current state into the cache so it can be restored after a
// token the parser cannot interpret.
function saveState(state) {
  assign(stateCache, state);
}
// Push a new rule onto the state: snapshot the current state as prevState,
// then point the state at the named ParseRule starting from step 0.
function pushRule(state, ruleKind) {
  // The snapshot must be taken before any field is overwritten.
  state.prevState = assign({}, state);
  state.step = 0;
  state.rule = ParseRules[ruleKind];
  state.kind = ruleKind;
  state.name = null;
  state.type = null;
}
// Pop the current rule from the state, restoring every field from the
// snapshot saved by pushRule.
function popRule(state) {
  var saved = state.prevState;
  state.kind = saved.kind;
  state.name = saved.name;
  state.type = saved.type;
  state.rule = saved.rule;
  state.step = saved.step;
  state.prevState = saved.prevState;
}
// Advance the step of the current rule. If that completes the rule, pop it
// and advance the parent rule as well, repeating until an unfinished rule
// sits on top (or the stack is empty).
function advanceRule(state) {
  state.step++;
  for (;;) {
    var ruleHasMoreSteps = Array.isArray(state.rule) && state.step < state.rule.length;
    if (!state.rule || ruleHasMoreSteps) {
      return;
    }
    popRule(state);
    // A List step is deliberately left un-advanced so it may repeat itself.
    if (state.rule && !(Array.isArray(state.rule) && state.rule[state.step].isList)) {
      state.step++;
    }
  }
}
// Unwind the state after an unsuccessful match: pop back to the nearest
// optional/list rule (marked by ofRule), or until the rule stack is empty.
function unsuccessful(state) {
  for (; state.rule; popRule(state)) {
    if (Array.isArray(state.rule) && state.rule[state.step].ofRule) {
      break;
    }
  }
  // An optional or list rule is allowed to fail: consider it a success so
  // the parser may move past it.
  if (state.rule) {
    advanceRule(state);
  }
}
// Given a stream, returns a { kind, value } pair for the first LexRule that
// matches, or undefined when no rule matches.
function lex(stream) {
  var ruleKinds = Object.keys(LexRules);
  for (var i = 0; i < ruleKinds.length; i++) {
    var ruleKind = ruleKinds[i];
    var match = stream.match(LexRules[ruleKind]);
    if (match) {
      return { kind: ruleKind, value: match[0] };
    }
  }
}
// A constraint described as `but not` in the GraphQL spec: the rule matches
// only when none of the exclusions match the same token. Mutates and
// returns the given rule.
function butNot(rule, exclusions) {
  var originalMatch = rule.match;
  rule.match = function (token) {
    var matched = originalMatch(token);
    return matched && exclusions.every(function (exclusion) {
      return !exclusion.match(token);
    });
  };
  return rule;
}
// An optional rule: wraps `rule` so the parser may skip it on mismatch.
function opt(rule) {
  return { ofRule: rule };
}
// A list of another rule: wraps `rule` so the parser may match it zero or
// more times.
function list(rule) {
  return { ofRule: rule, isList: true };
}
// Terminal matching any token of the given lexer `kind`, rendered with the
// given CodeMirror `style`.
function t(kind, style) {
  return {
    style: style,
    match: function (token) {
      return token.kind === kind;
    }
  };
}
// Punctuator terminal: matches the exact punctuation `value`. Defaults to
// the 'punctuation' style when none is supplied.
function p(value, style) {
  var resolvedStyle = style || 'punctuation';
  var matchesValue = function (token) {
    return token.kind === 'Punctuation' && token.value === value;
  };
  return { style: resolvedStyle, match: matchesValue };
}
// Terminal for one specific keyword: a Name token whose value equals the
// given word, always styled as 'keyword'.
function word(value) {
  var isKeyword = function (token) {
    return token.kind === 'Name' && token.value === value;
  };
  return { style: 'keyword', match: isKeyword };
}
// A Name terminal that records the matched identifier on the current state
// as `name`, so later rules (and the hinter) can read it.
function name(style) {
  return {
    match: function (token) {
      return token.kind === 'Name';
    },
    update: function (state, token) {
      state.name = token.value;
    },
    style: style
  };
}
// A Name terminal that records the matched identifier as `type` on the
// *previous* state: the type name belongs to the enclosing rule rather
// than the type rule itself.
function type(style) {
  return {
    match: function (token) {
      return token.kind === 'Name';
    },
    update: function (state, token) {
      state.prevState.type = token.value;
    },
    style: style
  };
}
/** | ||
@@ -339,3 +105,3 @@ * The lexer rules. These are exactly as described by the spec. | ||
var ParseRules = { | ||
Document: [list('Definition')], | ||
Document: [_utilsOnlineParser.list('Definition')], | ||
Definition: function Definition(token) { | ||
@@ -356,11 +122,11 @@ switch (token.value) { | ||
// Note: instead of "Operation", these rules have been separated out. | ||
Query: [word('query'), opt(name('def')), opt('VariableDefinitions'), list('Directive'), 'SelectionSet'], | ||
Query: [word('query'), _utilsOnlineParser.opt(name('def')), _utilsOnlineParser.opt('VariableDefinitions'), _utilsOnlineParser.list('Directive'), 'SelectionSet'], | ||
ShortQuery: ['SelectionSet'], | ||
Mutation: [word('mutation'), opt(name('def')), opt('VariableDefinitions'), list('Directive'), 'SelectionSet'], | ||
Subscription: [word('subscription'), opt(name('def')), opt('VariableDefinitions'), list('Directive'), 'SelectionSet'], | ||
VariableDefinitions: [p('('), list('VariableDefinition'), p(')')], | ||
VariableDefinition: ['Variable', p(':'), 'Type', opt('DefaultValue')], | ||
Variable: [p('$', 'variable'), name('variable')], | ||
DefaultValue: [p('='), 'Value'], | ||
SelectionSet: [p('{'), list('Selection'), p('}')], | ||
Mutation: [word('mutation'), _utilsOnlineParser.opt(name('def')), _utilsOnlineParser.opt('VariableDefinitions'), _utilsOnlineParser.list('Directive'), 'SelectionSet'], | ||
Subscription: [word('subscription'), _utilsOnlineParser.opt(name('def')), _utilsOnlineParser.opt('VariableDefinitions'), _utilsOnlineParser.list('Directive'), 'SelectionSet'], | ||
VariableDefinitions: [_utilsOnlineParser.p('('), _utilsOnlineParser.list('VariableDefinition'), _utilsOnlineParser.p(')')], | ||
VariableDefinition: ['Variable', _utilsOnlineParser.p(':'), 'Type', _utilsOnlineParser.opt('DefaultValue')], | ||
Variable: [_utilsOnlineParser.p('$', 'variable'), name('variable')], | ||
DefaultValue: [_utilsOnlineParser.p('='), 'Value'], | ||
SelectionSet: [_utilsOnlineParser.p('{'), _utilsOnlineParser.list('Selection'), _utilsOnlineParser.p('}')], | ||
Selection: function Selection(token, stream) { | ||
@@ -370,9 +136,9 @@ return token.value === '...' ? stream.match(/[\s\u00a0,]*(on\b|@|{)/, false) ? 'InlineFragment' : 'FragmentSpread' : stream.match(/[\s\u00a0,]*:/, false) ? 'AliasedField' : 'Field'; | ||
// Note: this minor deviation of "AliasedField" simplifies the lookahead. | ||
AliasedField: [name('qualifier'), p(':'), 'Field'], | ||
Field: [name('property'), opt('Arguments'), list('Directive'), opt('SelectionSet')], | ||
Arguments: [p('('), list('Argument'), p(')')], | ||
Argument: [name('attribute'), p(':'), 'Value'], | ||
FragmentSpread: [p('...'), name('def'), list('Directive')], | ||
InlineFragment: [p('...'), opt('TypeCondition'), list('Directive'), 'SelectionSet'], | ||
FragmentDefinition: [word('fragment'), opt(butNot(name('def'), [word('on')])), 'TypeCondition', list('Directive'), 'SelectionSet'], | ||
AliasedField: [name('qualifier'), _utilsOnlineParser.p(':'), 'Field'], | ||
Field: [name('property'), _utilsOnlineParser.opt('Arguments'), _utilsOnlineParser.list('Directive'), _utilsOnlineParser.opt('SelectionSet')], | ||
Arguments: [_utilsOnlineParser.p('('), _utilsOnlineParser.list('Argument'), _utilsOnlineParser.p(')')], | ||
Argument: [name('attribute'), _utilsOnlineParser.p(':'), 'Value'], | ||
FragmentSpread: [_utilsOnlineParser.p('...'), name('def'), _utilsOnlineParser.list('Directive')], | ||
InlineFragment: [_utilsOnlineParser.p('...'), _utilsOnlineParser.opt('TypeCondition'), _utilsOnlineParser.list('Directive'), 'SelectionSet'], | ||
FragmentDefinition: [word('fragment'), _utilsOnlineParser.opt(_utilsOnlineParser.butNot(name('def'), [word('on')])), 'TypeCondition', _utilsOnlineParser.list('Directive'), 'SelectionSet'], | ||
TypeCondition: [word('on'), type('atom')], | ||
@@ -404,9 +170,9 @@ // Variables could be parsed in cases where only Const is expected by spec. | ||
}, | ||
NumberValue: [t('Number', 'number')], | ||
StringValue: [t('String', 'string')], | ||
BooleanValue: [t('Name', 'builtin')], | ||
NumberValue: [_utilsOnlineParser.t('Number', 'number')], | ||
StringValue: [_utilsOnlineParser.t('String', 'string')], | ||
BooleanValue: [_utilsOnlineParser.t('Name', 'builtin')], | ||
EnumValue: [name('string-2')], | ||
ListValue: [p('['), list('Value'), p(']')], | ||
ObjectValue: [p('{'), list('ObjectField'), p('}')], | ||
ObjectField: [name('attribute'), p(':'), 'Value'], | ||
ListValue: [_utilsOnlineParser.p('['), _utilsOnlineParser.list('Value'), _utilsOnlineParser.p(']')], | ||
ObjectValue: [_utilsOnlineParser.p('{'), _utilsOnlineParser.list('ObjectField'), _utilsOnlineParser.p('}')], | ||
ObjectField: [name('attribute'), _utilsOnlineParser.p(':'), 'Value'], | ||
Type: function Type(token) { | ||
@@ -416,5 +182,41 @@ return token.value === '[' ? 'ListType' : 'NamedType'; | ||
// NonNullType has been merged into ListType and NamedType to simplify. | ||
ListType: [p('['), 'NamedType', p(']'), opt(p('!'))], | ||
NamedType: [name('atom'), opt(p('!'))], | ||
Directive: [p('@', 'meta'), name('meta'), opt('Arguments')] | ||
}; | ||
ListType: [_utilsOnlineParser.p('['), 'NamedType', _utilsOnlineParser.p(']'), _utilsOnlineParser.opt(_utilsOnlineParser.p('!'))], | ||
NamedType: [name('atom'), _utilsOnlineParser.opt(_utilsOnlineParser.p('!'))], | ||
Directive: [_utilsOnlineParser.p('@', 'meta'), name('meta'), _utilsOnlineParser.opt('Arguments')] | ||
}; | ||
// A keyword Token: matches a Name token with exactly the given value,
// styled as 'keyword'.
function word(value) {
  return {
    style: 'keyword',
    match: function (token) {
      if (token.kind !== 'Name') {
        return false;
      }
      return token.value === value;
    }
  };
}
// A Name Token which will decorate the state with a `name`.
function name(style) {
  var terminal = {
    style: style,
    match: function (token) {
      return token.kind === 'Name';
    }
  };
  // Record the identifier so later rules (and consumers) can read it.
  terminal.update = function (state, token) {
    state.name = token.value;
  };
  return terminal;
}
// A Name Token which will decorate the previous state with a `type`; the
// type name belongs to the enclosing rule rather than this rule itself.
function type(style) {
  var terminal = {
    style: style,
    match: function (token) {
      return token.kind === 'Name';
    }
  };
  terminal.update = function (state, token) {
    state.prevState.type = token.value;
  };
  return terminal;
}
{ | ||
"name": "codemirror-graphql", | ||
"version": "0.2.2", | ||
"version": "0.3.0", | ||
"description": "GraphQL mode and helpers for CodeMirror.", | ||
@@ -22,2 +22,4 @@ "contributors": [ | ||
"lint.js", | ||
"utils", | ||
"variables", | ||
"README.md", | ||
@@ -37,3 +39,3 @@ "LICENSE" | ||
"options": { | ||
"mocha": "--full-trace --require resources/mocha-bootload src/**/*-test.js" | ||
"mocha": "--full-trace --require resources/mocha-bootload src/**/__tests__/**/*-test.js" | ||
}, | ||
@@ -40,0 +42,0 @@ "scripts": { |
License Policy Violation
License — This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk — This package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Unidentified License
License (Experimental) — Something that seems like a license was found, but its contents could not be matched with a known license.
Found 1 instance in 1 package
License Policy Violation
License — This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Unidentified License
License (Experimental) — Something that seems like a license was found, but its contents could not be matched with a known license.
Found 1 instance in 1 package
63892
14
1707
1