New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@citation-js/plugin-bibtex

Package Overview
Dependencies
Maintainers
1
Versions
54
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@citation-js/plugin-bibtex - npm Package Compare versions

Comparing version 0.5.0-alpha.2 to 0.5.0-alpha.3

lib-mjs/input/constants.js

382

lib-mjs/input/text.js

@@ -0,116 +1,316 @@

/**
 * Babel helper: collect an object's own string keys plus (optionally
 * enumerable-only) symbol keys.
 * @param {Object} object - source object
 * @param {boolean} [enumerableOnly] - when truthy, keep only enumerable symbols
 * @returns {Array<string|symbol>} own keys, strings first
 */
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter(function (sym) {
        return Object.getOwnPropertyDescriptor(object, sym).enumerable;
      });
    }
    keys.push.apply(keys, symbols);
  }
  return keys;
}

/**
 * Babel helper backing the object-spread syntax: copy the own
 * properties of every further argument onto `target`.
 * Odd-indexed sources are copied value-wise; even-indexed sources are
 * copied descriptor-wise (preserving getters) when the engine supports
 * `Object.getOwnPropertyDescriptors`.
 * @param {Object} target - object to receive the properties (mutated)
 * @returns {Object} the same `target`
 */
function _objectSpread(target) {
  for (let i = 1; i < arguments.length; i++) {
    const source = arguments[i] != null ? arguments[i] : {};
    if (i % 2) {
      ownKeys(source, true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(source).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}

/**
 * Babel helper: define `key` on `obj` as an enumerable, writable,
 * configurable data property (plain assignment when the key is new).
 * @returns {Object} the same `obj`
 */
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
import { util } from '@citation-js/core';
import varBibTeXTokens from './tokens.json';
// Removed: stale pre-0.5.0-alpha.3 tokenizer constants (`tokenPattern`,
// a regex `whitespace`, and `syntax`) left over from the old
// TokenStack-based parser. They are unused by the moo-based lexer
// below, and the regex `whitespace` illegally redeclared the
// `whitespace` rules object defined later in this module.
import moo from 'moo';
import * as constants from './constants';
// Entry-type and field-name identifiers: a letter followed by letters,
// digits, underscores or hyphens.
const identifier = /[a-zA-Z][a-zA-Z0-9_-]*/;
// Shared lexer rules for insignificant content: TeX-style `%` line
// comments and whitespace runs (`lineBreaks` keeps moo's line counter
// accurate across newlines).
const whitespace = {
  comment: /%.*/,
  whitespace: {
    match: /\s+/,
    lineBreaks: true
  }
};
// Shared lexer rules for literal (field-value) states: TeX commands,
// nested braced groups, inline math, and whitespace.
const text = {
  command: /\\(?:[a-z]+|.) */,
  lbrace: {
    match: '{',
    push: 'bracedLiteral'
  },
  mathShift: {
    match: '$',
    push: 'mathLiteral'
  },
  whitespace: {
    match: /\s+/,
    lineBreaks: true
  }
};
// moo lexer with one state per BibTeX parsing context. `push`/`pop`
// maintain a state stack (used for nested braced literals); `next`
// switches state without stacking.
const lexer = moo.states({
  // Top level: everything outside an entry (including @comment lines)
  // is junk; a bare `@` opens an entry.
  main: {
    junk: {
      match: /@[cC][oO][mM][mM][eE][nN][tT].+|[^@]+/,
      lineBreaks: true
    },
    at: {
      match: '@',
      push: 'entry'
    }
  },
  // After `@`: case-insensitive @string/@preamble vs. a regular data
  // entry type.
  entry: _objectSpread({}, whitespace, {
    otherEntryType: {
      match: /[sS][tT][rR][iI][nN][gG]|[pP][rR][eE][aA][mM][bB][lL][eE]/,
      next: 'otherEntryContents'
    },
    dataEntryType: {
      match: identifier,
      next: 'dataEntryContents'
    }
  }),
  // @string/@preamble: the opening brace or paren leads straight to
  // the fields state.
  otherEntryContents: _objectSpread({}, whitespace, {
    lbrace: {
      match: /[{(]/,
      next: 'fields'
    }
  }),
  // Data entry: opening delimiter, then the citation label, then the
  // comma that introduces the fields.
  dataEntryContents: _objectSpread({}, whitespace, {
    lbrace: {
      match: /[{(]/,
      next: 'dataEntryContents'
    },
    label: /[^,\s]+/,
    comma: {
      match: ',',
      next: 'fields'
    }
  }),
  // `field = value` pairs; `"` and `{` open literal sub-states, `}` or
  // `)` closes the entry.
  fields: _objectSpread({}, whitespace, {
    identifier,
    number: /-?\d+/,
    hash: '#',
    equals: '=',
    comma: ',',
    quote: {
      match: '"',
      push: 'quotedLiteral'
    },
    lbrace: {
      match: '{',
      push: 'bracedLiteral'
    },
    rbrace: {
      match: /[})]/,
      pop: true
    }
  }),
  // `"..."` value; the closing quote pops back to the previous state.
  quotedLiteral: _objectSpread({}, text, {
    quote: {
      match: '"',
      pop: true
    },
    text: /[^{$"\s\\]+/
  }),
  // `{...}` value or nested group; the closing brace pops one level.
  bracedLiteral: _objectSpread({}, text, {
    rbrace: {
      match: '}',
      pop: true
    },
    text: /[^{$}\s\\]+/
  }),
  // `$...$` math; `^` and `_` script markers are significant here.
  mathLiteral: _objectSpread({}, text, {
    mathShift: {
      match: '$',
      pop: true
    },
    script: /[\^_]/,
    text: /[^{$}\s\\^_]+/
  })
});
// Matching closing delimiter for each entry-opening delimiter: BibTeX
// entries may be wrapped in braces or parentheses. (The diff scrape
// had interleaved the removed `"`-based map with this one, producing a
// syntax error; this is the 0.5.0-alpha.3 map used by the grammar's
// Entry rule.)
const delimiters = {
  '(': ')',
  '{': '}'
};
// Grammar for BibTeX files, operating on the token stream produced by
// `lexer` above. Rule semantics (consumeToken/consumeRule/matchToken,
// `this.state`) come from `util.Grammar` in @citation-js/core.
// NOTE(review): the scraped diff had interleaved the removed
// TokenStack-based parser with this Grammar; this is the coherent
// 0.5.0-alpha.3 implementation.
export const bibtexGrammar = new util.Grammar({
  // Entry point: skip junk between entries and collect every parsed
  // entry until end of input. @string/@preamble entries yield
  // `undefined`, hence the `filter(Boolean)`.
  Main() {
    let entries = [];
    while (true) {
      while (this.matchToken('junk')) {
        this.consumeToken('junk');
      }
      if (this.matchEndOfFile()) {
        break;
      }
      entries.push(this.consumeRule('Entry'));
    }
    return entries.filter(Boolean);
  },
  // Optional padding: consume any run of whitespace and `%` comments.
  _() {
    let oldToken;
    while (oldToken !== this.token) {
      oldToken = this.token;
      this.consumeToken('whitespace', true);
      this.consumeToken('comment', true);
    }
  },
  // One @-entry. @string records a string constant, @preamble is
  // parsed but discarded; anything else produces
  // `{type, label, properties}`.
  Entry() {
    this.consumeToken('at');
    this.consumeRule('_');
    const type = (this.matchToken('otherEntryType') ? this.consumeToken('otherEntryType') : this.consumeToken('dataEntryType')).value.toLowerCase();
    this.consumeRule('_');
    const openBrace = this.consumeToken('lbrace').value;
    this.consumeRule('_');
    let result;
    if (type === 'string') {
      const [key, value] = this.consumeRule('Field');
      this.state.strings[key] = value;
    } else if (type === 'preamble') {
      this.consumeRule('Expression');
    } else {
      const label = this.consumeToken('label').value;
      this.consumeRule('_');
      this.consumeToken('comma');
      this.consumeRule('_');
      const properties = this.consumeRule('EntryBody');
      result = {
        type,
        label,
        properties
      };
    }
    this.consumeRule('_');
    // NOTE(review): `closeBrace` is a token object while
    // `delimiters[openBrace]` is a string, so this comparison is
    // always true and the (empty) branch is a no-op — kept as-is to
    // match the published build.
    const closeBrace = this.consumeToken('rbrace');
    if (closeBrace !== delimiters[openBrace]) {}
    return result;
  },
  // Comma-separated `field = value` pairs; a trailing comma is allowed.
  EntryBody() {
    let properties = {};
    while (this.matchToken('identifier')) {
      let [field, value] = this.consumeRule('Field');
      properties[field] = value;
      this.consumeRule('_');
      if (this.consumeToken('comma', true)) {
        this.consumeRule('_');
      } else {
        break;
      }
    }
    return properties;
  },
  // `identifier = expression`; returns the field token and its value.
  Field() {
    const field = this.consumeToken('identifier');
    this.consumeRule('_');
    this.consumeToken('equals');
    this.consumeRule('_');
    const value = this.consumeRule('Expression');
    return [field, value];
  },
  // One or more parts joined by `#` (BibTeX string concatenation).
  Expression() {
    let output = this.consumeRule('ExpressionPart');
    this.consumeRule('_');
    while (this.matchToken('hash')) {
      this.consumeToken('hash');
      this.consumeRule('_');
      output += this.consumeRule('ExpressionPart').toString();
      this.consumeRule('_');
    }
    return output;
  },
  // String-constant reference, bare number, quoted or braced literal.
  // Unknown string constants resolve to ''.
  ExpressionPart() {
    if (this.matchToken('identifier')) {
      return this.state.strings[this.consumeToken('identifier').value] || '';
    } else if (this.matchToken('number')) {
      return this.consumeToken('number').value;
    } else if (this.matchToken('quote')) {
      return this.consumeRule('QuoteString');
    } else if (this.matchToken('lbrace')) {
      return this.consumeRule('BracketString');
    }
  },
  // `"..."` literal (quotes not included in the result).
  QuoteString() {
    let output = '';
    this.consumeToken('quote');
    while (!this.matchToken('quote')) {
      output += this.consumeRule('Text');
    }
    this.consumeToken('quote');
    return output;
  },
  // `{...}` literal (the outer braces are not included in the result).
  BracketString() {
    let output = '';
    this.consumeToken('lbrace');
    while (!this.matchToken('rbrace')) {
      output += this.consumeRule('Text');
    }
    this.consumeToken('rbrace');
    return output;
  },
  // `$...$` math: `^`/`_` scripts are mapped through
  // constants.mathScripts (only the first following character is
  // scripted).
  MathString() {
    let output = '';
    this.consumeToken('mathShift');
    while (!this.matchToken('mathShift')) {
      if (this.matchToken('script')) {
        const script = this.consumeToken('script').value;
        const text = this.consumeRule('Text').replace(/^{|}$/g, '');
        output += constants.mathScripts[script][text[0]] + text.slice(1);
      } else {
        output += this.consumeRule('Text');
      }
    }
    this.consumeToken('mathShift');
    return output;
  },
  // A single piece of literal text: nested group, math, whitespace
  // (collapsed to one space), TeX command, or plain text with
  // ligatures replaced.
  Text() {
    if (this.matchToken('lbrace')) {
      return `{${this.consumeRule('BracketString')}}`;
    } else if (this.matchToken('mathShift')) {
      return this.consumeRule('MathString');
    } else if (this.matchToken('whitespace')) {
      this.consumeToken('whitespace');
      return ' ';
    } else if (this.matchToken('command')) {
      return this.consumeRule('Command');
    } else {
      return this.consumeToken('text').value.replace(constants.ligaturePattern, ligature => constants.ligatures[ligature]);
    }
  },
  // TeX command: known commands map to replacement text, diacritic
  // commands combine with the following text, single
  // non-word-character commands yield the character itself, and
  // anything else passes through verbatim with its backslash.
  Command() {
    const command = this.consumeToken('command').value.slice(1).trim();
    if (command in constants.commands) {
      return constants.commands[command];
    } else if (command in constants.diacritics && !this.matchEndOfFile()) {
      if (this.matchToken('text')) {
        const text = this.consumeToken('text').value;
        return text[0] + constants.diacritics[command] + text.slice(1);
      } else {
        return this.consumeRule('Text').replace(/^{|}$/g, '') + constants.diacritics[command];
      }
    } else if (/^\W$/.test(command)) {
      return command;
    } else {
      return '\\' + command;
    }
  }
}, {
  // Initial parser state: the built-in @string constants; @string
  // entries extend a per-parse copy of this map.
  strings: Object.assign({}, constants.defaultStrings)
});

// Parse a BibTeX string into an array of {type, label, properties}
// entries.
export function parse(text) {
  return bibtexGrammar.parse(lexer.reset(text));
}
export default parse;

@@ -6,123 +6,335 @@ "use strict";

});
exports.default = exports.parse = void 0;
exports.parse = parse;
exports.default = exports.bibtexGrammar = void 0;
var _core = require("@citation-js/core");
var _tokens = _interopRequireDefault(require("./tokens.json"));
var _moo = _interopRequireDefault(require("moo"));
var constants = _interopRequireWildcard(require("./constants"));
// Babel interop helper (auto-generated): lazily creates a WeakMap so
// repeated wildcard imports of the same CommonJS module return the
// same namespace object. Kept byte-identical — generated build code.
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
// Babel interop helper: wraps a CommonJS module as an ES-module-like
// namespace object, copying own properties (preserving accessors) and
// exposing the module itself as `.default`; real ES modules and
// nullish values pass through via the cache/early returns.
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; if (obj != null) { var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
// Babel interop helper: wraps a CommonJS export as `{ default: obj }`
// unless it is already an ES module.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Removed: stale pre-0.5.0-alpha.3 constants (`tokenPattern`, a regex
// `whitespace`, `syntax`, and the old `"`-based `delimiters` map) left
// over from the removed TokenStack parser. They are unused by the
// moo-based lexer, and both `whitespace` and `delimiters` illegally
// redeclared `const` bindings defined later in this module.
// Babel object-spread helpers for the CJS build. The scraped diff had
// spliced an unterminated fragment of the removed `getTokenizedBibtex`
// between them; that fragment is dropped here.
// Collect an object's own string keys plus (optionally enumerable-only)
// symbol keys.
function ownKeys(object, enumerableOnly) {
  var keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) symbols = symbols.filter(function (sym) {
      return Object.getOwnPropertyDescriptor(object, sym).enumerable;
    });
    keys.push.apply(keys, symbols);
  }
  return keys;
}
// Copy the own properties of every further argument onto `target`
// (odd-indexed sources value-wise, even-indexed descriptor-wise when
// supported); returns `target`.
function _objectSpread(target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = arguments[i] != null ? arguments[i] : {};
    if (i % 2) {
      ownKeys(source, true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(source).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}
// Define `key` on `obj` as an enumerable, writable, configurable data
// property (plain assignment when the key is new); returns `obj`.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true });
  } else {
    obj[key] = value;
  }
  return obj;
}
// Entry-type and field-name identifiers: a letter followed by letters,
// digits, underscores or hyphens.
const identifier = /[a-zA-Z][a-zA-Z0-9_-]*/;
// Shared lexer rules for insignificant content: `%` line comments and
// whitespace runs (`lineBreaks` keeps moo's line counter accurate).
const whitespace = {
  comment: /%.*/,
  whitespace: {
    match: /\s+/,
    lineBreaks: true
  }
};
// Shared lexer rules for literal (field-value) states: TeX commands,
// nested braced groups, inline math, and whitespace.
const text = {
  command: /\\(?:[a-z]+|.) */,
  lbrace: {
    match: '{',
    push: 'bracedLiteral'
  },
  mathShift: {
    match: '$',
    push: 'mathLiteral'
  },
  whitespace: {
    match: /\s+/,
    lineBreaks: true
  }
};
// Removed: orphaned opening of the old TokenStack-based `parseBibTeX`
// implementation (superseded by `bibtexGrammar` below). The fragment
// was unterminated in the scraped diff and broke the module.
// moo lexer with one state per BibTeX parsing context (CJS build).
// `push`/`pop` maintain a state stack; `next` switches state without
// stacking.
const lexer = _moo.default.states({
  // Top level: everything outside an entry (including @comment lines)
  // is junk; a bare `@` opens an entry.
  main: {
    junk: {
      match: /@[cC][oO][mM][mM][eE][nN][tT].+|[^@]+/,
      lineBreaks: true
    },
    at: {
      match: '@',
      push: 'entry'
    }
  },
  // After `@`: case-insensitive @string/@preamble vs. a regular data
  // entry type.
  entry: _objectSpread({}, whitespace, {
    otherEntryType: {
      match: /[sS][tT][rR][iI][nN][gG]|[pP][rR][eE][aA][mM][bB][lL][eE]/,
      next: 'otherEntryContents'
    },
    dataEntryType: {
      match: identifier,
      next: 'dataEntryContents'
    }
  }),
  // @string/@preamble: the opening brace or paren leads straight to
  // the fields state.
  otherEntryContents: _objectSpread({}, whitespace, {
    lbrace: {
      match: /[{(]/,
      next: 'fields'
    }
  }),
  // Data entry: opening delimiter, then the citation label, then the
  // comma that introduces the fields.
  dataEntryContents: _objectSpread({}, whitespace, {
    lbrace: {
      match: /[{(]/,
      next: 'dataEntryContents'
    },
    label: /[^,\s]+/,
    comma: {
      match: ',',
      next: 'fields'
    }
  }),
  // `field = value` pairs; `"` and `{` open literal sub-states, `}` or
  // `)` closes the entry.
  fields: _objectSpread({}, whitespace, {
    identifier,
    number: /-?\d+/,
    hash: '#',
    equals: '=',
    comma: ',',
    quote: {
      match: '"',
      push: 'quotedLiteral'
    },
    lbrace: {
      match: '{',
      push: 'bracedLiteral'
    },
    rbrace: {
      match: /[})]/,
      pop: true
    }
  }),
  // `"..."` value; the closing quote pops back to the previous state.
  quotedLiteral: _objectSpread({}, text, {
    quote: {
      match: '"',
      pop: true
    },
    text: /[^{$"\s\\]+/
  }),
  // `{...}` value or nested group; the closing brace pops one level.
  bracedLiteral: _objectSpread({}, text, {
    rbrace: {
      match: '}',
      pop: true
    },
    text: /[^{$}\s\\]+/
  }),
  // `$...$` math; `^` and `_` script markers are significant here.
  mathLiteral: _objectSpread({}, text, {
    mathShift: {
      match: '$',
      pop: true
    },
    script: /[\^_]/,
    text: /[^{$}\s\\^_]+/
  })
});
// Matching closing delimiter for each entry-opening delimiter
// (entries may be wrapped in braces or parentheses).
const delimiters = {
  '(': ')',
  '{': '}'
};
// Grammar for BibTeX files (CJS build), operating on the token stream
// produced by `lexer` above. Rule semantics come from `util.Grammar`
// in @citation-js/core.
// NOTE(review): the scraped diff had interleaved the removed
// TokenStack-based parser with this Grammar; this is the coherent
// 0.5.0-alpha.3 implementation.
const bibtexGrammar = new _core.util.Grammar({
  // Entry point: skip junk between entries, collect every parsed
  // entry; @string/@preamble yield `undefined` (filtered out).
  Main() {
    let entries = [];
    while (true) {
      while (this.matchToken('junk')) {
        this.consumeToken('junk');
      }
      if (this.matchEndOfFile()) {
        break;
      }
      entries.push(this.consumeRule('Entry'));
    }
    return entries.filter(Boolean);
  },
  // Optional padding: any run of whitespace and `%` comments.
  _() {
    let oldToken;
    while (oldToken !== this.token) {
      oldToken = this.token;
      this.consumeToken('whitespace', true);
      this.consumeToken('comment', true);
    }
  },
  // One @-entry: @string records a constant, @preamble is discarded,
  // data entries produce `{type, label, properties}`.
  Entry() {
    this.consumeToken('at');
    this.consumeRule('_');
    const type = (this.matchToken('otherEntryType') ? this.consumeToken('otherEntryType') : this.consumeToken('dataEntryType')).value.toLowerCase();
    this.consumeRule('_');
    const openBrace = this.consumeToken('lbrace').value;
    this.consumeRule('_');
    let result;
    if (type === 'string') {
      const [key, value] = this.consumeRule('Field');
      this.state.strings[key] = value;
    } else if (type === 'preamble') {
      this.consumeRule('Expression');
    } else {
      const label = this.consumeToken('label').value;
      this.consumeRule('_');
      this.consumeToken('comma');
      this.consumeRule('_');
      const properties = this.consumeRule('EntryBody');
      result = {
        type,
        label,
        properties
      };
    }
    this.consumeRule('_');
    // NOTE(review): token-vs-string comparison with an empty branch —
    // a no-op, kept to match the published build.
    const closeBrace = this.consumeToken('rbrace');
    if (closeBrace !== delimiters[openBrace]) {}
    return result;
  },
  // Comma-separated `field = value` pairs; trailing comma allowed.
  EntryBody() {
    let properties = {};
    while (this.matchToken('identifier')) {
      let [field, value] = this.consumeRule('Field');
      properties[field] = value;
      this.consumeRule('_');
      if (this.consumeToken('comma', true)) {
        this.consumeRule('_');
      } else {
        break;
      }
    }
    return properties;
  },
  // `identifier = expression`.
  Field() {
    const field = this.consumeToken('identifier');
    this.consumeRule('_');
    this.consumeToken('equals');
    this.consumeRule('_');
    const value = this.consumeRule('Expression');
    return [field, value];
  },
  // One or more parts joined by `#` (string concatenation).
  Expression() {
    let output = this.consumeRule('ExpressionPart');
    this.consumeRule('_');
    while (this.matchToken('hash')) {
      this.consumeToken('hash');
      this.consumeRule('_');
      output += this.consumeRule('ExpressionPart').toString();
      this.consumeRule('_');
    }
    return output;
  },
  // String-constant reference, number, quoted or braced literal;
  // unknown constants resolve to ''.
  ExpressionPart() {
    if (this.matchToken('identifier')) {
      return this.state.strings[this.consumeToken('identifier').value] || '';
    } else if (this.matchToken('number')) {
      return this.consumeToken('number').value;
    } else if (this.matchToken('quote')) {
      return this.consumeRule('QuoteString');
    } else if (this.matchToken('lbrace')) {
      return this.consumeRule('BracketString');
    }
  },
  // `"..."` literal.
  QuoteString() {
    let output = '';
    this.consumeToken('quote');
    while (!this.matchToken('quote')) {
      output += this.consumeRule('Text');
    }
    this.consumeToken('quote');
    return output;
  },
  // `{...}` literal (outer braces excluded).
  BracketString() {
    let output = '';
    this.consumeToken('lbrace');
    while (!this.matchToken('rbrace')) {
      output += this.consumeRule('Text');
    }
    this.consumeToken('rbrace');
    return output;
  },
  // `$...$` math with `^`/`_` scripts mapped via constants.mathScripts.
  MathString() {
    let output = '';
    this.consumeToken('mathShift');
    while (!this.matchToken('mathShift')) {
      if (this.matchToken('script')) {
        const script = this.consumeToken('script').value;
        const text = this.consumeRule('Text').replace(/^{|}$/g, '');
        output += constants.mathScripts[script][text[0]] + text.slice(1);
      } else {
        output += this.consumeRule('Text');
      }
    }
    this.consumeToken('mathShift');
    return output;
  },
  // One piece of literal text: group, math, whitespace (collapsed to a
  // single space), command, or plain text with ligatures replaced.
  Text() {
    if (this.matchToken('lbrace')) {
      return `{${this.consumeRule('BracketString')}}`;
    } else if (this.matchToken('mathShift')) {
      return this.consumeRule('MathString');
    } else if (this.matchToken('whitespace')) {
      this.consumeToken('whitespace');
      return ' ';
    } else if (this.matchToken('command')) {
      return this.consumeRule('Command');
    } else {
      return this.consumeToken('text').value.replace(constants.ligaturePattern, ligature => constants.ligatures[ligature]);
    }
  },
  // TeX command: known replacements, diacritic combination, single
  // punctuation pass-through, or verbatim with backslash.
  Command() {
    const command = this.consumeToken('command').value.slice(1).trim();
    if (command in constants.commands) {
      return constants.commands[command];
    } else if (command in constants.diacritics && !this.matchEndOfFile()) {
      if (this.matchToken('text')) {
        const text = this.consumeToken('text').value;
        return text[0] + constants.diacritics[command] + text.slice(1);
      } else {
        return this.consumeRule('Text').replace(/^{|}$/g, '') + constants.diacritics[command];
      }
    } else if (/^\W$/.test(command)) {
      return command;
    } else {
      return '\\' + command;
    }
  }
}, {
  // Initial parser state: the built-in @string constants.
  strings: Object.assign({}, constants.defaultStrings)
});
exports.bibtexGrammar = bibtexGrammar;

// Parse a BibTeX string into an array of {type, label, properties}
// entries.
function parse(text) {
  return bibtexGrammar.parse(lexer.reset(text));
}
var _default = parse;
exports.default = _default;
{
"name": "@citation-js/plugin-bibtex",
"version": "0.5.0-alpha.2",
"version": "0.5.0-alpha.3",
"description": "Plugin for BibTeX formats for Citation.js",

@@ -36,11 +36,12 @@ "keywords": [

"@citation-js/date": "^0.4.4",
"@citation-js/name": "^0.4.2"
"@citation-js/name": "^0.4.2",
"moo": "^0.5.1"
},
"devDependencies": {
"@citation-js/core": "^0.5.0-alpha.2"
"@citation-js/core": "^0.5.0-alpha.3"
},
"peerDependencies": {
"@citation-js/core": "^0.4.0-rc.0"
"@citation-js/core": "^0.5.0-alpha.0"
},
"gitHead": "669be81e86a7847a3f2c2b4da95b9a62b0f11729"
"gitHead": "dacc48b278edbb7ea6be4415366a4ffc4d9e85a9"
}
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc