Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

comment-parser

Package Overview
Dependencies
Maintainers
1
Versions
55
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

comment-parser - npm Package Compare versions

Comparing version 1.0.1 to 1.1.0

es6/parser/tokenizers/description.d.ts

555

browser/index.js
var CommentParser = (function (exports) {
'use strict';
// Ponyfill of TypeScript's `__assign` helper: shallow-merges own enumerable
// properties of every source argument into the target (Object.assign fallback
// for environments lacking it); reuses window.__assign when already defined.
var __assign = (window && window.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
function isSpace(source) {

@@ -19,3 +8,3 @@ return /^\s+$/.test(source);

function splitSpace(source) {
var matches = source.match(/^\s+/);
const matches = source.match(/^\s+/);
return matches == null

@@ -28,9 +17,7 @@ ? ['', source]

}
function seedSpec(spec) {
if (spec === void 0) { spec = {}; }
return __assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
function seedSpec(spec = {}) {
return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
}
function seedTokens(tokens) {
if (tokens === void 0) { tokens = {}; }
return __assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '' }, tokens);
function seedTokens(tokens = {}) {
return Object.assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '' }, tokens);
}

@@ -43,6 +30,5 @@ /**

function rewireSource(block) {
var source = block.source.reduce(function (acc, line) { return acc.set(line.number, line); }, new Map());
for (var _i = 0, _a = block.tags; _i < _a.length; _i++) {
var spec = _a[_i];
spec.source = spec.source.map(function (line) { return source.get(line.number); });
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
for (const spec of block.tags) {
spec.source = spec.source.map((line) => source.get(line.number));
}

@@ -52,2 +38,32 @@ return block;

const reTag = /^@\S+/;
/**
 * Creates configured `Parser` grouping comment lines into sections:
 * the leading description section, then one section per encountered tag.
 * @param {Partial<Options>} options `fence` marks escaped regions where tags are ignored
 */
function getParser({ fence = '```' } = {}) {
  const fencer = getFencer(fence);
  // a line matching the fence flips the "inside fenced region" state
  const toggleFence = (source, isFenced) =>
    fencer(source) ? !isFenced : isFenced;
  return function parseBlock(source) {
    // the first section collects the block description
    const sections = [[]];
    let isFenced = false;
    for (const line of source) {
      const opensTag = reTag.test(line.tokens.description);
      if (opensTag && !isFenced) {
        // a tag outside a fenced region starts a new section
        sections.push([line]);
      } else {
        sections[sections.length - 1].push(line);
      }
      isFenced = toggleFence(line.tokens.description, isFenced);
    }
    return sections;
  };
}
// Normalizes the `fence` option into a predicate telling whether a line
// toggles the fenced state; a string fence toggles on an odd occurrence count.
function getFencer(fence) {
  if (typeof fence === 'string')
    return (source) => source.split(fence).length % 2 === 0;
  return fence;
}
var Markers;

@@ -61,11 +77,9 @@ (function (Markers) {

function getParser(_a) {
var _b = (_a === void 0 ? {} : _a).startLine, startLine = _b === void 0 ? 0 : _b;
var block = null;
var num = startLine;
function getParser$1({ startLine = 0, } = {}) {
let block = null;
let num = startLine;
return function parseSource(source) {
var _a, _b, _c;
var rest = source;
var tokens = seedTokens();
_a = splitSpace(rest), tokens.start = _a[0], rest = _a[1];
let rest = source;
const tokens = seedTokens();
[tokens.start, rest] = splitSpace(rest);
if (block === null &&

@@ -77,3 +91,3 @@ rest.startsWith(Markers.start) &&

rest = rest.slice(Markers.start.length);
_b = splitSpace(rest), tokens.postDelimiter = _b[0], rest = _b[1];
[tokens.postDelimiter, rest] = splitSpace(rest);
}

@@ -84,3 +98,3 @@ if (block === null) {

}
var isClosed = rest.trimRight().endsWith(Markers.end);
const isClosed = rest.trimRight().endsWith(Markers.end);
if (tokens.delimiter === '' &&

@@ -91,6 +105,6 @@ rest.startsWith(Markers.delim) &&

rest = rest.slice(Markers.delim.length);
_c = splitSpace(rest), tokens.postDelimiter = _c[0], rest = _c[1];
[tokens.postDelimiter, rest] = splitSpace(rest);
}
if (isClosed) {
var trimmed = rest.trimRight();
const trimmed = rest.trimRight();
tokens.end = rest.slice(trimmed.length - Markers.end.length);

@@ -100,6 +114,6 @@ rest = trimmed.slice(0, -Markers.end.length);

tokens.description = rest;
block.push({ number: num, source: source, tokens: tokens });
block.push({ number: num, source, tokens });
num++;
if (isClosed) {
var result = block.slice();
const result = block.slice();
block = null;

@@ -112,39 +126,7 @@ return result;

var reTag = /^@\S+/;
function getParser$1(_a) {
var _b = (_a === void 0 ? {} : _a).fence, fence = _b === void 0 ? '```' : _b;
var fencer = getFencer(fence);
var toggleFence = function (source, isFenced) {
return fencer(source) ? !isFenced : isFenced;
};
return function parseBlock(source) {
// start with description section
var sections = [[]];
var isFenced = false;
for (var _i = 0, source_1 = source; _i < source_1.length; _i++) {
var line = source_1[_i];
if (reTag.test(line.tokens.description) && !isFenced) {
sections.push([line]);
}
else {
sections[sections.length - 1].push(line);
}
isFenced = toggleFence(line.tokens.description, isFenced);
}
return sections;
};
}
function getFencer(fence) {
if (typeof fence === 'string')
return function (source) { return source.split(fence).length % 2 === 0; };
return fence;
}
function getParser$2(_a) {
var tokenizers = _a.tokenizers;
function getParser$2({ tokenizers }) {
return function parseSpec(source) {
var _a;
var spec = seedSpec({ source: source });
for (var _i = 0, tokenizers_1 = tokenizers; _i < tokenizers_1.length; _i++) {
var tokenize = tokenizers_1[_i];
let spec = seedSpec({ source });
for (const tokenize of tokenizers) {
spec = tokenize(spec);

@@ -157,6 +139,11 @@ if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)

}
/**
 * Splits the `@prefix` from remaining `Spec.lines[].tokens.description` into the `tag` token,
* and populates `spec.tag`
*/
function tagTokenizer() {
return function (spec) {
var tokens = spec.source[0].tokens;
var match = tokens.description.match(/\s*(@(\S+))(\s*)/);
return (spec) => {
const { tokens } = spec.source[0];
const match = tokens.description.match(/\s*(@(\S+))(\s*)/);
if (match === null) {

@@ -178,21 +165,31 @@ spec.problems.push({

}
function typeTokenizer() {
return function (spec) {
var _a;
var res = '';
var curlies = 0;
var tokens = spec.source[0].tokens;
var source = tokens.description.trimLeft();
if (source[0] !== '{')
return spec;
for (var _i = 0, source_1 = source; _i < source_1.length; _i++) {
var ch = source_1[_i];
if (ch === '{')
curlies++;
if (ch === '}')
curlies--;
res += ch;
if (curlies === 0) {
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`
*
* @param {Spacing} spacing tells how to deal with a whitespace
* for type values going over multiple lines
*/
function typeTokenizer(spacing = 'compact') {
const join = getJoiner(spacing);
return (spec) => {
let curlies = 0;
let lines = [];
for (const [i, { tokens }] of spec.source.entries()) {
let type = '';
if (i === 0 && tokens.description[0] !== '{')
return spec;
for (const ch of tokens.description) {
if (ch === '{')
curlies++;
if (ch === '}')
curlies--;
type += ch;
if (curlies === 0)
break;
}
lines.push([tokens, type]);
if (curlies === 0)
break;
}
}

@@ -208,15 +205,41 @@ if (curlies !== 0) {

}
spec.type = res.slice(1, -1);
tokens.type = res;
_a = splitSpace(source.slice(tokens.type.length)), tokens.postType = _a[0], tokens.description = _a[1];
const parts = [];
const offset = lines[0][0].postDelimiter.length;
for (const [i, [tokens, type]] of lines.entries()) {
if (type === '')
continue;
tokens.type = type;
if (i > 0) {
tokens.type = tokens.postDelimiter.slice(offset) + type;
tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
}
[tokens.postType, tokens.description] = splitSpace(tokens.description.slice(tokens.type.length));
parts.push(tokens.type);
}
parts[0] = parts[0].slice(1);
parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
spec.type = join(parts);
return spec;
};
}
const trim = (x) => x.trim();
/**
 * Resolves a `Spacing` option into a join function for multiline type parts:
 * 'compact' trims each part and concatenates, 'preserve' keeps line breaks,
 * and a custom function is returned unchanged.
 */
function getJoiner(spacing) {
  switch (spacing) {
    case 'compact':
      return (t) => t.map(trim).join('');
    case 'preserve':
      return (t) => t.join('\n');
    default:
      return spacing;
  }
}
// Tells whether the string is wrapped in double quotes; falsy inputs
// (empty string, undefined) are passed through unchanged, as before.
const isQuoted = (s) => (s ? s.startsWith('"') && s.endsWith('"') : s);
/**
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
* and populates the `spec.name`
*/
function nameTokenizer() {
return function (spec) {
var _a, _b;
var _c;
var tokens = spec.source[0].tokens;
var source = tokens.description.trimLeft();
var quotedGroups = source.split('"');
return (spec) => {
const { tokens } = spec.source[0];
const source = tokens.description.trimLeft();
const quotedGroups = source.split('"');
// if it starts with quoted group, assume it is a literal

@@ -227,13 +250,12 @@ if (quotedGroups.length > 1 &&

spec.name = quotedGroups[1];
tokens.name = "\"" + quotedGroups[1] + "\"";
_a = splitSpace(source.slice(tokens.name.length)), tokens.postName = _a[0], tokens.description = _a[1];
tokens.name = `"${quotedGroups[1]}"`;
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
return spec;
}
var brackets = 0;
var name = '';
var optional = false;
var defaultValue;
let brackets = 0;
let name = '';
let optional = false;
let defaultValue;
// assume name is non-space string or anything wrapped into brackets
for (var _i = 0, source_2 = source; _i < source_2.length; _i++) {
var ch = source_2[_i];
for (const ch of source) {
if (brackets === 0 && isSpace(ch))

@@ -256,9 +278,10 @@ break;

}
var nameToken = name;
const nameToken = name;
if (name[0] === '[' && name[name.length - 1] === ']') {
optional = true;
name = name.slice(1, -1);
var parts = name.split('=');
const parts = name.split('=');
name = parts[0].trim();
defaultValue = (_c = parts[1]) === null || _c === void 0 ? void 0 : _c.trim();
if (parts[1] !== undefined)
defaultValue = parts.slice(1).join('=').trim();
if (name === '') {

@@ -273,6 +296,6 @@ spec.problems.push({

}
if (parts.length > 2) {
if (defaultValue === '') {
spec.problems.push({
code: 'spec:name:invalid-default',
message: 'invalid default value syntax',
code: 'spec:name:empty-default',
message: 'empty default value',
line: spec.source[0].number,

@@ -283,6 +306,7 @@ critical: true,

}
if (defaultValue === '') {
// has "=" and is not a string, except for "=>"
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
spec.problems.push({
code: 'spec:name:empty-default',
message: 'empty default value',
code: 'spec:name:invalid-default',
message: 'invalid default value syntax',
line: spec.source[0].number,

@@ -299,8 +323,15 @@ critical: true,

spec.default = defaultValue;
_b = splitSpace(source.slice(tokens.name.length)), tokens.postName = _b[0], tokens.description = _b[1];
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
return spec;
};
}
function descriptionTokenizer(join) {
return function (spec) {
/**
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing strategy
* @param {Spacing} spacing tells how to handle the whitespace
*/
function descriptionTokenizer(spacing = 'compact') {
const join = getJoiner$1(spacing);
return (spec) => {
spec.description = join(spec.source);

@@ -310,26 +341,27 @@ return spec;

}
function getSpacer(spacing) {
function getJoiner$1(spacing) {
if (spacing === 'compact')
return compactSpacer;
return compactJoiner;
if (spacing === 'preserve')
return preserveSpacer;
return preserveJoiner;
return spacing;
}
function compactSpacer(lines) {
function compactJoiner(lines) {
return lines
.map(function (_a) {
var description = _a.tokens.description;
return description.trim();
})
.filter(function (description) { return description !== ''; })
.map(({ tokens: { description } }) => description.trim())
.filter((description) => description !== '')
.join(' ');
}
function preserveSpacer(lines) {
// Reducer picking the index where the description begins: while no `type`
// token has been seen the accumulator is kept, otherwise the current index
// wins (the description starts at the last line of the type definition).
const lineNo = (num, { tokens }, i) => (tokens.type === '' ? num : i);
// Reconstructs a line's visible text: delimiter-less lines keep their raw
// `start`, delimited lines use the post-delimiter spacing minus one space,
// followed by the description token.
const getDescription = ({ tokens }) => {
  const prefix =
    tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1);
  return prefix + tokens.description;
};
function preserveJoiner(lines) {
if (lines.length === 0)
return '';
// skip the opening line with no description
if (lines[0].tokens.description === '' &&
lines[0].tokens.delimiter === Markers.start)
lines = lines.slice(1);
var lastLine = lines[lines.length - 1];
// skip the closing line with no description
const lastLine = lines[lines.length - 1];
if (lastLine !== undefined &&

@@ -339,33 +371,24 @@ lastLine.tokens.description === '' &&

lines = lines.slice(0, -1);
return lines
.map(function (_a) {
var tokens = _a.tokens;
return (tokens.delimiter === ''
? tokens.start
: tokens.postDelimiter.slice(1)) + tokens.description;
})
.join('\n');
// description starts at the last line of type definition
lines = lines.slice(lines.reduce(lineNo, 0));
return lines.map(getDescription).join('\n');
}
function getParser$3(_a) {
var _b = _a === void 0 ? {} : _a, _c = _b.startLine, startLine = _c === void 0 ? 0 : _c, _d = _b.fence, fence = _d === void 0 ? '```' : _d, _e = _b.spacing, spacing = _e === void 0 ? 'compact' : _e, _f = _b.tokenizers, tokenizers = _f === void 0 ? [
tagTokenizer(),
typeTokenizer(),
nameTokenizer(),
descriptionTokenizer(getSpacer(spacing)),
] : _f;
function getParser$3({ startLine = 0, fence = '```', spacing = 'compact', tokenizers = [
tagTokenizer(),
typeTokenizer(spacing),
nameTokenizer(),
descriptionTokenizer(spacing),
], } = {}) {
if (startLine < 0 || startLine % 1 > 0)
throw new Error('Invalid startLine');
var parseSource = getParser({ startLine: startLine });
var parseBlock = getParser$1({ fence: fence });
var parseSpec = getParser$2({ tokenizers: tokenizers });
var join = getSpacer(spacing);
var notEmpty = function (line) {
return line.tokens.description.trim() != '';
};
const parseSource = getParser$1({ startLine });
const parseBlock = getParser({ fence });
const parseSpec = getParser$2({ tokenizers });
const joinDescription = getJoiner$1(spacing);
const notEmpty = (line) => line.tokens.description.trim() != '';
return function (source) {
var blocks = [];
for (var _i = 0, _a = splitLines(source); _i < _a.length; _i++) {
var line = _a[_i];
var lines = parseSource(line);
const blocks = [];
for (const line of splitLines(source)) {
const lines = parseSource(line);
if (lines === null)

@@ -375,9 +398,9 @@ continue;

continue;
var sections = parseBlock(lines);
var specs = sections.slice(1).map(parseSpec);
const sections = parseBlock(lines);
const specs = sections.slice(1).map(parseSpec);
blocks.push({
description: join(sections[0]),
description: joinDescription(sections[0]),
tags: specs,
source: lines,
problems: specs.reduce(function (acc, spec) { return acc.concat(spec.problems); }, []),
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
});

@@ -403,21 +426,5 @@ }

function getStringifier() {
return function (block) {
return block.source.map(function (_a) {
var tokens = _a.tokens;
return join(tokens);
}).join('\n');
};
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
}
var __assign$1 = (window && window.__assign) || function () {
__assign$1 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign$1.apply(this, arguments);
};
var __rest = (window && window.__rest) || function (s, e) {

@@ -434,46 +441,3 @@ var t = {};

};
var pull = function (offset) { return function (str) { return str.slice(offset); }; };
var push = function (offset) {
var space = ''.padStart(offset, ' ');
return function (str) { return str + space; };
};
function indent(pos) {
var shift;
var pad = function (start) {
if (shift === undefined) {
var offset = pos - start.length;
shift = offset > 0 ? push(offset) : pull(-offset);
}
return shift(start);
};
var update = function (line) { return (__assign$1(__assign$1({}, line), { tokens: __assign$1(__assign$1({}, line.tokens), { start: pad(line.tokens.start) }) })); };
return function (_a) {
var source = _a.source, fields = __rest(_a, ["source"]);
return rewireSource(__assign$1(__assign$1({}, fields), { source: source.map(update) }));
};
}
var __assign$2 = (window && window.__assign) || function () {
__assign$2 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign$2.apply(this, arguments);
};
var __rest$1 = (window && window.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
var zeroWidth = {
const zeroWidth = {
start: 0,

@@ -484,11 +448,8 @@ tag: 0,

};
var getWidth = function (w, _a) {
var t = _a.tokens;
return ({
start: t.delimiter === Markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
};
// Reducer accumulating the maximum printed width of each alignable column.
const getWidth = (w, { tokens: t }) => ({
// the indent width comes only from the line that opens the comment block
start: t.delimiter === Markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
// /**

@@ -502,11 +463,11 @@ // * Description may go

// */
var space = function (len) { return ''.padStart(len, ' '); };
// Produces a run of `len` spaces (empty for zero or negative lengths).
const space = (len) => ''.padEnd(len, ' ');
function align() {
var intoTags = false;
var w;
let intoTags = false;
let w;
function update(line) {
var tokens = __assign$2({}, line.tokens);
const tokens = Object.assign({}, line.tokens);
if (tokens.tag !== '')
intoTags = true;
var isEmpty = tokens.tag === '' &&
const isEmpty = tokens.tag === '' &&
tokens.name === '' &&

@@ -518,3 +479,3 @@ tokens.type === '' &&

tokens.start = space(w.start + 1);
return __assign$2(__assign$2({}, line), { tokens: tokens });
return Object.assign(Object.assign({}, line), { tokens });
}

@@ -537,37 +498,109 @@ switch (tokens.delimiter) {

}
return __assign$2(__assign$2({}, line), { tokens: tokens });
return Object.assign(Object.assign({}, line), { tokens });
}
return function (_a) {
var source = _a.source, fields = __rest$1(_a, ["source"]);
w = source.reduce(getWidth, __assign$2({}, zeroWidth));
return rewireSource(__assign$2(__assign$2({}, fields), { source: source.map(update) }));
return (_a) => {
var { source } = _a, fields = __rest(_a, ["source"]);
w = source.reduce(getWidth, Object.assign({}, zeroWidth));
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
function flow() {
var transforms = [];
for (var _i = 0; _i < arguments.length; _i++) {
transforms[_i] = arguments[_i];
}
return function (block) {
return transforms.reduce(function (block, t) { return t(block); }, block);
var __rest$1 = (window && window.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
// Builds a function removing `offset` characters from the front of a string.
const pull = (offset) => (str) => str.slice(offset);
// Builds a function appending `offset` spaces to the end of a string.
const push = (offset) => {
  const suffix = ''.padEnd(offset, ' ');
  return (str) => str + suffix;
};
/**
 * Creates a transform re-indenting a block's lines to start at column `pos`.
 * The direction and size of the shift are computed once, from the first
 * line's existing indentation, and reused for every following line.
 */
function indent(pos) {
  let shift;
  const pad = (start) => {
    // lazily decide whether to add or remove leading characters
    if (shift === undefined) {
      const offset = pos - start.length;
      shift = offset > 0 ? push(offset) : pull(-offset);
    }
    return shift(start);
  };
  // copies the line, replacing only the `start` token with its padded form
  const update = (line) =>
    Object.assign(Object.assign({}, line), {
      tokens: Object.assign(Object.assign({}, line.tokens), {
        start: pad(line.tokens.start),
      }),
    });
  return (_a) => {
    var { source } = _a, fields = __rest$1(_a, ["source"]);
    return rewireSource(
      Object.assign(Object.assign({}, fields), { source: source.map(update) })
    );
  };
}
// Frozen namespace object bundling the transforms (ES-module shim shape).
var index = /*#__PURE__*/Object.freeze({
__proto__: null,
flow: flow,
indent: indent,
align: align
});
/**
 * Composes block transforms into one: applies each transform left-to-right
 * to the result of the previous one, starting from the input block.
 */
function flow(...transforms) {
  return (block) => {
    let result = block;
    for (const transform of transforms) {
      result = transform(result);
    }
    return result;
  };
}
function parse(source, options) {
if (options === void 0) { options = {}; }
// Zero-seeded widths for every inspect-table column.
// Key order is significant: `fields` below derives the column order from it.
const zeroWidth$1 = {
line: 0,
start: 0,
delimiter: 0,
postDelimiter: 0,
tag: 0,
postTag: 0,
name: 0,
postName: 0,
type: 0,
postType: 0,
description: 0,
end: 0,
};
// Column names in table order.
const fields = Object.keys(zeroWidth$1);
// Shows whitespace-only token values as "{length}" so they remain visible.
const repr = (x) => (isSpace(x) ? '{' + x.length + '}' : x);
// Wraps the cells of a table row with "|" separators on both ends.
const frame = (line) => `|${line.join('|')}|`;
// Pads every token cell of a row to its column width.
const align$1 = (width, tokens) =>
  Object.keys(tokens).map((key) => repr(tokens[key]).padEnd(width[key]));
/**
 * Renders a parsed block's source lines as an ASCII table for debugging:
 * one column per token field plus the line number, one row per line.
 * Whitespace-only values are shown as `{length}` via `repr`.
 */
function inspect({ source }) {
if (source.length === 0)
return '';
// each column starts at least as wide as its header
const width = Object.assign({}, zeroWidth$1);
for (const f of fields)
width[f] = f.length;
// grow columns to fit the widest rendered value
for (const { number, tokens } of source) {
width.line = Math.max(width.line, number.toString().length);
for (const k in tokens)
width[k] = Math.max(width[k], repr(tokens[k]).length);
}
// rows 0 and 1 are the header and the dashed separator
const lines = [[], []];
for (const f of fields)
lines[0].push(f.padEnd(width[f]));
for (const f of fields)
lines[1].push('-'.padEnd(width[f], '-'));
// one row per source line: right-aligned line number, then padded cells
for (const { number, tokens } of source) {
const line = number.toString().padStart(width.line);
lines.push([line, ...align$1(width, tokens)]);
}
return lines.map(frame).join('\n');
}
/**
 * Parses a string for comment blocks using a parser built from `options`.
 * @param {string} source raw source text containing comment blocks
 * @param {Partial<Options>} options parser configuration (startLine, fence, spacing, tokenizers)
 * @returns array of parsed blocks
 */
function parse(source, options = {}) {
return getParser$3(options)(source);
}
var stringify = getStringifier();
// Pre-built stringifier serializing a block back from its source tokens.
const stringify = getStringifier();
// Built-in block transforms, exported as a single namespace object.
const transforms = {
flow: flow,
align: align,
indent: indent,
};
// Factories for the built-in tokenizers, usable in the `tokenizers` option.
const tokenizers = {
tag: tagTokenizer,
type: typeTokenizer,
name: nameTokenizer,
description: descriptionTokenizer,
};
// Public API surface of the browser bundle.
exports.inspect = inspect;
exports.parse = parse;
exports.stringify = stringify;
// NOTE(review): `exports.transforms` is assigned twice (here and two lines
// below); the later plain `transforms` object wins — looks like a version
// diff artifact, confirm which export set is intended.
exports.transforms = index;
exports.tokenizers = tokenizers;
exports.transforms = transforms;

@@ -574,0 +607,0 @@ Object.defineProperty(exports, '__esModule', { value: true });

@@ -0,1 +1,14 @@

# v1.1.0
- split tokenizers into separate modules
- allow multiline {type} definitions - issue #109
- allow using "=>" in [name=default] defaults – issue #112
- allow using "=" in quoted [name=default] defaults – issue #112
- add tokenizers usage example - issue #111
# v1.1.1
- add helpers for rewiring Spec.source <-> Spec.tags.source
# v1.0.0
- complete rewrite in TS with more flexible API
# v0.7.6

@@ -2,0 +15,0 @@ - distinct non-critical errors by providing `err.warning`

import { Options as ParserOptions } from './parser/index';
import { Block } from './primitives';
export declare function parse(source: string, options?: Partial<ParserOptions>): Block[];
export declare const stringify: (block: Block) => string;
export * as transforms from './transforms/index';
import descriptionTokenizer from './parser/tokenizers/description';
import nameTokenizer from './parser/tokenizers/name';
import tagTokenizer from './parser/tokenizers/tag';
import typeTokenizer from './parser/tokenizers/type';
import alignTransform from './transforms/align';
import indentTransform from './transforms/indent';
import { flow as flowTransform } from './transforms/index';
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives").Block[];
export declare const stringify: (block: import("./primitives").Block) => string;
export { default as inspect } from './stringifier/inspect';
export declare const transforms: {
flow: typeof flowTransform;
align: typeof alignTransform;
indent: typeof indentTransform;
};
export declare const tokenizers: {
tag: typeof tagTokenizer;
type: typeof typeTokenizer;
name: typeof nameTokenizer;
description: typeof descriptionTokenizer;
};
import getParser from './parser/index';
import descriptionTokenizer from './parser/tokenizers/description';
import nameTokenizer from './parser/tokenizers/name';
import tagTokenizer from './parser/tokenizers/tag';
import typeTokenizer from './parser/tokenizers/type';
import getStringifier from './stringifier/index';
export function parse(source, options) {
if (options === void 0) { options = {}; }
import alignTransform from './transforms/align';
import indentTransform from './transforms/indent';
import { flow as flowTransform } from './transforms/index';
export function parse(source, options = {}) {
return getParser(options)(source);
}
export var stringify = getStringifier();
import * as transforms_1 from './transforms/index';
export { transforms_1 as transforms };
export const stringify = getStringifier();
export { default as inspect } from './stringifier/inspect';
export const transforms = {
flow: flowTransform,
align: alignTransform,
indent: indentTransform,
};
export const tokenizers = {
tag: tagTokenizer,
type: typeTokenizer,
name: nameTokenizer,
description: descriptionTokenizer,
};
import { Line } from '../primitives';
/**
* Groups source lines in sections representing tags.
* First section is a block description if present. Last section captures lines starting with
* the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
*/
export declare type Parser = (block: Line[]) => Line[][];
declare type Fencer = (source: string) => boolean;
/**
* Predicate telling if string contains opening/closing escaping sequence
* @param {string} source raw source line
*/
export declare type Fencer = (source: string) => boolean;
/**
* `Parser` configuration options
*/
export interface Options {
fence: string | Fencer;
}
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({ fence, }?: Partial<Options>): Parser;
export {};

@@ -1,14 +0,14 @@

var reTag = /^@\S+/;
export default function getParser(_a) {
var _b = (_a === void 0 ? {} : _a).fence, fence = _b === void 0 ? '```' : _b;
var fencer = getFencer(fence);
var toggleFence = function (source, isFenced) {
return fencer(source) ? !isFenced : isFenced;
};
const reTag = /^@\S+/;
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({ fence = '```', } = {}) {
const fencer = getFencer(fence);
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
return function parseBlock(source) {
// start with description section
var sections = [[]];
var isFenced = false;
for (var _i = 0, source_1 = source; _i < source_1.length; _i++) {
var line = source_1[_i];
const sections = [[]];
let isFenced = false;
for (const line of source) {
if (reTag.test(line.tokens.description) && !isFenced) {

@@ -27,4 +27,4 @@ sections.push([line]);

if (typeof fence === 'string')
return function (source) { return source.split(fence).length % 2 === 0; };
return (source) => source.split(fence).length % 2 === 0;
return fence;
}

@@ -1,10 +0,9 @@

import { Tokenizer } from './spec-parser';
import { Block } from '../primitives';
import { Spacer } from './spacer';
import { Tokenizer } from './tokenizers/index';
export interface Options {
startLine: number;
fence: string;
spacing: 'compact' | 'preserve' | Spacer;
spacing: 'compact' | 'preserve';
tokenizers: Tokenizer[];
}
export default function getParser({ startLine, fence, spacing, tokenizers, }?: Partial<Options>): (source: string) => Block[];

@@ -0,27 +1,26 @@

import { splitLines } from '../util';
import blockParser from './block-parser';
import sourceParser from './source-parser';
import blockParser from './block-parser';
import specParser, { tagTokenizer, nameTokenizer, typeTokenizer, descriptionTokenizer, } from './spec-parser';
import getSpacer from './spacer';
import { splitLines } from '../util';
export default function getParser(_a) {
var _b = _a === void 0 ? {} : _a, _c = _b.startLine, startLine = _c === void 0 ? 0 : _c, _d = _b.fence, fence = _d === void 0 ? '```' : _d, _e = _b.spacing, spacing = _e === void 0 ? 'compact' : _e, _f = _b.tokenizers, tokenizers = _f === void 0 ? [
tagTokenizer(),
typeTokenizer(),
nameTokenizer(),
descriptionTokenizer(getSpacer(spacing)),
] : _f;
import specParser from './spec-parser';
import tokenizeTag from './tokenizers/tag';
import tokenizeType from './tokenizers/type';
import tokenizeName from './tokenizers/name';
import tokenizeDescription, { getJoiner as getDescriptionJoiner, } from './tokenizers/description';
export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', tokenizers = [
tokenizeTag(),
tokenizeType(spacing),
tokenizeName(),
tokenizeDescription(spacing),
], } = {}) {
if (startLine < 0 || startLine % 1 > 0)
throw new Error('Invalid startLine');
var parseSource = sourceParser({ startLine: startLine });
var parseBlock = blockParser({ fence: fence });
var parseSpec = specParser({ tokenizers: tokenizers });
var join = getSpacer(spacing);
var notEmpty = function (line) {
return line.tokens.description.trim() != '';
};
const parseSource = sourceParser({ startLine });
const parseBlock = blockParser({ fence });
const parseSpec = specParser({ tokenizers });
const joinDescription = getDescriptionJoiner(spacing);
const notEmpty = (line) => line.tokens.description.trim() != '';
return function (source) {
var blocks = [];
for (var _i = 0, _a = splitLines(source); _i < _a.length; _i++) {
var line = _a[_i];
var lines = parseSource(line);
const blocks = [];
for (const line of splitLines(source)) {
const lines = parseSource(line);
if (lines === null)

@@ -31,9 +30,9 @@ continue;

continue;
var sections = parseBlock(lines);
var specs = sections.slice(1).map(parseSpec);
const sections = parseBlock(lines);
const specs = sections.slice(1).map(parseSpec);
blocks.push({
description: join(sections[0]),
description: joinDescription(sections[0]),
tags: specs,
source: lines,
problems: specs.reduce(function (acc, spec) { return acc.concat(spec.problems); }, []),
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
});

@@ -40,0 +39,0 @@ }

@@ -1,12 +0,10 @@

import { splitSpace, seedTokens } from '../util';
import { Markers } from '../primitives';
export default function getParser(_a) {
var _b = (_a === void 0 ? {} : _a).startLine, startLine = _b === void 0 ? 0 : _b;
var block = null;
var num = startLine;
import { seedTokens, splitSpace } from '../util';
export default function getParser({ startLine = 0, } = {}) {
let block = null;
let num = startLine;
return function parseSource(source) {
var _a, _b, _c;
var rest = source;
var tokens = seedTokens();
_a = splitSpace(rest), tokens.start = _a[0], rest = _a[1];
let rest = source;
const tokens = seedTokens();
[tokens.start, rest] = splitSpace(rest);
if (block === null &&

@@ -18,3 +16,3 @@ rest.startsWith(Markers.start) &&

rest = rest.slice(Markers.start.length);
_b = splitSpace(rest), tokens.postDelimiter = _b[0], rest = _b[1];
[tokens.postDelimiter, rest] = splitSpace(rest);
}

@@ -25,3 +23,3 @@ if (block === null) {

}
var isClosed = rest.trimRight().endsWith(Markers.end);
const isClosed = rest.trimRight().endsWith(Markers.end);
if (tokens.delimiter === '' &&

@@ -32,6 +30,6 @@ rest.startsWith(Markers.delim) &&

rest = rest.slice(Markers.delim.length);
_c = splitSpace(rest), tokens.postDelimiter = _c[0], rest = _c[1];
[tokens.postDelimiter, rest] = splitSpace(rest);
}
if (isClosed) {
var trimmed = rest.trimRight();
const trimmed = rest.trimRight();
tokens.end = rest.slice(trimmed.length - Markers.end.length);

@@ -41,6 +39,6 @@ rest = trimmed.slice(0, -Markers.end.length);

tokens.description = rest;
block.push({ number: num, source: source, tokens: tokens });
block.push({ number: num, source, tokens });
num++;
if (isClosed) {
var result = block.slice();
const result = block.slice();
block = null;

@@ -47,0 +45,0 @@ return result;

import { Line, Spec } from '../primitives';
import { Spacer } from './spacer';
import { Tokenizer } from './tokenizers/index';
export declare type Parser = (source: Line[]) => Spec;
export declare type Tokenizer = (spec: Spec) => Spec;
export interface Options {

@@ -9,5 +8,1 @@ tokenizers: Tokenizer[];

export default function getParser({ tokenizers }: Options): Parser;
export declare function tagTokenizer(): Tokenizer;
export declare function typeTokenizer(): Tokenizer;
export declare function nameTokenizer(): Tokenizer;
export declare function descriptionTokenizer(join: Spacer): Tokenizer;

@@ -1,9 +0,7 @@

import { splitSpace, isSpace, seedSpec } from '../util';
export default function getParser(_a) {
var tokenizers = _a.tokenizers;
import { seedSpec } from '../util';
export default function getParser({ tokenizers }) {
return function parseSpec(source) {
var _a;
var spec = seedSpec({ source: source });
for (var _i = 0, tokenizers_1 = tokenizers; _i < tokenizers_1.length; _i++) {
var tokenize = tokenizers_1[_i];
let spec = seedSpec({ source });
for (const tokenize of tokenizers) {
spec = tokenize(spec);

@@ -16,146 +14,1 @@ if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)

}
/**
 * Builds a tokenizer that splits the leading `@tag` off the first source
 * line's description, populating `spec.tag`, `tokens.tag` and
 * `tokens.postTag`, and trimming the consumed prefix from the description.
 * Reports a critical `spec:tag:prefix` problem when no leading tag exists.
 */
export function tagTokenizer() {
    return function (spec) {
        const tokens = spec.source[0].tokens;
        // `^` anchor: the tag must open the description. The previous
        // unanchored pattern could match an "@word" appearing later in the
        // line, and slice(match[0].length) below would then drop the wrong
        // characters instead of the matched prefix.
        const match = tokens.description.match(/^\s*(@(\S+))(\s*)/);
        if (match === null) {
            spec.problems.push({
                code: 'spec:tag:prefix',
                message: 'tag should start with "@" symbol',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        tokens.tag = match[1];
        tokens.postTag = match[3];
        // drop the consumed "@tag " prefix; the remainder feeds later tokenizers
        tokens.description = tokens.description.slice(match[0].length);
        spec.tag = match[2];
        return spec;
    };
}
/**
 * Creates a tokenizer that extracts a `{curly-wrapped}` type expression
 * from the start of the first line's description, filling `spec.type`
 * (inner text) and the `type`/`postType` tokens. Leaves the spec untouched
 * when no opening curly is present; reports a critical
 * `spec:type:unpaired-curlies` problem when braces never balance.
 */
export function typeTokenizer() {
    return (spec) => {
        const { tokens } = spec.source[0];
        const text = tokens.description.trimLeft();
        // no opening curly — no type expression here
        if (text[0] !== '{')
            return spec;
        let depth = 0;
        let captured = '';
        // consume characters until the curly braces balance out
        for (const ch of text) {
            if (ch === '{')
                depth++;
            if (ch === '}')
                depth--;
            captured += ch;
            if (depth === 0)
                break;
        }
        if (depth !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        // strip the wrapping braces for the spec-level value
        spec.type = captured.slice(1, -1);
        tokens.type = captured;
        // whatever follows the type becomes post-type spacing + description
        [tokens.postType, tokens.description] = splitSpace(text.slice(captured.length));
        return spec;
    };
}
/**
 * Creates a tokenizer that extracts the name token from the first line's
 * description. Three forms are recognized: a `"quoted literal"`, a bare
 * non-space word, or an `[optional=default]` bracket group. Fills
 * `spec.name`, `spec.optional`, `spec.default` plus the `name`/`postName`
 * tokens, and pushes critical problems for malformed bracket groups.
 */
export function nameTokenizer() {
    return (spec) => {
        const { tokens } = spec.source[0];
        const text = tokens.description.trimLeft();
        const quotedGroups = text.split('"');
        // if it starts with quoted group, assume it is a literal
        if (quotedGroups.length > 1 &&
            quotedGroups[0] === '' &&
            quotedGroups.length % 2 === 1) {
            spec.name = quotedGroups[1];
            tokens.name = "\"" + quotedGroups[1] + "\"";
            [tokens.postName, tokens.description] = splitSpace(text.slice(tokens.name.length));
            return spec;
        }
        // assume name is non-space string or anything wrapped into brackets
        let depth = 0;
        let token = '';
        for (const ch of text) {
            // whitespace ends the name unless we are inside [...]
            if (depth === 0 && isSpace(ch))
                break;
            if (ch === '[')
                depth++;
            if (ch === ']')
                depth--;
            token += ch;
        }
        if (depth !== 0) {
            spec.problems.push({
                code: 'spec:name:unpaired-brackets',
                message: 'unpaired brackets',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        let name = token;
        let optional = false;
        let defaultValue;
        // [name] / [name=default] marks the parameter as optional
        if (token[0] === '[' && token[token.length - 1] === ']') {
            optional = true;
            const parts = token.slice(1, -1).split('=');
            name = parts[0].trim();
            defaultValue = parts[1] === undefined ? undefined : parts[1].trim();
            if (name === '') {
                spec.problems.push({
                    code: 'spec:name:empty-name',
                    message: 'empty name',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // more than one "=" inside the brackets is malformed
            if (parts.length > 2) {
                spec.problems.push({
                    code: 'spec:name:invalid-default',
                    message: 'invalid default value syntax',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // "[name=]" — an "=" with nothing after it
            if (defaultValue === '') {
                spec.problems.push({
                    code: 'spec:name:empty-default',
                    message: 'empty default value',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
        }
        spec.optional = optional;
        spec.name = name;
        tokens.name = token;
        if (defaultValue !== undefined)
            spec.default = defaultValue;
        [tokens.postName, tokens.description] = splitSpace(text.slice(token.length));
        return spec;
    };
}
/**
 * Wraps a spacer into a tokenizer: the spec's source lines are joined by
 * `join` and stored as `spec.description`. The spec is mutated in place
 * and returned.
 */
export function descriptionTokenizer(join) {
    return (spec) => {
        spec.description = join(spec.source);
        return spec;
    };
}

@@ -15,8 +15,3 @@ function join(tokens) {

export default function getStringifier() {
return function (block) {
return block.source.map(function (_a) {
var tokens = _a.tokens;
return join(tokens);
}).join('\n');
};
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
}

@@ -1,12 +0,1 @@

var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __rest = (this && this.__rest) || function (s, e) {

@@ -25,3 +14,3 @@ var t = {};

import { rewireSource } from '../util';
var zeroWidth = {
const zeroWidth = {
start: 0,

@@ -32,11 +21,8 @@ tag: 0,

};
var getWidth = function (w, _a) {
var t = _a.tokens;
return ({
start: t.delimiter === Markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
};
const getWidth = (w, { tokens: t }) => ({
start: t.delimiter === Markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
// /**

@@ -50,11 +36,11 @@ // * Description may go

// */
var space = function (len) { return ''.padStart(len, ' '); };
const space = (len) => ''.padStart(len, ' ');
export default function align() {
var intoTags = false;
var w;
let intoTags = false;
let w;
function update(line) {
var tokens = __assign({}, line.tokens);
const tokens = Object.assign({}, line.tokens);
if (tokens.tag !== '')
intoTags = true;
var isEmpty = tokens.tag === '' &&
const isEmpty = tokens.tag === '' &&
tokens.name === '' &&

@@ -66,3 +52,3 @@ tokens.type === '' &&

tokens.start = space(w.start + 1);
return __assign(__assign({}, line), { tokens: tokens });
return Object.assign(Object.assign({}, line), { tokens });
}

@@ -85,9 +71,9 @@ switch (tokens.delimiter) {

}
return __assign(__assign({}, line), { tokens: tokens });
return Object.assign(Object.assign({}, line), { tokens });
}
return function (_a) {
var source = _a.source, fields = __rest(_a, ["source"]);
w = source.reduce(getWidth, __assign({}, zeroWidth));
return rewireSource(__assign(__assign({}, fields), { source: source.map(update) }));
return (_a) => {
var { source } = _a, fields = __rest(_a, ["source"]);
w = source.reduce(getWidth, Object.assign({}, zeroWidth));
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}

@@ -1,12 +0,1 @@

var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __rest = (this && this.__rest) || function (s, e) {

@@ -24,12 +13,12 @@ var t = {};

import { rewireSource } from '../util';
var pull = function (offset) { return function (str) { return str.slice(offset); }; };
var push = function (offset) {
var space = ''.padStart(offset, ' ');
return function (str) { return str + space; };
const pull = (offset) => (str) => str.slice(offset);
const push = (offset) => {
const space = ''.padStart(offset, ' ');
return (str) => str + space;
};
export default function indent(pos) {
var shift;
var pad = function (start) {
let shift;
const pad = (start) => {
if (shift === undefined) {
var offset = pos - start.length;
const offset = pos - start.length;
shift = offset > 0 ? push(offset) : pull(-offset);

@@ -39,7 +28,7 @@ }

};
var update = function (line) { return (__assign(__assign({}, line), { tokens: __assign(__assign({}, line.tokens), { start: pad(line.tokens.start) }) })); };
return function (_a) {
var source = _a.source, fields = __rest(_a, ["source"]);
return rewireSource(__assign(__assign({}, fields), { source: source.map(update) }));
const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));
return (_a) => {
var { source } = _a, fields = __rest(_a, ["source"]);
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
import { Block } from '../primitives';
export declare type Transform = (Block: any) => Block;
export { default as indent } from './indent';
export { default as align } from './align';
export declare function flow(...transforms: Transform[]): Transform;

@@ -1,11 +0,3 @@

export { default as indent } from './indent';
export { default as align } from './align';
export function flow() {
var transforms = [];
for (var _i = 0; _i < arguments.length; _i++) {
transforms[_i] = arguments[_i];
}
return function (block) {
return transforms.reduce(function (block, t) { return t(block); }, block);
};
export function flow(...transforms) {
return (block) => transforms.reduce((block, t) => t(block), block);
}

@@ -1,12 +0,1 @@

var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
export function isSpace(source) {

@@ -16,3 +5,3 @@ return /^\s+$/.test(source);

export function splitSpace(source) {
var matches = source.match(/^\s+/);
const matches = source.match(/^\s+/);
return matches == null

@@ -25,13 +14,10 @@ ? ['', source]

}
export function seedBlock(block) {
if (block === void 0) { block = {}; }
return __assign({ description: '', tags: [], source: [], problems: [] }, block);
export function seedBlock(block = {}) {
return Object.assign({ description: '', tags: [], source: [], problems: [] }, block);
}
export function seedSpec(spec) {
if (spec === void 0) { spec = {}; }
return __assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
export function seedSpec(spec = {}) {
return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
}
export function seedTokens(tokens) {
if (tokens === void 0) { tokens = {}; }
return __assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '' }, tokens);
export function seedTokens(tokens = {}) {
return Object.assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '' }, tokens);
}

@@ -44,6 +30,5 @@ /**

export function rewireSource(block) {
var source = block.source.reduce(function (acc, line) { return acc.set(line.number, line); }, new Map());
for (var _i = 0, _a = block.tags; _i < _a.length; _i++) {
var spec = _a[_i];
spec.source = spec.source.map(function (line) { return source.get(line.number); });
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
for (const spec of block.tags) {
spec.source = spec.source.map((line) => source.get(line.number));
}

@@ -58,7 +43,5 @@ return block;

export function rewireSpecs(block) {
var source = block.tags.reduce(function (acc, spec) {
return spec.source.reduce(function (acc, line) { return acc.set(line.number, line); }, acc);
}, new Map());
block.source = block.source.map(function (line) { return source.get(line.number) || line; });
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
block.source = block.source.map((line) => source.get(line.number) || line);
return block;
}

@@ -5,2 +5,8 @@ // For a detailed explanation regarding each configuration property, visit:

module.exports = {
globals: {
'ts-jest': {
tsconfig: require('./tsconfig.node.json').compilerOptions,
},
},
// All imported modules in your tests should be mocked automatically

@@ -28,9 +34,6 @@ // automock: false,

// An array of regexp pattern strings used to skip coverage collection
coveragePathIgnorePatterns: [
"/node_modules/",
"/lib/",
],
coveragePathIgnorePatterns: ['/node_modules/', '/lib/'],
// Indicates which provider should be used to instrument code for coverage
coverageProvider: "v8",
coverageProvider: 'v8',

@@ -104,3 +107,3 @@ // A list of reporter names that Jest uses when writing coverage reports

// A preset that is used as a base for Jest's configuration
// preset: undefined,
preset: 'ts-jest',

@@ -129,5 +132,3 @@ // Run tests from one or more projects

// A list of paths to directories that Jest should use to search for files in
roots: [
"<rootDir>/tests/"
],
roots: ['<rootDir>/tests/'],

@@ -150,3 +151,3 @@ // Allows you to use a custom runner instead of Jest's default test runner

// The test environment that will be used for testing
testEnvironment: "node",
testEnvironment: 'node',

@@ -187,3 +188,3 @@ // Options that will be passed to the testEnvironment

transform: {
"^.+\\.ts$": "ts-jest"
'^.+\\.ts$': 'ts-jest',
},

@@ -190,0 +191,0 @@

import { Options as ParserOptions } from './parser/index';
import { Block } from './primitives';
export declare function parse(source: string, options?: Partial<ParserOptions>): Block[];
export declare const stringify: (block: Block) => string;
export * as transforms from './transforms/index';
import descriptionTokenizer from './parser/tokenizers/description';
import nameTokenizer from './parser/tokenizers/name';
import tagTokenizer from './parser/tokenizers/tag';
import typeTokenizer from './parser/tokenizers/type';
import alignTransform from './transforms/align';
import indentTransform from './transforms/indent';
import { flow as flowTransform } from './transforms/index';
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives").Block[];
export declare const stringify: (block: import("./primitives").Block) => string;
export { default as inspect } from './stringifier/inspect';
export declare const transforms: {
flow: typeof flowTransform;
align: typeof alignTransform;
indent: typeof indentTransform;
};
export declare const tokenizers: {
tag: typeof tagTokenizer;
type: typeof typeTokenizer;
name: typeof nameTokenizer;
description: typeof descriptionTokenizer;
};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.transforms = exports.stringify = exports.parse = void 0;
var index_1 = require("./parser/index");
var index_2 = require("./stringifier/index");
function parse(source, options) {
if (options === void 0) { options = {}; }
exports.tokenizers = exports.transforms = exports.inspect = exports.stringify = exports.parse = void 0;
const index_1 = require("./parser/index");
const description_1 = require("./parser/tokenizers/description");
const name_1 = require("./parser/tokenizers/name");
const tag_1 = require("./parser/tokenizers/tag");
const type_1 = require("./parser/tokenizers/type");
const index_2 = require("./stringifier/index");
const align_1 = require("./transforms/align");
const indent_1 = require("./transforms/indent");
const index_3 = require("./transforms/index");
function parse(source, options = {}) {
return index_1.default(options)(source);

@@ -12,2 +18,14 @@ }

exports.stringify = index_2.default();
exports.transforms = require("./transforms/index");
var inspect_1 = require("./stringifier/inspect");
Object.defineProperty(exports, "inspect", { enumerable: true, get: function () { return inspect_1.default; } });
exports.transforms = {
flow: index_3.flow,
align: align_1.default,
indent: indent_1.default,
};
exports.tokenizers = {
tag: tag_1.default,
type: type_1.default,
name: name_1.default,
description: description_1.default,
};
import { Line } from '../primitives';
/**
* Groups source lines in sections representing tags.
* First section is a block description if present. Last section captures lines starting with
* the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
*/
export declare type Parser = (block: Line[]) => Line[][];
declare type Fencer = (source: string) => boolean;
/**
* Predicate telling if string contains opening/closing escaping sequence
* @param {string} source raw source line
*/
export declare type Fencer = (source: string) => boolean;
/**
* `Parser` configuration options
*/
export interface Options {
fence: string | Fencer;
}
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({ fence, }?: Partial<Options>): Parser;
export {};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var reTag = /^@\S+/;
function getParser(_a) {
var _b = (_a === void 0 ? {} : _a).fence, fence = _b === void 0 ? '```' : _b;
var fencer = getFencer(fence);
var toggleFence = function (source, isFenced) {
return fencer(source) ? !isFenced : isFenced;
};
const reTag = /^@\S+/;
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
function getParser({ fence = '```', } = {}) {
const fencer = getFencer(fence);
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
return function parseBlock(source) {
// start with description section
var sections = [[]];
var isFenced = false;
for (var _i = 0, source_1 = source; _i < source_1.length; _i++) {
var line = source_1[_i];
const sections = [[]];
let isFenced = false;
for (const line of source) {
if (reTag.test(line.tokens.description) && !isFenced) {

@@ -30,4 +30,4 @@ sections.push([line]);

if (typeof fence === 'string')
return function (source) { return source.split(fence).length % 2 === 0; };
return (source) => source.split(fence).length % 2 === 0;
return fence;
}

@@ -1,10 +0,9 @@

import { Tokenizer } from './spec-parser';
import { Block } from '../primitives';
import { Spacer } from './spacer';
import { Tokenizer } from './tokenizers/index';
export interface Options {
startLine: number;
fence: string;
spacing: 'compact' | 'preserve' | Spacer;
spacing: 'compact' | 'preserve';
tokenizers: Tokenizer[];
}
export default function getParser({ startLine, fence, spacing, tokenizers, }?: Partial<Options>): (source: string) => Block[];
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var source_parser_1 = require("./source-parser");
var block_parser_1 = require("./block-parser");
var spec_parser_1 = require("./spec-parser");
var spacer_1 = require("./spacer");
var util_1 = require("../util");
function getParser(_a) {
var _b = _a === void 0 ? {} : _a, _c = _b.startLine, startLine = _c === void 0 ? 0 : _c, _d = _b.fence, fence = _d === void 0 ? '```' : _d, _e = _b.spacing, spacing = _e === void 0 ? 'compact' : _e, _f = _b.tokenizers, tokenizers = _f === void 0 ? [
spec_parser_1.tagTokenizer(),
spec_parser_1.typeTokenizer(),
spec_parser_1.nameTokenizer(),
spec_parser_1.descriptionTokenizer(spacer_1.default(spacing)),
] : _f;
const util_1 = require("../util");
const block_parser_1 = require("./block-parser");
const source_parser_1 = require("./source-parser");
const spec_parser_1 = require("./spec-parser");
const tag_1 = require("./tokenizers/tag");
const type_1 = require("./tokenizers/type");
const name_1 = require("./tokenizers/name");
const description_1 = require("./tokenizers/description");
function getParser({ startLine = 0, fence = '```', spacing = 'compact', tokenizers = [
tag_1.default(),
type_1.default(spacing),
name_1.default(),
description_1.default(spacing),
], } = {}) {
if (startLine < 0 || startLine % 1 > 0)
throw new Error('Invalid startLine');
var parseSource = source_parser_1.default({ startLine: startLine });
var parseBlock = block_parser_1.default({ fence: fence });
var parseSpec = spec_parser_1.default({ tokenizers: tokenizers });
var join = spacer_1.default(spacing);
var notEmpty = function (line) {
return line.tokens.description.trim() != '';
};
const parseSource = source_parser_1.default({ startLine });
const parseBlock = block_parser_1.default({ fence });
const parseSpec = spec_parser_1.default({ tokenizers });
const joinDescription = description_1.getJoiner(spacing);
const notEmpty = (line) => line.tokens.description.trim() != '';
return function (source) {
var blocks = [];
for (var _i = 0, _a = util_1.splitLines(source); _i < _a.length; _i++) {
var line = _a[_i];
var lines = parseSource(line);
const blocks = [];
for (const line of util_1.splitLines(source)) {
const lines = parseSource(line);
if (lines === null)

@@ -33,9 +32,9 @@ continue;

continue;
var sections = parseBlock(lines);
var specs = sections.slice(1).map(parseSpec);
const sections = parseBlock(lines);
const specs = sections.slice(1).map(parseSpec);
blocks.push({
description: join(sections[0]),
description: joinDescription(sections[0]),
tags: specs,
source: lines,
problems: specs.reduce(function (acc, spec) { return acc.concat(spec.problems); }, []),
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
});

@@ -42,0 +41,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var util_1 = require("../util");
var primitives_1 = require("../primitives");
function getParser(_a) {
var _b = (_a === void 0 ? {} : _a).startLine, startLine = _b === void 0 ? 0 : _b;
var block = null;
var num = startLine;
const primitives_1 = require("../primitives");
const util_1 = require("../util");
function getParser({ startLine = 0, } = {}) {
let block = null;
let num = startLine;
return function parseSource(source) {
var _a, _b, _c;
var rest = source;
var tokens = util_1.seedTokens();
_a = util_1.splitSpace(rest), tokens.start = _a[0], rest = _a[1];
let rest = source;
const tokens = util_1.seedTokens();
[tokens.start, rest] = util_1.splitSpace(rest);
if (block === null &&

@@ -20,3 +18,3 @@ rest.startsWith(primitives_1.Markers.start) &&

rest = rest.slice(primitives_1.Markers.start.length);
_b = util_1.splitSpace(rest), tokens.postDelimiter = _b[0], rest = _b[1];
[tokens.postDelimiter, rest] = util_1.splitSpace(rest);
}

@@ -27,3 +25,3 @@ if (block === null) {

}
var isClosed = rest.trimRight().endsWith(primitives_1.Markers.end);
const isClosed = rest.trimRight().endsWith(primitives_1.Markers.end);
if (tokens.delimiter === '' &&

@@ -34,6 +32,6 @@ rest.startsWith(primitives_1.Markers.delim) &&

rest = rest.slice(primitives_1.Markers.delim.length);
_c = util_1.splitSpace(rest), tokens.postDelimiter = _c[0], rest = _c[1];
[tokens.postDelimiter, rest] = util_1.splitSpace(rest);
}
if (isClosed) {
var trimmed = rest.trimRight();
const trimmed = rest.trimRight();
tokens.end = rest.slice(trimmed.length - primitives_1.Markers.end.length);

@@ -43,6 +41,6 @@ rest = trimmed.slice(0, -primitives_1.Markers.end.length);

tokens.description = rest;
block.push({ number: num, source: source, tokens: tokens });
block.push({ number: num, source, tokens });
num++;
if (isClosed) {
var result = block.slice();
const result = block.slice();
block = null;

@@ -49,0 +47,0 @@ return result;

import { Line, Spec } from '../primitives';
import { Spacer } from './spacer';
import { Tokenizer } from './tokenizers/index';
export declare type Parser = (source: Line[]) => Spec;
export declare type Tokenizer = (spec: Spec) => Spec;
export interface Options {

@@ -9,5 +8,1 @@ tokenizers: Tokenizer[];

export default function getParser({ tokenizers }: Options): Parser;
export declare function tagTokenizer(): Tokenizer;
export declare function typeTokenizer(): Tokenizer;
export declare function nameTokenizer(): Tokenizer;
export declare function descriptionTokenizer(join: Spacer): Tokenizer;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.descriptionTokenizer = exports.nameTokenizer = exports.typeTokenizer = exports.tagTokenizer = void 0;
var util_1 = require("../util");
function getParser(_a) {
var tokenizers = _a.tokenizers;
const util_1 = require("../util");
function getParser({ tokenizers }) {
return function parseSpec(source) {
var _a;
var spec = util_1.seedSpec({ source: source });
for (var _i = 0, tokenizers_1 = tokenizers; _i < tokenizers_1.length; _i++) {
var tokenize = tokenizers_1[_i];
let spec = util_1.seedSpec({ source });
for (const tokenize of tokenizers) {
spec = tokenize(spec);

@@ -20,150 +17,1 @@ if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)

exports.default = getParser;
/**
 * Builds a tokenizer that splits the leading `@tag` off the first source
 * line's description, populating `spec.tag`, `tokens.tag` and
 * `tokens.postTag`, and trimming the consumed prefix from the description.
 * Reports a critical `spec:tag:prefix` problem when no leading tag exists.
 * (CommonJS build variant.)
 */
function tagTokenizer() {
    return function (spec) {
        var tokens = spec.source[0].tokens;
        // `^` anchor: the tag must open the description. The previous
        // unanchored pattern could match an "@word" appearing later in the
        // line, and slice(match[0].length) below would then drop the wrong
        // characters instead of the matched prefix.
        var match = tokens.description.match(/^\s*(@(\S+))(\s*)/);
        if (match === null) {
            spec.problems.push({
                code: 'spec:tag:prefix',
                message: 'tag should start with "@" symbol',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        tokens.tag = match[1];
        tokens.postTag = match[3];
        // drop the consumed "@tag " prefix; the remainder feeds later tokenizers
        tokens.description = tokens.description.slice(match[0].length);
        spec.tag = match[2];
        return spec;
    };
}
exports.tagTokenizer = tagTokenizer;
/**
 * Creates a tokenizer that extracts a `{curly-wrapped}` type expression
 * from the start of the first line's description, filling `spec.type`
 * (inner text, braces stripped) and the `type`/`postType` tokens.
 * Down-levelled compiler output: `_a`/`_i` are generated temporaries.
 */
function typeTokenizer() {
    return function (spec) {
        var _a;
        var res = '';
        var curlies = 0;
        var tokens = spec.source[0].tokens;
        var source = tokens.description.trimLeft();
        // no opening curly — no type expression; leave the spec untouched
        if (source[0] !== '{')
            return spec;
        // consume characters until the curly braces balance out
        for (var _i = 0, source_1 = source; _i < source_1.length; _i++) {
            var ch = source_1[_i];
            if (ch === '{')
                curlies++;
            if (ch === '}')
                curlies--;
            res += ch;
            if (curlies === 0) {
                break;
            }
        }
        // ran out of input with braces still open — report and bail out
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        // strip the wrapping braces for the spec-level value
        spec.type = res.slice(1, -1);
        tokens.type = res;
        // whatever follows the type becomes post-type spacing + description
        _a = util_1.splitSpace(source.slice(tokens.type.length)), tokens.postType = _a[0], tokens.description = _a[1];
        return spec;
    };
}
exports.typeTokenizer = typeTokenizer;
/**
 * Creates a tokenizer that extracts the name token from the first line's
 * description. Three forms are recognized: a `"quoted literal"`, a bare
 * non-space word, or an `[optional=default]` bracket group. Fills
 * `spec.name`, `spec.optional`, `spec.default` plus the `name`/`postName`
 * tokens, and pushes critical problems for malformed bracket groups.
 * Down-levelled compiler output: `_a`/`_b`/`_c`/`_i` are generated temporaries.
 */
function nameTokenizer() {
    return function (spec) {
        var _a, _b;
        var _c;
        var tokens = spec.source[0].tokens;
        var source = tokens.description.trimLeft();
        var quotedGroups = source.split('"');
        // if it starts with quoted group, assume it is a literal
        if (quotedGroups.length > 1 &&
            quotedGroups[0] === '' &&
            quotedGroups.length % 2 === 1) {
            spec.name = quotedGroups[1];
            tokens.name = "\"" + quotedGroups[1] + "\"";
            _a = util_1.splitSpace(source.slice(tokens.name.length)), tokens.postName = _a[0], tokens.description = _a[1];
            return spec;
        }
        var brackets = 0;
        var name = '';
        var optional = false;
        var defaultValue;
        // assume name is non-space string or anything wrapped into brackets
        for (var _i = 0, source_2 = source; _i < source_2.length; _i++) {
            var ch = source_2[_i];
            // whitespace ends the name unless we are inside [...]
            if (brackets === 0 && util_1.isSpace(ch))
                break;
            if (ch === '[')
                brackets++;
            if (ch === ']')
                brackets--;
            name += ch;
        }
        // brackets never balanced out — report a critical problem and stop
        if (brackets !== 0) {
            spec.problems.push({
                code: 'spec:name:unpaired-brackets',
                message: 'unpaired brackets',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        var nameToken = name;
        // [name] / [name=default] marks the parameter as optional
        if (name[0] === '[' && name[name.length - 1] === ']') {
            optional = true;
            name = name.slice(1, -1);
            var parts = name.split('=');
            name = parts[0].trim();
            defaultValue = (_c = parts[1]) === null || _c === void 0 ? void 0 : _c.trim();
            if (name === '') {
                spec.problems.push({
                    code: 'spec:name:empty-name',
                    message: 'empty name',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // more than one "=" inside the brackets is malformed
            if (parts.length > 2) {
                spec.problems.push({
                    code: 'spec:name:invalid-default',
                    message: 'invalid default value syntax',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // "[name=]" — an "=" with nothing after it
            if (defaultValue === '') {
                spec.problems.push({
                    code: 'spec:name:empty-default',
                    message: 'empty default value',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
        }
        spec.optional = optional;
        spec.name = name;
        tokens.name = nameToken;
        if (defaultValue !== undefined)
            spec.default = defaultValue;
        _b = util_1.splitSpace(source.slice(tokens.name.length)), tokens.postName = _b[0], tokens.description = _b[1];
        return spec;
    };
}
exports.nameTokenizer = nameTokenizer;
/**
 * Wraps a spacer into a tokenizer: the spec's source lines are joined by
 * `join` and stored as `spec.description`. The spec is mutated in place
 * and returned. (CommonJS build variant.)
 */
function descriptionTokenizer(join) {
    return (spec) => {
        spec.description = join(spec.source);
        return spec;
    };
}
exports.descriptionTokenizer = descriptionTokenizer;

@@ -17,9 +17,4 @@ "use strict";

function getStringifier() {
return function (block) {
return block.source.map(function (_a) {
var tokens = _a.tokens;
return join(tokens);
}).join('\n');
};
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
}
exports.default = getStringifier;
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __rest = (this && this.__rest) || function (s, e) {

@@ -25,5 +14,5 @@ var t = {};

Object.defineProperty(exports, "__esModule", { value: true });
var primitives_1 = require("../primitives");
var util_1 = require("../util");
var zeroWidth = {
const primitives_1 = require("../primitives");
const util_1 = require("../util");
const zeroWidth = {
start: 0,

@@ -34,11 +23,8 @@ tag: 0,

};
var getWidth = function (w, _a) {
var t = _a.tokens;
return ({
start: t.delimiter === primitives_1.Markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
};
const getWidth = (w, { tokens: t }) => ({
start: t.delimiter === primitives_1.Markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
// /**

@@ -52,11 +38,11 @@ // * Description may go

// */
var space = function (len) { return ''.padStart(len, ' '); };
const space = (len) => ''.padStart(len, ' ');
function align() {
var intoTags = false;
var w;
let intoTags = false;
let w;
function update(line) {
var tokens = __assign({}, line.tokens);
const tokens = Object.assign({}, line.tokens);
if (tokens.tag !== '')
intoTags = true;
var isEmpty = tokens.tag === '' &&
const isEmpty = tokens.tag === '' &&
tokens.name === '' &&

@@ -68,3 +54,3 @@ tokens.type === '' &&

tokens.start = space(w.start + 1);
return __assign(__assign({}, line), { tokens: tokens });
return Object.assign(Object.assign({}, line), { tokens });
}

@@ -87,10 +73,10 @@ switch (tokens.delimiter) {

}
return __assign(__assign({}, line), { tokens: tokens });
return Object.assign(Object.assign({}, line), { tokens });
}
return function (_a) {
var source = _a.source, fields = __rest(_a, ["source"]);
w = source.reduce(getWidth, __assign({}, zeroWidth));
return util_1.rewireSource(__assign(__assign({}, fields), { source: source.map(update) }));
return (_a) => {
var { source } = _a, fields = __rest(_a, ["source"]);
w = source.reduce(getWidth, Object.assign({}, zeroWidth));
return util_1.rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
exports.default = align;
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __rest = (this && this.__rest) || function (s, e) {

@@ -25,13 +14,13 @@ var t = {};

Object.defineProperty(exports, "__esModule", { value: true });
var util_1 = require("../util");
var pull = function (offset) { return function (str) { return str.slice(offset); }; };
var push = function (offset) {
var space = ''.padStart(offset, ' ');
return function (str) { return str + space; };
const util_1 = require("../util");
// pull(offset): makes a function that trims `offset` leading characters.
const pull = (offset) => {
    return (str) => str.slice(offset);
};
// push(offset): makes a function that appends `offset` spaces.
const push = (offset) => {
    const pad = ''.padStart(offset, ' ');
    return (str) => str + pad;
};
function indent(pos) {
var shift;
var pad = function (start) {
let shift;
const pad = (start) => {
if (shift === undefined) {
var offset = pos - start.length;
const offset = pos - start.length;
shift = offset > 0 ? push(offset) : pull(-offset);

@@ -41,8 +30,8 @@ }

};
var update = function (line) { return (__assign(__assign({}, line), { tokens: __assign(__assign({}, line.tokens), { start: pad(line.tokens.start) }) })); };
return function (_a) {
var source = _a.source, fields = __rest(_a, ["source"]);
return util_1.rewireSource(__assign(__assign({}, fields), { source: source.map(update) }));
const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));
return (_a) => {
var { source } = _a, fields = __rest(_a, ["source"]);
return util_1.rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
exports.default = indent;
import { Block } from '../primitives';
export declare type Transform = (Block: any) => Block;
export { default as indent } from './indent';
export { default as align } from './align';
export declare function flow(...transforms: Transform[]): Transform;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.flow = exports.align = exports.indent = void 0;
var indent_1 = require("./indent");
Object.defineProperty(exports, "indent", { enumerable: true, get: function () { return indent_1.default; } });
var align_1 = require("./align");
Object.defineProperty(exports, "align", { enumerable: true, get: function () { return align_1.default; } });
function flow() {
var transforms = [];
for (var _i = 0; _i < arguments.length; _i++) {
transforms[_i] = arguments[_i];
}
return function (block) {
return transforms.reduce(function (block, t) { return t(block); }, block);
};
exports.flow = void 0;
// Composes the given transforms into a single Block -> Block function,
// applied left to right.
function flow(...transforms) {
    return (block) => {
        let result = block;
        for (const transform of transforms) result = transform(result);
        return result;
    };
}
exports.flow = flow;
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -20,3 +9,3 @@ exports.rewireSpecs = exports.rewireSource = exports.seedTokens = exports.seedSpec = exports.seedBlock = exports.splitLines = exports.splitSpace = exports.isSpace = void 0;

function splitSpace(source) {
var matches = source.match(/^\s+/);
const matches = source.match(/^\s+/);
return matches == null

@@ -31,15 +20,12 @@ ? ['', source]

exports.splitLines = splitLines;
function seedBlock(block) {
if (block === void 0) { block = {}; }
return __assign({ description: '', tags: [], source: [], problems: [] }, block);
function seedBlock(block = {}) {
return Object.assign({ description: '', tags: [], source: [], problems: [] }, block);
}
exports.seedBlock = seedBlock;
function seedSpec(spec) {
if (spec === void 0) { spec = {}; }
return __assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
function seedSpec(spec = {}) {
return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
}
exports.seedSpec = seedSpec;
function seedTokens(tokens) {
if (tokens === void 0) { tokens = {}; }
return __assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '' }, tokens);
function seedTokens(tokens = {}) {
return Object.assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '' }, tokens);
}

@@ -53,6 +39,5 @@ exports.seedTokens = seedTokens;

function rewireSource(block) {
var source = block.source.reduce(function (acc, line) { return acc.set(line.number, line); }, new Map());
for (var _i = 0, _a = block.tags; _i < _a.length; _i++) {
var spec = _a[_i];
spec.source = spec.source.map(function (line) { return source.get(line.number); });
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
for (const spec of block.tags) {
spec.source = spec.source.map((line) => source.get(line.number));
}

@@ -68,8 +53,6 @@ return block;

function rewireSpecs(block) {
var source = block.tags.reduce(function (acc, spec) {
return spec.source.reduce(function (acc, line) { return acc.set(line.number, line); }, acc);
}, new Map());
block.source = block.source.map(function (line) { return source.get(line.number) || line; });
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
block.source = block.source.map((line) => source.get(line.number) || line);
return block;
}
exports.rewireSpecs = rewireSpecs;
{
"name": "comment-parser",
"version": "1.0.1",
"version": "1.1.0",
"description": "Generic JSDoc-like comment parser",

@@ -5,0 +5,0 @@ "main": "lib/index.js",

@@ -32,4 +32,4 @@ # comment-parser

* over few lines followed by @tags
* @param {string} name name parameter
* @param {any} value value of any type
* @param {string} name the name parameter
* @param {any} value the value of any type
*/`

@@ -77,3 +77,10 @@

```
| ... | * | ... | @param | ... | {any} | ... | value | ... | the value of any type
|line|start|delimiter|postDelimiter|tag |postTag|name |postName|type |postType|description |end|
|----|-----|---------|-------------|------|-------|-----|--------|--------|--------|--------------------------------|---|
| 0|{2} |/** | | | | | | | | | |
| 1|{3} |* |{1} | | | | | | |Description may go | |
| 2|{3} |* |{1} | | | | | | |over few lines followed by @tags| |
| 3|{3} |* |{1} |@param|{1} |name |{1} |{string}|{1} |the name parameter | |
| 4|{3} |* |{1} |@param|{1} |value|{1} |{any} |{1} |the value of any type | |
| 5|{3} | | | | | | | | | |*/ |
```

@@ -157,4 +164,4 @@

fence: string;
// block and comment description compaction strategy, see Spacer
spacing: 'compact' | 'preserve' | Spacer;
// block and comment description compaction strategy
spacing: 'compact' | 'preserve';
// tokenizer functions extracting name, type, and description out of tag, see Tokenizer

@@ -161,0 +168,0 @@ tokenizers: Tokenizer[];

import getParser, { Options as ParserOptions } from './parser/index';
import descriptionTokenizer from './parser/tokenizers/description';
import nameTokenizer from './parser/tokenizers/name';
import tagTokenizer from './parser/tokenizers/tag';
import typeTokenizer from './parser/tokenizers/type';
import getStringifier from './stringifier/index';
import { Block } from './primitives';
import alignTransform from './transforms/align';
import indentTransform from './transforms/indent';
import { flow as flowTransform } from './transforms/index';

@@ -10,3 +16,15 @@ export function parse(source: string, options: Partial<ParserOptions> = {}) {

export const stringify = getStringifier();
export { default as inspect } from './stringifier/inspect';
export * as transforms from './transforms/index';
// Ready-made transform helpers, re-exported as a single namespace object.
export const transforms = {
  flow: flowTransform,
  align: alignTransform,
  indent: indentTransform,
};
// Default tokenizer factories, exposed so callers can rebuild or reorder
// the spec-parsing pipeline (passed via the parser's `tokenizers` option).
export const tokenizers = {
  tag: tagTokenizer,
  type: typeTokenizer,
  name: nameTokenizer,
  description: descriptionTokenizer,
};

@@ -5,10 +5,28 @@ import { Line } from '../primitives';

/**
* Groups source lines in sections representing tags.
* First section is a block description if present. Last section captures lines starting with
* the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
*/
export type Parser = (block: Line[]) => Line[][];
type Fencer = (source: string) => boolean;
/**
* Predicate telling if string contains opening/closing escaping sequence
* @param {string} source raw source line
*/
export type Fencer = (source: string) => boolean;
/**
* `Parser` configuration options
*/
export interface Options {
// escaping sequence or predicate
fence: string | Fencer;
}
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({

@@ -15,0 +33,0 @@ fence = '```',

@@ -1,14 +0,13 @@

import { Problem } from '../primitives';
import sourceParser, { Options as SourceOptions } from './source-parser';
import blockParser, { Options as BlockOptions } from './block-parser';
import specParser, {
Tokenizer,
tagTokenizer,
nameTokenizer,
typeTokenizer,
descriptionTokenizer,
} from './spec-parser';
import { Block, Line, Spec } from '../primitives';
import getSpacer, { Spacer } from './spacer';
import { Block, Line, Problem } from '../primitives';
import { splitLines } from '../util';
import blockParser from './block-parser';
import sourceParser from './source-parser';
import specParser from './spec-parser';
import { Tokenizer } from './tokenizers/index';
import tokenizeTag from './tokenizers/tag';
import tokenizeType from './tokenizers/type';
import tokenizeName from './tokenizers/name';
import tokenizeDescription, {
getJoiner as getDescriptionJoiner,
} from './tokenizers/description';

@@ -20,4 +19,4 @@ export interface Options {

fence: string;
// block and comment description compaction strategy, see Spacer
spacing: 'compact' | 'preserve' | Spacer;
// block and comment description compaction strategy
spacing: 'compact' | 'preserve';
// tokenizer functions extracting name, type, and description out of tag, see Tokenizer

@@ -32,6 +31,6 @@ tokenizers: Tokenizer[];

tokenizers = [
tagTokenizer(),
typeTokenizer(),
nameTokenizer(),
descriptionTokenizer(getSpacer(spacing)),
tokenizeTag(),
tokenizeType(spacing),
tokenizeName(),
tokenizeDescription(spacing),
],

@@ -44,3 +43,3 @@ }: Partial<Options> = {}) {

const parseSpec = specParser({ tokenizers });
const join = getSpacer(spacing);
const joinDescription = getDescriptionJoiner(spacing);

@@ -62,3 +61,3 @@ const notEmpty = (line: Line): boolean =>

blocks.push({
description: join(sections[0]),
description: joinDescription(sections[0]),
tags: specs,

@@ -65,0 +64,0 @@ source: lines,

@@ -1,3 +0,3 @@

import { splitSpace, seedTokens } from '../util';
import { Line, Tokens, Markers } from '../primitives';
import { Line, Markers, Tokens } from '../primitives';
import { seedTokens, splitSpace } from '../util';

@@ -4,0 +4,0 @@ export interface Options {

@@ -1,9 +0,7 @@

import { splitSpace, isSpace, seedSpec } from '../util';
import { Line, Spec } from '../primitives';
import { Spacer } from './spacer';
import { seedSpec } from '../util';
import { Tokenizer } from './tokenizers/index';
export type Parser = (source: Line[]) => Spec;
export type Tokenizer = (spec: Spec) => Spec;
export interface Options {

@@ -23,167 +21,1 @@ tokenizers: Tokenizer[];

}
/**
 * Creates a tokenizer that splits the leading "@tag" token off the first
 * source line's description, recording it in both the line tokens and the
 * spec. Reports a critical problem when no "@"-prefixed tag is present.
 */
export function tagTokenizer(): Tokenizer {
  return (spec: Spec): Spec => {
    const { tokens } = spec.source[0];
    const parsed = tokens.description.match(/\s*(@(\S+))(\s*)/);
    if (parsed !== null) {
      // parsed[1] keeps the "@" prefix for the raw token; parsed[2] is the
      // bare tag name; parsed[3] is the trailing whitespace.
      tokens.tag = parsed[1];
      tokens.postTag = parsed[3];
      tokens.description = tokens.description.slice(parsed[0].length);
      spec.tag = parsed[2];
      return spec;
    }
    spec.problems.push({
      code: 'spec:tag:prefix',
      message: 'tag should start with "@" symbol',
      line: spec.source[0].number,
      critical: true,
    });
    return spec;
  };
}
/**
 * Creates a tokenizer that extracts a balanced `{...}` type expression from
 * the start of the description. Nested curlies are allowed; an unbalanced
 * expression is reported as a critical problem and leaves the spec untouched.
 */
export function typeTokenizer(): Tokenizer {
  return (spec: Spec): Spec => {
    const { tokens } = spec.source[0];
    const source = tokens.description.trimLeft();
    // No leading "{" means the tag simply has no type portion.
    if (source[0] !== '{') return spec;
    let raw = '';
    let depth = 0;
    for (const ch of source) {
      if (ch === '{') depth++;
      if (ch === '}') depth--;
      raw += ch;
      if (depth === 0) break;
    }
    if (depth !== 0) {
      // Reached the end of the line without closing every "{".
      spec.problems.push({
        code: 'spec:type:unpaired-curlies',
        message: 'unpaired curlies',
        line: spec.source[0].number,
        critical: true,
      });
      return spec;
    }
    // Strip the outer braces for spec.type; keep them in the raw token.
    spec.type = raw.slice(1, -1);
    tokens.type = raw;
    [tokens.postType, tokens.description] = splitSpace(
      source.slice(tokens.type.length)
    );
    return spec;
  };
}
/**
 * Creates a tokenizer extracting the spec name from the remaining
 * description of the first source line. Handles three shapes:
 * - `"quoted literal"` — the whole quoted group becomes the name
 * - `[name]` / `[name=default]` — optional name, possibly with a default
 * - a plain non-space token
 * Emits critical problems for unpaired brackets, an empty name, or an
 * empty default value.
 */
export function nameTokenizer(): Tokenizer {
  return (spec: Spec): Spec => {
    const { tokens } = spec.source[0];
    const source = tokens.description.trimLeft();
    const quotedGroups = source.split('"');
    // if it starts with a quoted group, assume it is a literal
    if (
      quotedGroups.length > 1 &&
      quotedGroups[0] === '' &&
      quotedGroups.length % 2 === 1
    ) {
      spec.name = quotedGroups[1];
      tokens.name = `"${quotedGroups[1]}"`;
      [tokens.postName, tokens.description] = splitSpace(
        source.slice(tokens.name.length)
      );
      return spec;
    }
    let brackets = 0;
    let name = '';
    let optional = false;
    let defaultValue;
    // assume name is a non-space string or anything wrapped into brackets
    for (const ch of source) {
      if (brackets === 0 && isSpace(ch)) break;
      if (ch === '[') brackets++;
      if (ch === ']') brackets--;
      name += ch;
    }
    if (brackets !== 0) {
      spec.problems.push({
        code: 'spec:name:unpaired-brackets',
        message: 'unpaired brackets',
        line: spec.source[0].number,
        critical: true,
      });
      return spec;
    }
    const nameToken = name;
    if (name[0] === '[' && name[name.length - 1] === ']') {
      optional = true;
      name = name.slice(1, -1);
      const parts = name.split('=');
      name = parts[0].trim();
      // Fix: a default value may legitimately contain '=' (e.g.
      // `[p="v=1"]` or `[cb = x => x]`), so rejoin everything after the
      // first '=' instead of rejecting parts.length > 2 as invalid syntax.
      if (parts.length > 1) defaultValue = parts.slice(1).join('=').trim();
      if (name === '') {
        spec.problems.push({
          code: 'spec:name:empty-name',
          message: 'empty name',
          line: spec.source[0].number,
          critical: true,
        });
        return spec;
      }
      if (defaultValue === '') {
        spec.problems.push({
          code: 'spec:name:empty-default',
          message: 'empty default value',
          line: spec.source[0].number,
          critical: true,
        });
        return spec;
      }
    }
    spec.optional = optional;
    spec.name = name;
    tokens.name = nameToken;
    if (defaultValue !== undefined) spec.default = defaultValue;
    [tokens.postName, tokens.description] = splitSpace(
      source.slice(tokens.name.length)
    );
    return spec;
  };
}
/**
 * Creates a tokenizer assembling the spec description by joining the
 * remaining (untokenized) text of all source lines with the provided
 * `join` spacer strategy.
 */
export function descriptionTokenizer(join: Spacer): Tokenizer {
  return (spec: Spec): Spec => {
    // Earlier tokenizers have already consumed tag/type/name; whatever
    // text is left across the lines is the description.
    spec.description = join(spec.source);
    return spec;
  };
}

@@ -5,5 +5,2 @@ import { Block } from '../primitives';

export { default as indent } from './indent';
export { default as align } from './align';
export function flow(...transforms: Transform[]): Transform {

@@ -10,0 +7,0 @@ return (block: Block): Block =>

@@ -63,2 +63,6 @@ // This file is a source for playground examples.

* with multiline description
* @param {function(
* number,
* string
* )} options the options
*/

@@ -68,4 +72,4 @@

const stringified = parsed[0].tags
.map((tag) => `@${tag.tag} - ${tag.description}`)
.join('\n');
.map((tag) => `@${tag.tag} - ${tag.description}\n\n${tag.type}`)
.join('\n----\n');
}

@@ -161,2 +165,34 @@

function parse_advanced_parsing(source, parse, _, _, tokenizers) {
  // Each '@tag ...' section results in a Spec. A Spec is computed by
  // the chain of tokenizers each contributing change to the Spec.* and Spec.tags[].tokens.
  // Default parse() options come with standard tokenizers
  // {
  //   ...,
  //   spacing = 'compact',
  //   tokenizers = [
  //     tokenizers.tag(),
  //     tokenizers.type(spacing),
  //     tokenizers.name(),
  //     tokenizers.description(spacing),
  //   ]
  // }
  // You can reorder those, or even replace any with a custom function (spec: Spec) => Spec
  // This example allows parsing "@tag description" comments
  /**
   * @arg0 my parameter
   * @arg1
   * another parameter
   * with a strange formatting
   */
  const parsed = parse(source, {
    tokenizers: [tokenizers.tag(), tokenizers.description('preserve')],
  });
  const stringified = parsed[0].tags
    .map((tag) => `@${tag.tag} - ${tag.description}`)
    .join('\n');
}
(typeof window === 'undefined' ? module.exports : window).examples = [

@@ -168,3 +204,4 @@ parse_defaults,

parse_source_exploration,
parse_advanced_parsing,
stringify_formatting,
];

@@ -1,2 +0,2 @@

const { parse, stringify, transforms } = require('../../lib');
const { parse, stringify, transforms, tokenizers } = require('../../lib');
const { examples } = require('./examples');

@@ -8,3 +8,5 @@

const source = fn.toString();
expect(() => fn(source, parse, stringify, transforms)).not.toThrow();
expect(() =>
fn(source, parse, stringify, transforms, tokenizers)
).not.toThrow();
});

@@ -1,3 +0,6 @@

const { parse, stringify } = require('../../lib/');
const { flow, indent, align } = require('../../lib/transforms');
const {
parse,
stringify,
transforms: { flow, indent, align },
} = require('../../lib/');

@@ -4,0 +7,0 @@ test('align + indent', () => {

@@ -1,4 +0,3 @@

import { descriptionTokenizer } from '../../src/parser/spec-parser';
import { seedTokens, seedSpec } from '../../src/util';
import getSpacer from '../../src/parser/spacer';
import descriptionTokenizer from '../../src/parser/tokenizers/description';
import { seedSpec, seedTokens } from '../../src/util';

@@ -37,3 +36,3 @@ const sourceSingle = [

test('compact - single line', () => {
const tokenize = descriptionTokenizer(getSpacer('compact'));
const tokenize = descriptionTokenizer('compact');
const input = seedSpec({ source: sourceSingle });

@@ -45,3 +44,3 @@ const output = seedSpec({ source: sourceSingle, description: 'one two' });

test('compact - multiple lines', () => {
const tokenize = descriptionTokenizer(getSpacer('compact'));
const tokenize = descriptionTokenizer('compact');
const input = seedSpec({ source: sourceMultiple });

@@ -56,3 +55,3 @@ const output = seedSpec({

test('preserve - multiple lines', () => {
const tokenize = descriptionTokenizer(getSpacer('preserve'));
const tokenize = descriptionTokenizer('preserve');
const input = seedSpec({ source: sourceMultiple });

@@ -68,3 +67,3 @@ const output = seedSpec({

test('preserve - one-liner', () => {
const tokenize = descriptionTokenizer(getSpacer('preserve'));
const tokenize = descriptionTokenizer('preserve');
const input = seedSpec({

@@ -102,1 +101,95 @@ source: [

});
// 'preserve' spacing keeps the comment's vertical layout: blank lines inside
// the description region survive as bare "\n" characters in the joined text.
test('preserve - leading empty lines', () => {
  const source = [
    {
      number: 1,
      source: '...',
      tokens: seedTokens({ delimiter: '/**' }),
    },
    {
      number: 2,
      source: '...',
      tokens: seedTokens(),
    },
    {
      number: 3,
      source: '...',
      tokens: seedTokens({ description: ' line 1 ' }),
    },
    {
      number: 4,
      source: '...',
      tokens: seedTokens({ description: ' line 2 ' }),
    },
    {
      number: 5,
      source: '...',
      tokens: seedTokens({ description: '' }),
    },
  ];
  const tokenize = descriptionTokenizer('preserve');
  const input = seedSpec({ source });
  const output = seedSpec({
    source,
    description: '\n line 1 \n line 2 \n',
  });
  expect(tokenize(input)).toEqual(output);
});
// 'preserve' spacing: lines already consumed by the type tokenizer
// (tokens.type set, no description text) contribute no text and no leading
// newlines — the description starts at the first line with real text.
test('preserve - leading type lines', () => {
  const source = [
    {
      number: 1,
      source: '...',
      tokens: seedTokens({ delimiter: '/**' }),
    },
    {
      number: 2,
      source: '...',
      tokens: seedTokens(),
    },
    {
      number: 3,
      source: '...',
      tokens: seedTokens({ type: '{function(' }),
    },
    {
      number: 4,
      source: '...',
      tokens: seedTokens({ type: ' number' }),
    },
    {
      number: 5,
      source: '...',
      tokens: seedTokens({
        type: ')}',
        postType: ' ',
        description: 'line 1 ',
      }),
    },
    {
      number: 6,
      source: '...',
      tokens: seedTokens({ description: ' line 2 ' }),
    },
    {
      number: 7,
      source: '...',
      tokens: seedTokens({ description: '' }),
    },
  ];
  const tokenize = descriptionTokenizer('preserve');
  const input = seedSpec({ source });
  const output = seedSpec({
    source,
    description: 'line 1 \n line 2 \n',
  });
  expect(tokenize(input)).toEqual(output);
});

@@ -1,2 +0,2 @@

import { nameTokenizer } from '../../src/parser/spec-parser';
import nameTokenizer from '../../src/parser/tokenizers/name';
import { seedTokens, seedSpec } from '../../src/util';

@@ -267,2 +267,37 @@

// A quoted default value may itself contain '=' — the name tokenizer must
// keep the quoted remainder intact instead of treating the extra '=' as
// invalid default syntax.
test('quoted default with =', () => {
  expect(
    tokenize(
      seedSpec({
        source: [
          {
            number: 1,
            source: '...',
            tokens: seedTokens({
              description: '[param="value=1"] param description',
            }),
          },
        ],
      })
    )
  ).toEqual(
    seedSpec({
      name: 'param',
      optional: true,
      default: '"value=1"',
      source: [
        {
          number: 1,
          source: '...',
          tokens: seedTokens({
            name: '[param="value=1"]',
            postName: ' ',
            description: 'param description',
          }),
        },
      ],
    })
  );
});
test('non-alphanumeric', () => {

@@ -626,1 +661,36 @@ expect(

});
// An arrow-function default contains '=' (in '=>') — the tokenizer must
// split the bracketed pair on the first '=' only and keep the rest as the
// default value, trimmed.
test('default with arrow', () => {
  expect(
    tokenize(
      seedSpec({
        source: [
          {
            number: 1,
            source: '...',
            tokens: seedTokens({
              description: '[param = value => value] param description',
            }),
          },
        ],
      })
    )
  ).toEqual(
    seedSpec({
      name: 'param',
      optional: true,
      default: 'value => value',
      source: [
        {
          number: 1,
          source: '...',
          tokens: seedTokens({
            name: '[param = value => value]',
            postName: ' ',
            description: 'param description',
          }),
        },
      ],
    })
  );
});

@@ -1,9 +0,6 @@

import getParser, {
Tokenizer,
tagTokenizer,
typeTokenizer,
nameTokenizer,
descriptionTokenizer,
} from '../../src/parser/spec-parser';
import getSpacer from '../../src/parser/spacer';
import descriptionTokenizer from '../../src/parser/tokenizers/description';
import nameTokenizer from '../../src/parser/tokenizers/name';
import tagTokenizer from '../../src/parser/tokenizers/tag';
import typeTokenizer from '../../src/parser/tokenizers/type';
import getParser from '../../src/parser/spec-parser';
import { seedTokens, seedSpec } from '../../src/util';

@@ -17,3 +14,3 @@ import { Spec, Problem } from '../../src/primitives';

nameTokenizer(),
descriptionTokenizer(getSpacer('compact')),
descriptionTokenizer(),
],

@@ -20,0 +17,0 @@ });

@@ -1,2 +0,2 @@

import { tagTokenizer } from '../../src/parser/spec-parser';
import tagTokenizer from '../../src/parser/tokenizers/tag';
import { seedTokens, seedSpec } from '../../src/util';

@@ -3,0 +3,0 @@

@@ -1,2 +0,2 @@

import { typeTokenizer } from '../../src/parser/spec-parser';
import typeTokenizer from '../../src/parser/tokenizers/type';
import { seedTokens, seedSpec } from '../../src/util';

@@ -139,1 +139,135 @@

});
// Multiline `{function(...)}` type: on every contributing line the matched
// text migrates from tokens.description into tokens.type, and spec.type is
// flattened to a single line with the braces stripped.
// NOTE(review): `tokenize` is defined outside this excerpt — presumably a
// default typeTokenizer(); confirm against the full test file.
test('multiline - preserve', () => {
  const spec = seedSpec({
    source: [
      {
        number: 1,
        source: '...',
        tokens: seedTokens({
          description: '{function(',
        }),
      },
      {
        number: 2,
        source: '...',
        tokens: seedTokens({
          postDelimiter: ' ',
          description: 'number',
        }),
      },
      {
        number: 3,
        source: '...',
        tokens: seedTokens({
          description: ')} function type',
        }),
      },
      {
        number: 4,
        source: '...',
        tokens: seedTokens(),
      },
      {
        number: 5,
        source: '...',
        tokens: seedTokens({
          end: '*/',
        }),
      },
    ],
  });
  const tokenized = tokenize(spec);
  // Lines past the closing '}' (blank line, end marker) stay untouched.
  const expected = seedSpec({
    type: 'function(number)',
    source: [
      {
        number: 1,
        source: '...',
        tokens: seedTokens({
          type: '{function(',
        }),
      },
      {
        number: 2,
        source: '...',
        tokens: seedTokens({
          type: ' number',
        }),
      },
      {
        number: 3,
        source: '...',
        tokens: seedTokens({
          type: ')}',
          postType: ' ',
          description: 'function type',
        }),
      },
      {
        number: 4,
        source: '...',
        tokens: seedTokens(),
      },
      {
        number: 5,
        source: '...',
        tokens: seedTokens({
          end: '*/',
        }),
      },
    ],
  });
  expect(tokenized).toEqual(expected);
});
// The spacing option controls how a multiline type is flattened:
// default and 'compact' join the trimmed lines, 'preserve' keeps the
// original line breaks and leading padding.
// NOTE(review): two fixture lines below share `number: 2` — likely meant
// 2 and 3; verify line numbers are not significant for the type joiner.
test.each([
  ['default', undefined, 'function(number,string)'],
  ['preserve', 'preserve', 'function(\n number,\n string\n)'],
  ['compact', 'compact', 'function(number,string)'],
])('spacing - %s', (name, spacing, type) => {
  const tokenize =
    spacing === 'preserve' || spacing === 'compact'
      ? typeTokenizer(spacing)
      : typeTokenizer();
  const spec = seedSpec({
    source: [
      {
        number: 1,
        source: '...',
        tokens: seedTokens({
          description: '{function(',
        }),
      },
      {
        number: 2,
        source: '...',
        tokens: seedTokens({
          postDelimiter: ' ',
          description: 'number,',
        }),
      },
      {
        number: 2,
        source: '...',
        tokens: seedTokens({
          postDelimiter: ' ',
          description: 'string',
        }),
      },
      {
        number: 3,
        source: '...',
        tokens: seedTokens({
          description: ')} function type',
        }),
      },
    ],
  });
  const tokenized = tokenize(spec);
  expect(tokenized.type).toEqual(type);
});

@@ -1,2 +0,1 @@

import { Markers, Tokens } from '../../lib/primitives';
import getStringifier from '../../src/stringifier';

@@ -3,0 +2,0 @@

@@ -1,2 +0,2 @@

import { align } from '../../src/transforms/index';
import align from '../../src/transforms/align';
import getParser from '../../src/parser/index';

@@ -3,0 +3,0 @@ import getStringifier from '../../src/stringifier/index';

{
"compilerOptions": {
"target": "es5",
"target": "es2015",
"module": "es2015",

@@ -5,0 +5,0 @@ "moduleResolution": "node",

{
"compilerOptions": {
"target": "es5",
"target": "es2015",
"module": "commonjs",

@@ -5,0 +5,0 @@ "moduleResolution": "node",

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc