markdown-it-ins
Comparing version 1.0.0 to 2.0.0
@@ -0,1 +1,7 @@
2.0.0 / 2015-10-05
------------------
- Markdown-it 5.0.0 support. Use 1.x version for 4.x.
1.0.0 / 2015-03-12
@@ -2,0 +8,0 @@ ------------------
@@ -1,143 +0,121 @@
/*! markdown-it-ins 1.0.0 https://github.com//markdown-it/markdown-it-ins @license MIT */(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.markdownitIns = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/*! markdown-it-ins 2.0.0 https://github.com//markdown-it/markdown-it-ins @license MIT */(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.markdownitIns = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
@@ -144,0 +122,0 @@
@@ -1,2 +0,2 @@
/*! markdown-it-ins 1.0.0 https://github.com//markdown-it/markdown-it-ins @license MIT */
!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var r;r="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,r.markdownitIns=e()}}(function(){return function e(r,o,n){function i(s,f){if(!o[s]){if(!r[s]){var u="function"==typeof require&&require;if(!f&&u)return u(s,!0);if(t)return t(s,!0);var p=new Error("Cannot find module '"+s+"'");throw p.code="MODULE_NOT_FOUND",p}var a=o[s]={exports:{}};r[s][0].call(a.exports,function(e){var o=r[s][1][e];return i(o?o:e)},a,a.exports,e,r,o,n)}return o[s].exports}for(var t="function"==typeof require&&require,s=0;s<n.length;s++)i(n[s]);return i}({1:[function(e,r){"use strict";function o(e,r){var o,n,i,t,s,f,u,p=r,a=!0,c=!0,d=e.posMax,l=e.src.charCodeAt(r),h=e.md.utils.isWhiteSpace,m=e.md.utils.isPunctChar,C=e.md.utils.isMdAsciiPunct;for(o=r>0?e.src.charCodeAt(r-1):32;d>p&&e.src.charCodeAt(p)===l;)p++;return p>=d&&(a=!1),i=p-r,n=d>p?e.src.charCodeAt(p):32,s=C(o)||m(String.fromCharCode(o)),u=C(n)||m(String.fromCharCode(n)),t=h(o),f=h(n),f?a=!1:u&&(t||s||(a=!1)),t?c=!1:s&&(f||u||(c=!1)),{can_open:a,can_close:c,delims:i}}function n(e,r){var n,i,t,s,f,u,p,a=e.posMax,c=e.pos,d=e.src.charCodeAt(c);if(43!==d)return!1;if(r)return!1;if(u=o(e,c),n=u.delims,!u.can_open)return e.pos+=n,e.pending+=e.src.slice(c,e.pos),!0;if(f=Math.floor(n/2),0>=f)return!1;for(e.pos=c+n;e.pos<a;)if(e.src.charCodeAt(e.pos)!==d)e.md.inline.skipToken(e);else{if(u=o(e,e.pos),i=u.delims,t=Math.floor(i/2),u.can_close){if(t>=f){e.pos+=i-2,s=!0;break}f-=t,e.pos+=i;continue}u.can_open&&(f+=t),e.pos+=i}return s?(e.posMax=e.pos,e.pos=c+2,p=e.push("ins_open","ins",1),p.markup=String.fromCharCode(d)+String.fromCharCode(d),e.md.inline.tokenize(e),p=e.push("ins_close","ins",-1),p.markup=String.fromCharCode(d)+String.fromCharCode(d),e.pos=e.posMax+2,e.posMax=a,!0):(e.pos=c,!1)}r.exports=function(e){e.inline.ruler.before("emphasis","ins",n)}},{}]},{},[1])(1)});
/*! markdown-it-ins 2.0.0 https://github.com//markdown-it/markdown-it-ins @license MIT */
!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var n;n="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,n.markdownitIns=e()}}(function(){return function e(n,t,o){function r(s,f){if(!t[s]){if(!n[s]){var u="function"==typeof require&&require;if(!f&&u)return u(s,!0);if(i)return i(s,!0);var l=new Error("Cannot find module '"+s+"'");throw l.code="MODULE_NOT_FOUND",l}var p=t[s]={exports:{}};n[s][0].call(p.exports,function(e){var t=n[s][1][e];return r(t?t:e)},p,p.exports,e,n,t,o)}return t[s].exports}for(var i="function"==typeof require&&require,s=0;s<o.length;s++)r(o[s]);return r}({1:[function(e,n,t){"use strict";n.exports=function(e){function n(e,n){var t,o,r,i,s,f=e.pos,u=e.src.charCodeAt(f);if(n)return!1;if(43!==u)return!1;if(o=e.scanDelims(e.pos,!0),i=o.length,s=String.fromCharCode(u),2>i)return!1;for(i%2&&(r=e.push("text","",0),r.content=s,i--),t=0;i>t;t+=2)r=e.push("text","",0),r.content=s+s,e.delimiters.push({marker:u,jump:t,token:e.tokens.length-1,level:e.level,end:-1,open:o.can_open,close:o.can_close});return e.pos+=o.length,!0}function t(e){var n,t,o,r,i,s=[],f=e.delimiters,u=e.delimiters.length;for(n=0;u>n;n++)o=f[n],43===o.marker&&-1!==o.end&&(r=f[o.end],i=e.tokens[o.token],i.type="ins_open",i.tag="ins",i.nesting=1,i.markup="++",i.content="",i=e.tokens[r.token],i.type="ins_close",i.tag="ins",i.nesting=-1,i.markup="++",i.content="","text"===e.tokens[r.token-1].type&&"+"===e.tokens[r.token-1].content&&s.push(r.token-1));for(;s.length;){for(n=s.pop(),t=n+1;t<e.tokens.length&&"ins_close"===e.tokens[t].type;)t++;t--,n!==t&&(i=e.tokens[t],e.tokens[t]=e.tokens[n],e.tokens[n]=i)}}e.inline.ruler.before("emphasis","ins",n),e.inline.ruler2.before("emphasis","ins",t)}},{}]},{},[1])(1)});
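Both dist files are UMD builds: when neither CommonJS nor AMD is detected, the wrapper assigns the plugin to the global object as markdownitIns. A minimal browser-side sketch, assuming the markdown-it and markdown-it-ins bundles have already been loaded with script tags (so that window.markdownit and window.markdownitIns exist — those globals are not shown in this diff):

// Browser usage sketch (assumption: both dist bundles are already on the page,
// so the UMD wrappers above have defined window.markdownit and window.markdownitIns).
var md = window.markdownit().use(window.markdownitIns);

console.log(md.render('++inserted++ text'));
// expected output, per the README below: <p><ins>inserted</ins> text</p>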
index.js
Removed (index.js as of 1.0.0):

'use strict';

// parse sequence of markers,
// "start" should point at a valid marker
function scanDelims(state, start) {
  var pos = start, lastChar, nextChar, count,
      isLastWhiteSpace, isLastPunctChar,
      isNextWhiteSpace, isNextPunctChar,
      can_open = true,
      can_close = true,
      max = state.posMax,
      marker = state.src.charCodeAt(start),
      isWhiteSpace = state.md.utils.isWhiteSpace,
      isPunctChar = state.md.utils.isPunctChar,
      isMdAsciiPunct = state.md.utils.isMdAsciiPunct;

  // treat beginning of the line as a whitespace
  lastChar = start > 0 ? state.src.charCodeAt(start - 1) : 0x20;

  while (pos < max && state.src.charCodeAt(pos) === marker) { pos++; }

  if (pos >= max) {
    can_open = false;
  }

  count = pos - start;

  // treat end of the line as a whitespace
  nextChar = pos < max ? state.src.charCodeAt(pos) : 0x20;

  isLastPunctChar = isMdAsciiPunct(lastChar) || isPunctChar(String.fromCharCode(lastChar));
  isNextPunctChar = isMdAsciiPunct(nextChar) || isPunctChar(String.fromCharCode(nextChar));

  isLastWhiteSpace = isWhiteSpace(lastChar);
  isNextWhiteSpace = isWhiteSpace(nextChar);

  if (isNextWhiteSpace) {
    can_open = false;
  } else if (isNextPunctChar) {
    if (!(isLastWhiteSpace || isLastPunctChar)) {
      can_open = false;
    }
  }

  if (isLastWhiteSpace) {
    can_close = false;
  } else if (isLastPunctChar) {
    if (!(isNextWhiteSpace || isNextPunctChar)) {
      can_close = false;
    }
  }

  return {
    can_open: can_open,
    can_close: can_close,
    delims: count
  };
}

function insert(state, silent) {
  var startCount,
      count,
      tagCount,
      found,
      stack,
      res,
      token,
      max = state.posMax,
      start = state.pos,
      marker = state.src.charCodeAt(start);

  if (marker !== 0x2B/* + */) { return false; }
  if (silent) { return false; } // don't run any pairs in validation mode

  res = scanDelims(state, start);
  startCount = res.delims;

  if (!res.can_open) {
    state.pos += startCount;
    // Earlier we checked !silent, but this implementation does not need it
    state.pending += state.src.slice(start, state.pos);
    return true;
  }

  stack = Math.floor(startCount / 2);
  if (stack <= 0) { return false; }
  state.pos = start + startCount;

  while (state.pos < max) {
    if (state.src.charCodeAt(state.pos) === marker) {
      res = scanDelims(state, state.pos);
      count = res.delims;
      tagCount = Math.floor(count / 2);
      if (res.can_close) {
        if (tagCount >= stack) {
          state.pos += count - 2;
          found = true;
          break;
        }
        stack -= tagCount;
        state.pos += count;
        continue;
      }

      if (res.can_open) { stack += tagCount; }
      state.pos += count;
      continue;
    }

    state.md.inline.skipToken(state);
  }

  if (!found) {
    // parser failed to find ending tag, so it's not valid emphasis
    state.pos = start;
    return false;
  }

  // found!
  state.posMax = state.pos;
  state.pos = start + 2;

  // Earlier we checked !silent, but this implementation does not need it
  token = state.push('ins_open', 'ins', 1);
  token.markup = String.fromCharCode(marker) + String.fromCharCode(marker);

  state.md.inline.tokenize(state);

  token = state.push('ins_close', 'ins', -1);
  token.markup = String.fromCharCode(marker) + String.fromCharCode(marker);

  state.pos = state.posMax + 2;
  state.posMax = max;

  return true;
}

module.exports = function ins_plugin(md) {
  md.inline.ruler.before('emphasis', 'ins', insert);
};

Added (index.js as of 2.0.0):

'use strict';

module.exports = function ins_plugin(md) {

  // Insert each marker as a separate text token, and add it to delimiter list
  //
  function tokenize(state, silent) {
    var i, scanned, token, len, ch,
        start = state.pos,
        marker = state.src.charCodeAt(start);

    if (silent) { return false; }

    if (marker !== 0x2B/* + */) { return false; }

    scanned = state.scanDelims(state.pos, true);
    len = scanned.length;
    ch = String.fromCharCode(marker);

    if (len < 2) { return false; }

    if (len % 2) {
      token = state.push('text', '', 0);
      token.content = ch;
      len--;
    }

    for (i = 0; i < len; i += 2) {
      token = state.push('text', '', 0);
      token.content = ch + ch;

      state.delimiters.push({
        marker: marker,
        jump: i,
        token: state.tokens.length - 1,
        level: state.level,
        end: -1,
        open: scanned.can_open,
        close: scanned.can_close
      });
    }

    state.pos += scanned.length;

    return true;
  }

  // Walk through delimiter list and replace text tokens with tags
  //
  function postProcess(state) {
    var i, j,
        startDelim,
        endDelim,
        token,
        loneMarkers = [],
        delimiters = state.delimiters,
        max = state.delimiters.length;

    for (i = 0; i < max; i++) {
      startDelim = delimiters[i];

      if (startDelim.marker !== 0x2B/* + */) {
        continue;
      }

      if (startDelim.end === -1) {
        continue;
      }

      endDelim = delimiters[startDelim.end];

      token = state.tokens[startDelim.token];
      token.type = 'ins_open';
      token.tag = 'ins';
      token.nesting = 1;
      token.markup = '++';
      token.content = '';

      token = state.tokens[endDelim.token];
      token.type = 'ins_close';
      token.tag = 'ins';
      token.nesting = -1;
      token.markup = '++';
      token.content = '';

      if (state.tokens[endDelim.token - 1].type === 'text' &&
          state.tokens[endDelim.token - 1].content === '+') {
        loneMarkers.push(endDelim.token - 1);
      }
    }

    // If a marker sequence has an odd number of characters, it's splitted
    // like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    // start of the sequence.
    //
    // So, we have to move all those markers after subsequent s_close tags.
    //
    while (loneMarkers.length) {
      i = loneMarkers.pop();
      j = i + 1;

      while (j < state.tokens.length && state.tokens[j].type === 'ins_close') {
        j++;
      }

      j--;

      if (i !== j) {
        token = state.tokens[j];
        state.tokens[j] = state.tokens[i];
        state.tokens[i] = token;
      }
    }
  }

  md.inline.ruler.before('emphasis', 'ins', tokenize);
  md.inline.ruler2.before('emphasis', 'ins', postProcess);
};
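The rewrite follows the inline pipeline introduced in markdown-it 5: the 1.x rule scanned and paired ++ delimiters itself inside a single inline rule, while the 2.x code only emits text tokens plus entries in state.delimiters during tokenize, and turns the paired entries into ins_open/ins_close in a ruler2 post-processing pass. One way to see the result is to inspect the inline tokens the plugin produces — a sketch, not part of the package, assuming markdown-it v5.+ and the v2 plugin are installed:

// Inspect the inline token stream produced for ++inserted++ (illustrative sketch).
var md = require('markdown-it')().use(require('markdown-it-ins'));

var blocks = md.parseInline('++inserted++', {});
var types = blocks[0].children.map(function (token) { return token.type; });

console.log(types);
// expected: [ 'ins_open', 'text', 'ins_close' ] -- the '++' text tokens pushed by
// tokenize() are retyped to ins_open/ins_close by postProcess() above.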
{
  "name": "markdown-it-ins",
  "version": "1.0.0",
  "version": "2.0.0",
  "description": "<ins> tag for markdown-it markdown parser.",
@@ -32,3 +32,3 @@ "keywords": [
  "lodash": "*",
  "markdown-it": "4.0.0",
  "markdown-it": "markdown-it/markdown-it",
  "markdown-it-testgen": "~0.1.0",
@@ -35,0 +35,0 @@ "mocha": "*",
@@ -9,3 +9,3 @@ # markdown-it-ins
__v1.+ requires `markdown-it` v4.+, see changelog.__
__v2.+ requires `markdown-it` v5.+, see changelog.__
@@ -12,0 +12,0 @@ `++inserted++` => `<ins>inserted</ins>`
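The README example above maps directly onto the Node API. A minimal usage sketch, assuming markdown-it v5.+ and markdown-it-ins v2.+ are installed as the compatibility note requires:

// Register the plugin and render the ++...++ syntax (minimal sketch).
var md = require('markdown-it')().use(require('markdown-it-ins'));

console.log(md.render('++inserted++'));
// prints roughly: <p><ins>inserted</ins></p>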
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package