@lexical/text
Comparing version 0.1.16 to 0.1.17
@@ -215,3 +215,155 @@ /**
}
function registerLexicalTextEntity(editor, getMatch, targetNode, createNode) {
  const isTargetNode = node => {
    return node instanceof targetNode;
  };

  // Downgrade an entity node back to a plain TextNode, preserving its format.
  const replaceWithSimpleText = node => {
    const textNode = lexical.$createTextNode(node.getTextContent());
    textNode.setFormat(node.getFormat());
    node.replace(textNode);
  };

  const getMode = node => {
    return node.getLatest().__mode;
  };

  // Forward transform: scans plain TextNodes for matches and converts the
  // matched ranges into target entity nodes.
  const textNodeTransform = node => {
    if (!node.isSimpleText()) {
      return;
    }
    const prevSibling = node.getPreviousSibling();
    let text = node.getTextContent();
    let currentNode = node;
    let match;
    if (lexical.$isTextNode(prevSibling)) {
      const previousText = prevSibling.getTextContent();
      const combinedText = previousText + text;
      const prevMatch = getMatch(combinedText);
      if (isTargetNode(prevSibling)) {
        // The previous sibling is already an entity: revert it if the
        // combined text no longer matches, or if the node left normal mode.
        if (prevMatch === null || getMode(prevSibling) !== 0) {
          replaceWithSimpleText(prevSibling);
          return;
        } else {
          const diff = prevMatch.end - previousText.length;
          if (diff > 0) {
            // The match now extends into this node: absorb the overlapping
            // characters into the entity and trim (or remove) this node.
            const concatText = text.slice(0, diff);
            const newTextContent = previousText + concatText;
            prevSibling.select();
            prevSibling.setTextContent(newTextContent);
            if (diff === text.length) {
              node.remove();
            } else {
              const remainingText = text.slice(diff);
              node.setTextContent(remainingText);
            }
            return;
          }
        }
      } else if (prevMatch === null || prevMatch.start < previousText.length) {
        // Any match starts inside the previous sibling, so there is nothing
        // to convert in this node yet.
        return;
      }
    }
    // Repeatedly split out each match in the remaining text and replace it
    // with an entity node created by createNode.
    while (true) {
      match = getMatch(text);
      let nextText = match === null ? '' : text.slice(match.end);
      text = nextText;
      if (nextText === '') {
        const nextSibling = currentNode.getNextSibling();
        if (lexical.$isTextNode(nextSibling)) {
          nextText = currentNode.getTextContent() + nextSibling.getTextContent();
          const nextMatch = getMatch(nextText);
          if (nextMatch === null) {
            if (isTargetNode(nextSibling)) {
              replaceWithSimpleText(nextSibling);
            } else {
              nextSibling.markDirty();
            }
            return;
          } else if (nextMatch.start !== 0) {
            return;
          }
        }
      } else {
        const nextMatch = getMatch(nextText);
        if (nextMatch !== null && nextMatch.start === 0) {
          return;
        }
      }
      if (match === null) {
        return;
      }
      if (match.start === 0 && lexical.$isTextNode(prevSibling) && prevSibling.isTextEntity()) {
        continue;
      }
      let nodeToReplace;
      if (match.start === 0) {
        [nodeToReplace, currentNode] = currentNode.splitText(match.end);
      } else {
        [, nodeToReplace, currentNode] = currentNode.splitText(match.start, match.end);
      }
      const replacementNode = createNode(nodeToReplace);
      nodeToReplace.replace(replacementNode);
      if (currentNode == null) {
        return;
      }
    }
  };

  // Reverse transform: runs on entity nodes and reverts them to plain text
  // once their content no longer matches from offset 0.
  const reverseNodeTransform = node => {
    const text = node.getTextContent();
    const match = getMatch(text);
    if (match === null || match.start !== 0) {
      replaceWithSimpleText(node);
      return;
    }
    if (text.length > match.end) {
      // This will split out the rest of the text as simple text.
      node.splitText(match.end);
      return;
    }
    const prevSibling = node.getPreviousSibling();
    if (lexical.$isTextNode(prevSibling) && prevSibling.isTextEntity()) {
      replaceWithSimpleText(prevSibling);
      replaceWithSimpleText(node);
    }
    const nextSibling = node.getNextSibling();
    if (lexical.$isTextNode(nextSibling) && nextSibling.isTextEntity()) {
      // This may have already been converted in the previous block.
      replaceWithSimpleText(nextSibling);
      if (isTargetNode(node)) {
        replaceWithSimpleText(node);
      }
    }
  };

  // Register both transforms and return their unregister callbacks.
  const removePlainTextTransform = editor.registerNodeTransform(lexical.TextNode, textNodeTransform);
  const removeReverseNodeTransform = editor.registerNodeTransform(targetNode, reverseNodeTransform);
  return [removePlainTextTransform, removeReverseNodeTransform];
}
exports.$canShowPlaceholder = $canShowPlaceholder;
@@ -225,1 +377,2 @@ exports.$canShowPlaceholderCurry = $canShowPlaceholderCurry;
exports.$rootTextContentCurry = $rootTextContentCurry;
exports.registerLexicalTextEntity = registerLexicalTextEntity;
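For reference, the sketch below shows how the new registerLexicalTextEntity export is typically consumed, using a hashtag entity as a running example. It is a minimal sketch, not part of this package: HashtagNode, $createHashtagNode, and the regex are hypothetical application code, while the (editor, getMatch, targetNode, createNode) signature and the {start, end} | null contract of getMatch come from the source above.

import {registerLexicalTextEntity} from '@lexical/text';
// Hypothetical application-defined entity node and factory; not part of
// @lexical/text.
import {HashtagNode, $createHashtagNode} from './HashtagNode';

const HASHTAG_REGEX = /#\w+/;

// getMatch must return {start, end} offsets into the scanned text, or null.
function getHashtagMatch(text) {
  const match = HASHTAG_REGEX.exec(text);
  return match === null
    ? null
    : {start: match.index, end: match.index + match[0].length};
}

export function registerHashtags(editor) {
  const [removeTextTransform, removeEntityTransform] =
    registerLexicalTextEntity(
      editor,
      getHashtagMatch,
      HashtagNode,
      // createNode receives the TextNode that was split out for the match.
      textNode => $createHashtagNode(textNode.getTextContent()),
    );
  // Wrap the two unregister callbacks in a single teardown.
  return () => {
    removeTextTransform();
    removeEntityTransform();
  };
}

Because registerLexicalTextEntity returns the two callbacks from registerNodeTransform rather than a single one, callers usually combine them as above.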
@@ -7,6 +7,9 @@ /**
 */
-var f=require("lexical");function m(a,e=!0){if(a)return!1;a=p();e&&(a=a.trim());return""===a}function p(){return f.$getRoot().getTextContent()}function q(a){if(!m(a,!1))return!1;a=f.$getRoot().getChildren();const e=a.length;if(1<e)return!1;for(let c=0;c<e;c++){var b=a[c];if(f.$isElementNode(b)){if("paragraph"!==b.__type||0!==b.__indent)return!1;b=b.getChildren();const k=b.length;for(let d=0;d<k;d++)if(!f.$isTextNode(b[c]))return!1}}return!0}exports.$canShowPlaceholder=q;
-exports.$canShowPlaceholderCurry=function(a){return()=>q(a)};exports.$findNodeWithOffsetFromJoinedText=function(a,e,b,c){c=c.getChildren();const k=c.length;let d=0,g=!1;for(let n=0;n<k&&!(d>e);++n){const l=c[n],r=f.$isTextNode(l);var h=r?l.getTextContent().length:b;h=d+h;if((!1===g&&d===a||0===d&&d===a||d<a&&a<=h)&&f.$isTextNode(l))return{node:l,offset:a-d};d=h;g=r}return null};
-exports.$findTextIntersectionFromCharacters=function(a,e){var b=a.getFirstChild();a=0;a:for(;null!==b;){if(f.$isElementNode(b)){var c=b.getFirstChild();if(null!==c){b=c;continue}}else if(f.$isTextNode(b)){c=b.getTextContentSize();if(a+c>e)return{node:b,offset:e-a};a+=c}c=b.getNextSibling();if(null!==c)b=c;else{for(b=b.getParent();null!==b;){c=b.getNextSibling();if(null!==c){b=c;continue a}b=b.getParent()}break}}return null};exports.$isRootTextContentEmpty=m;
-exports.$isRootTextContentEmptyCurry=function(a,e){return()=>m(a,e)};
-exports.$joinTextNodesInElementNode=function(a,e,b){let c="";a=a.getChildren();const k=a.length;for(let d=0;d<k;++d){const g=a[d];if(f.$isTextNode(g)){const h=g.getTextContent();if(g.is(b.node)){if(b.offset>h.length)throw Error("Minified Lexical error #50; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");c+=g.getTextContent().substr(0,b.offset);break}else c+=h}else c+=e}return c};exports.$rootTextContentCurry=p;
+var k=require("lexical");function r(b,f=!0){if(b)return!1;b=t();f&&(b=b.trim());return""===b}function t(){return k.$getRoot().getTextContent()}function u(b){if(!r(b,!1))return!1;b=k.$getRoot().getChildren();const f=b.length;if(1<f)return!1;for(let e=0;e<f;e++){var c=b[e];if(k.$isElementNode(c)){if("paragraph"!==c.__type||0!==c.__indent)return!1;c=c.getChildren();const p=c.length;for(let g=0;g<p;g++)if(!k.$isTextNode(c[e]))return!1}}return!0}exports.$canShowPlaceholder=u;
+exports.$canShowPlaceholderCurry=function(b){return()=>u(b)};exports.$findNodeWithOffsetFromJoinedText=function(b,f,c,e){e=e.getChildren();const p=e.length;let g=0,a=!1;for(let l=0;l<p&&!(g>f);++l){const m=e[l],n=k.$isTextNode(m);var d=n?m.getTextContent().length:c;d=g+d;if((!1===a&&g===b||0===g&&g===b||g<b&&b<=d)&&k.$isTextNode(m))return{node:m,offset:b-g};g=d;a=n}return null};
+exports.$findTextIntersectionFromCharacters=function(b,f){var c=b.getFirstChild();b=0;a:for(;null!==c;){if(k.$isElementNode(c)){var e=c.getFirstChild();if(null!==e){c=e;continue}}else if(k.$isTextNode(c)){e=c.getTextContentSize();if(b+e>f)return{node:c,offset:f-b};b+=e}e=c.getNextSibling();if(null!==e)c=e;else{for(c=c.getParent();null!==c;){e=c.getNextSibling();if(null!==e){c=e;continue a}c=c.getParent()}break}}return null};exports.$isRootTextContentEmpty=r;
+exports.$isRootTextContentEmptyCurry=function(b,f){return()=>r(b,f)};
+exports.$joinTextNodesInElementNode=function(b,f,c){let e="";b=b.getChildren();const p=b.length;for(let g=0;g<p;++g){const a=b[g];if(k.$isTextNode(a)){const d=a.getTextContent();if(a.is(c.node)){if(c.offset>d.length)throw Error("Minified Lexical error #50; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");e+=a.getTextContent().substr(0,c.offset);break}else e+=d}else e+=f}return e};exports.$rootTextContentCurry=t;
+exports.registerLexicalTextEntity=function(b,f,c,e){const p=a=>{const d=k.$createTextNode(a.getTextContent());d.setFormat(a.getFormat());a.replace(d)},g=b.registerNodeTransform(k.TextNode,a=>{if(a.isSimpleText()){var d=a.getPreviousSibling(),l=a.getTextContent(),m=a;if(k.$isTextNode(d)){var n=d.getTextContent(),h=f(n+l);if(d instanceof c){if(null===h||0!==d.getLatest().__mode){p(d);return}h=h.end-n.length;if(0<h){m=l.slice(0,h);m=n+m;d.select();d.setTextContent(m);h===l.length?a.remove():(d=l.slice(h),
+a.setTextContent(d));return}}else if(null===h||h.start<n.length)return}for(;;){a=f(l);l=h=null===a?"":l.slice(a.end);if(""===h){if(n=m.getNextSibling(),k.$isTextNode(n))if(h=m.getTextContent()+n.getTextContent(),h=f(h),null===h){n instanceof c?p(n):n.markDirty();break}else if(0!==h.start)break}else if(n=f(h),null!==n&&0===n.start)break;if(null===a)break;if(0===a.start&&k.$isTextNode(d)&&d.isTextEntity())continue;let q;0===a.start?[q,m]=m.splitText(a.end):[,q,m]=m.splitText(a.start,a.end);a=e(q);q.replace(a);
+if(null==m)break}}});b=b.registerNodeTransform(c,a=>{var d=a.getTextContent();const l=f(d);null===l||0!==l.start?p(a):d.length>l.end?a.splitText(l.end):(d=a.getPreviousSibling(),k.$isTextNode(d)&&d.isTextEntity()&&(p(d),p(a)),d=a.getNextSibling(),k.$isTextNode(d)&&d.isTextEntity()&&(p(d),a instanceof c&&p(a)))});return[g,b]};
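The bundled helpers above keep their previous behavior; roughly, $canShowPlaceholder returns true only when the root text content is empty (without trimming) and the root holds at most a single unindented paragraph of text children. A minimal usage sketch, assuming editor is a LexicalEditor whose isComposing() and registerUpdateListener methods are available in this version; the DOM toggling is illustrative application code, and only the @lexical/text import comes from this package:

import {$canShowPlaceholderCurry} from '@lexical/text';

// Illustrative wiring: show or hide a placeholder element as the editor
// updates. `editor` and `placeholderElem` are assumed to be provided by the
// host application.
function watchPlaceholder(editor, placeholderElem) {
  return editor.registerUpdateListener(({editorState}) => {
    const canShow = editorState.read(
      $canShowPlaceholderCurry(editor.isComposing()),
    );
    placeholderElem.style.display = canShow ? 'block' : 'none';
  });
}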
{
  "name": "@lexical/text",
-  "author": {
-    "name": "Dominic Gannaway",
-    "email": "dg@domgan.com"
-  },
  "description": "This package contains utilities and helpers for handling Lexical text.",
@@ -16,6 +12,6 @@ "keywords": [
  "license": "MIT",
-  "version": "0.1.16",
+  "version": "0.1.17",
  "main": "LexicalText.js",
  "peerDependencies": {
-    "lexical": "0.1.16"
+    "lexical": "0.1.17"
  },
@@ -22,0 +18,0 @@ "repository": {
No contributors or author data
Maintenance: Package does not specify a list of contributors or an author in package.json.
Found 1 instance in 1 package