@lexical/text
Comparing version 0.4.1 to 0.5.0
@@ -9,3 +9,3 @@ /** @module @lexical/text */
  */
-import type { ElementNode, Klass, LexicalEditor, RootNode } from 'lexical';
+import type { Klass, LexicalEditor, RootNode } from 'lexical';
 import { TextNode } from 'lexical';
@@ -20,9 +20,7 @@ export declare type TextNodeWithOffset = {
 };
-export declare function $joinTextNodesInElementNode(elementNode: ElementNode, separator: string, stopAt: TextNodeWithOffset): string;
-export declare function $findNodeWithOffsetFromJoinedText(offsetInJoinedText: number, joinedTextLength: number, separatorLength: number, elementNode: ElementNode): TextNodeWithOffset | null;
 export declare function $isRootTextContentEmpty(isEditorComposing: boolean, trim?: boolean): boolean;
 export declare function $isRootTextContentEmptyCurry(isEditorComposing: boolean, trim?: boolean): () => boolean;
 export declare function $rootTextContent(): string;
-export declare function $canShowPlaceholder(isComposing: boolean, isEditable?: boolean): boolean;
-export declare function $canShowPlaceholderCurry(isEditorComposing: boolean, isEditable?: boolean): () => boolean;
+export declare function $canShowPlaceholder(isComposing: boolean, isEditable: boolean): boolean;
+export declare function $canShowPlaceholderCurry(isEditorComposing: boolean, isEditable: boolean): () => boolean;
 export declare type EntityMatch = {
@@ -29,0 +27,0 @@ end: number;
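The visible API change in the declarations above: $joinTextNodesInElementNode and $findNodeWithOffsetFromJoinedText are gone, and $canShowPlaceholder / $canShowPlaceholderCurry no longer mark isEditable as optional. A minimal adaptation sketch for a 0.5.0 caller, assuming `editor` is a LexicalEditor owned by the host application (that variable and the helper name below are illustrative, not part of this diff):

import type { LexicalEditor } from 'lexical';
import { $canShowPlaceholderCurry } from '@lexical/text';

// Before 0.5.0 the second argument defaulted to true; now it must be passed explicitly.
function shouldShowPlaceholder(editor: LexicalEditor, isEditable: boolean): boolean {
  return editor
    .getEditorState()
    .read($canShowPlaceholderCurry(editor.isComposing(), isEditable));
}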
@@ -61,85 +61,2 @@ /**
   return null;
-} // Return text content for child text nodes. Each non-text node is separated by input string.
-// Caution, this function creates a string and should not be used within a tight loop.
-// Use $getNodeWithOffsetsFromJoinedTextNodesFromElementNode below to convert
-// indexes in the return string back into their corresponding node and offsets.
-function $joinTextNodesInElementNode(elementNode, separator, stopAt) {
-  let textContent = '';
-  const children = elementNode.getChildren();
-  const length = children.length;
-  for (let i = 0; i < length; ++i) {
-    const child = children[i];
-    if (lexical.$isTextNode(child)) {
-      const childTextContent = child.getTextContent();
-      if (child.is(stopAt.node)) {
-        if (stopAt.offset > childTextContent.length) {
-          {
-            throw Error(`Node ${child.__key} and selection point do not match.`);
-          }
-        }
-        textContent += child.getTextContent().substr(0, stopAt.offset);
-        break;
-      } else {
-        textContent += childTextContent;
-      }
-    } else {
-      textContent += separator;
-    }
-  }
-  return textContent;
-} // This function converts the offsetInJoinedText to
-// a node and offset result or null if not found.
-// This function is to be used in conjunction with joinTextNodesInElementNode above.
-// The joinedTextContent should be return value from joinTextNodesInElementNode.
-//
-// The offsetInJoinedText is relative to the entire string which
-// itself is relevant to the parent ElementNode.
-//
-// Example:
-// Given a Paragraph with 2 TextNodes. The first is Hello, the second is World.
-// The joinedTextContent would be "HelloWorld"
-// The offsetInJoinedText might be for the letter "e" = 1 or "r" = 7.
-// The return values would be {TextNode1, 1} or {TextNode2,2}, respectively.
-function $findNodeWithOffsetFromJoinedText(offsetInJoinedText, joinedTextLength, separatorLength, elementNode) {
-  const children = elementNode.getChildren();
-  const childrenLength = children.length;
-  let runningLength = 0;
-  let isPriorNodeTextNode = false;
-  for (let i = 0; i < childrenLength; ++i) {
-    // We must examine the offsetInJoinedText that is located
-    // at the length of the string.
-    // For example, given "hello", the length is 5, yet
-    // the caller still wants the node + offset at the
-    // right edge of the "o".
-    if (runningLength > joinedTextLength) {
-      break;
-    }
-    const child = children[i];
-    const isChildNodeTestNode = lexical.$isTextNode(child);
-    const childContentLength = isChildNodeTestNode ? child.getTextContent().length : separatorLength;
-    const newRunningLength = runningLength + childContentLength;
-    const isJoinedOffsetWithinNode = isPriorNodeTextNode === false && runningLength === offsetInJoinedText || runningLength === 0 && runningLength === offsetInJoinedText || runningLength < offsetInJoinedText && offsetInJoinedText <= newRunningLength;
-    if (isJoinedOffsetWithinNode && lexical.$isTextNode(child)) {
-      // Check isTextNode again for flow.
-      return {
-        node: child,
-        offset: offsetInJoinedText - runningLength
-      };
-    }
-    runningLength = newRunningLength;
-    isPriorNodeTextNode = isChildNodeTestNode;
-  }
-  return null;
-}
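Both helpers above were removed outright in 0.5.0 with no replacement in this package. For code still on 0.4.x, a usage sketch based on the comments above, assuming it runs inside editor.update() on a paragraph whose children are the plain TextNodes "Hello" and "World", with `editor` in scope:

import { $getRoot, $isElementNode, $isTextNode } from 'lexical';
import {
  $findNodeWithOffsetFromJoinedText,
  $joinTextNodesInElementNode,
} from '@lexical/text'; // 0.4.x only; both exports are gone in 0.5.0

editor.update(() => {
  const paragraph = $getRoot().getFirstChild();
  if (!$isElementNode(paragraph)) {
    return;
  }
  const lastChild = paragraph.getLastChild();
  if (!$isTextNode(lastChild)) {
    return;
  }
  // Joins "Hello" + "World" into "HelloWorld"; the separator only appears for non-text children.
  const joined = $joinTextNodesInElementNode(paragraph, ' ', {
    node: lastChild,
    offset: lastChild.getTextContentSize(),
  });
  // Maps offset 7 (the "r") back to the second TextNode at offset 2, per the example above.
  const point = $findNodeWithOffsetFromJoinedText(7, joined.length, 1, paragraph);
  // point is { node: <TextNode "World">, offset: 2 } here.
});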
@@ -166,4 +83,3 @@ function $isRootTextContentEmpty(isEditorComposing, trim = true) {
 }
-function $canShowPlaceholder(isComposing, // TODO 0.5 make mandatory
-isEditable = true) {
+function $canShowPlaceholder(isComposing, isEditable) {
   if (!isEditable || !$isRootTextContentEmpty(isComposing, false)) {
@@ -208,4 +124,3 @@ return false;
 }
-function $canShowPlaceholderCurry(isEditorComposing, // TODO 0.5 make mandatory
-isEditable = true) {
+function $canShowPlaceholderCurry(isEditorComposing, isEditable) {
   return () => $canShowPlaceholder(isEditorComposing, isEditable);
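The emptiness helpers in this hunk keep their 0.4.x behavior; only the placeholder functions lose their default. A small read-only sketch, assuming `editor` is a LexicalEditor in scope (trim defaults to true in $isRootTextContentEmpty, so whitespace-only content counts as empty):

import { $isRootTextContentEmptyCurry, $rootTextContent } from '@lexical/text';

// Query the current editor state without mutating it.
const isEmpty = editor
  .getEditorState()
  .read($isRootTextContentEmptyCurry(editor.isComposing()));
const plainText = editor.getEditorState().read($rootTextContent);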
@@ -369,8 +284,6 @@ }
 exports.$canShowPlaceholderCurry = $canShowPlaceholderCurry;
-exports.$findNodeWithOffsetFromJoinedText = $findNodeWithOffsetFromJoinedText;
 exports.$findTextIntersectionFromCharacters = $findTextIntersectionFromCharacters;
 exports.$isRootTextContentEmpty = $isRootTextContentEmpty;
 exports.$isRootTextContentEmptyCurry = $isRootTextContentEmptyCurry;
-exports.$joinTextNodesInElementNode = $joinTextNodesInElementNode;
 exports.$rootTextContent = $rootTextContent;
 exports.registerLexicalTextEntity = registerLexicalTextEntity;
@@ -7,10 +7,7 @@ /**
  */
-'use strict';var k=require("lexical");function r(b,f=!0){if(b)return!1;b=t();f&&(b=b.trim());return""===b}function t(){return k.$getRoot().getTextContent()}function u(b,f=!0){if(!f||!r(b,!1))return!1;b=k.$getRoot().getChildren();f=b.length;if(1<f)return!1;for(let e=0;e<f;e++){var c=b[e];if(k.$isElementNode(c)){if("paragraph"!==c.__type||0!==c.__indent)return!1;c=c.getChildren();let p=c.length;for(let g=0;g<p;g++)if(!k.$isTextNode(c[e]))return!1}}return!0}exports.$canShowPlaceholder=u;
-exports.$canShowPlaceholderCurry=function(b,f=!0){return()=>u(b,f)};exports.$findNodeWithOffsetFromJoinedText=function(b,f,c,e){e=e.getChildren();let p=e.length,g=0,a=!1;for(let l=0;l<p&&!(g>f);++l){let m=e[l],n=k.$isTextNode(m);var d=n?m.getTextContent().length:c;d=g+d;if((!1===a&&g===b||0===g&&g===b||g<b&&b<=d)&&k.$isTextNode(m))return{node:m,offset:b-g};g=d;a=n}return null};
-exports.$findTextIntersectionFromCharacters=function(b,f){var c=b.getFirstChild();b=0;a:for(;null!==c;){if(k.$isElementNode(c)){var e=c.getFirstChild();if(null!==e){c=e;continue}}else if(k.$isTextNode(c)){e=c.getTextContentSize();if(b+e>f)return{node:c,offset:f-b};b+=e}e=c.getNextSibling();if(null!==e)c=e;else{for(c=c.getParent();null!==c;){e=c.getNextSibling();if(null!==e){c=e;continue a}c=c.getParent()}break}}return null};exports.$isRootTextContentEmpty=r;
-exports.$isRootTextContentEmptyCurry=function(b,f){return()=>r(b,f)};
-exports.$joinTextNodesInElementNode=function(b,f,c){let e="";b=b.getChildren();let p=b.length;for(let g=0;g<p;++g){let a=b[g];if(k.$isTextNode(a)){let d=a.getTextContent();if(a.is(c.node)){if(c.offset>d.length)throw Error("Minified Lexical error #2; visit https://lexical.dev/docs/error?code=2 for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");e+=a.getTextContent().substr(0,c.offset);break}else e+=d}else e+=f}return e};
-exports.$rootTextContent=t;
-exports.registerLexicalTextEntity=function(b,f,c,e){let p=a=>{const d=k.$createTextNode(a.getTextContent());d.setFormat(a.getFormat());a.replace(d)},g=b.registerNodeTransform(k.TextNode,a=>{if(a.isSimpleText()){var d=a.getPreviousSibling(),l=a.getTextContent(),m=a;if(k.$isTextNode(d)){var n=d.getTextContent(),h=f(n+l);if(d instanceof c){if(null===h||0!==d.getLatest().__mode){p(d);return}h=h.end-n.length;if(0<h){m=l.slice(0,h);m=n+m;d.select();d.setTextContent(m);h===l.length?a.remove():(d=l.slice(h),
-a.setTextContent(d));return}}else if(null===h||h.start<n.length)return}for(;;){a=f(l);l=h=null===a?"":l.slice(a.end);if(""===h){if(n=m.getNextSibling(),k.$isTextNode(n))if(h=m.getTextContent()+n.getTextContent(),h=f(h),null===h){n instanceof c?p(n):n.markDirty();break}else if(0!==h.start)break}else if(n=f(h),null!==n&&0===n.start)break;if(null===a)break;if(0===a.start&&k.$isTextNode(d)&&d.isTextEntity())continue;let q;0===a.start?[q,m]=m.splitText(a.end):[,q,m]=m.splitText(a.start,a.end);a=e(q);q.replace(a);
-if(null==m)break}}});b=b.registerNodeTransform(c,a=>{var d=a.getTextContent();const l=f(d);null===l||0!==l.start?p(a):d.length>l.end?a.splitText(l.end):(d=a.getPreviousSibling(),k.$isTextNode(d)&&d.isTextEntity()&&(p(d),p(a)),d=a.getNextSibling(),k.$isTextNode(d)&&d.isTextEntity()&&(p(d),a instanceof c&&p(a)))});return[g,b]}
+'use strict';var h=require("lexical");function r(c,e=!0){if(c)return!1;c=t();e&&(c=c.trim());return""===c}function t(){return h.$getRoot().getTextContent()}function u(c,e){if(!e||!r(c,!1))return!1;c=h.$getRoot().getChildren();e=c.length;if(1<e)return!1;for(let f=0;f<e;f++){var b=c[f];if(h.$isElementNode(b)){if("paragraph"!==b.__type||0!==b.__indent)return!1;b=b.getChildren();let n=b.length;for(let p=0;p<n;p++)if(!h.$isTextNode(b[f]))return!1}}return!0}exports.$canShowPlaceholder=u;
+exports.$canShowPlaceholderCurry=function(c,e){return()=>u(c,e)};exports.$findTextIntersectionFromCharacters=function(c,e){var b=c.getFirstChild();c=0;a:for(;null!==b;){if(h.$isElementNode(b)){var f=b.getFirstChild();if(null!==f){b=f;continue}}else if(h.$isTextNode(b)){f=b.getTextContentSize();if(c+f>e)return{node:b,offset:e-c};c+=f}f=b.getNextSibling();if(null!==f)b=f;else{for(b=b.getParent();null!==b;){f=b.getNextSibling();if(null!==f){b=f;continue a}b=b.getParent()}break}}return null};
+exports.$isRootTextContentEmpty=r;exports.$isRootTextContentEmptyCurry=function(c,e){return()=>r(c,e)};exports.$rootTextContent=t;
+exports.registerLexicalTextEntity=function(c,e,b,f){let n=a=>{const d=h.$createTextNode(a.getTextContent());d.setFormat(a.getFormat());a.replace(d)},p=c.registerNodeTransform(h.TextNode,a=>{if(a.isSimpleText()){var d=a.getPreviousSibling(),l=a.getTextContent(),m=a;if(h.$isTextNode(d)){var k=d.getTextContent(),g=e(k+l);if(d instanceof b){if(null===g||0!==d.getLatest().__mode){n(d);return}g=g.end-k.length;if(0<g){m=l.slice(0,g);m=k+m;d.select();d.setTextContent(m);g===l.length?a.remove():(d=l.slice(g),
+a.setTextContent(d));return}}else if(null===g||g.start<k.length)return}for(;;){a=e(l);l=g=null===a?"":l.slice(a.end);if(""===g){if(k=m.getNextSibling(),h.$isTextNode(k))if(g=m.getTextContent()+k.getTextContent(),g=e(g),null===g){k instanceof b?n(k):k.markDirty();break}else if(0!==g.start)break}else if(k=e(g),null!==k&&0===k.start)break;if(null===a)break;if(0===a.start&&h.$isTextNode(d)&&d.isTextEntity())continue;let q;0===a.start?[q,m]=m.splitText(a.end):[,q,m]=m.splitText(a.start,a.end);a=f(q);q.replace(a);
+if(null==m)break}}});c=c.registerNodeTransform(b,a=>{var d=a.getTextContent();const l=e(d);null===l||0!==l.start?n(a):d.length>l.end?a.splitText(l.end):(d=a.getPreviousSibling(),h.$isTextNode(d)&&d.isTextEntity()&&(n(d),n(a)),d=a.getNextSibling(),h.$isTextNode(d)&&d.isTextEntity()&&(n(d),a instanceof b&&n(a)))});return[p,c]}
@@ -12,6 +12,6 @@ {
   "license": "MIT",
-  "version": "0.4.1",
+  "version": "0.5.0",
   "main": "LexicalText.js",
   "peerDependencies": {
-    "lexical": "0.4.1"
+    "lexical": "0.5.0"
   },
@@ -18,0 +18,0 @@ "repository": {
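The peer dependency stays pinned to an exact version, so lexical and @lexical/text have to be upgraded in lockstep. A consuming project's package.json after the bump might look like this (the surrounding structure is illustrative, not part of this diff):

{
  "dependencies": {
    "@lexical/text": "0.5.0",
    "lexical": "0.5.0"
  }
}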