@lexical/helpers — bundled helper modules (DOM class-name utilities, clipboard/selection helpers, and file import/export), each present in both a readable development build and a minified production build.
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Adds one or more space-delimited class-name strings to a DOM element.
 * Non-string entries (null, undefined, numbers, …) are silently skipped.
 *
 * @param {HTMLElement} element - the element whose classList is extended.
 * @param {...(string|null|undefined)} classNames - class names; each string
 *   may contain several names separated by whitespace.
 */
function addClassNamesToElement(element, ...classNames) {
  classNames.forEach(className => {
    // typeof check alone excludes null/undefined; the extra != null test
    // in the original was redundant.
    if (typeof className === 'string') {
      // Split on whitespace runs and drop empty tokens: classList.add('')
      // throws a SyntaxError, which 'a  b' (double space) would trigger
      // with a naive split(' ').
      const classes = className.split(/\s+/).filter(cls => cls !== '');
      if (classes.length > 0) {
        element.classList.add(...classes);
      }
    }
  });
}
/**
 * Removes one or more space-delimited class-name strings from a DOM element.
 * Mirrors addClassNamesToElement: non-string entries are skipped (the
 * original had no guard here and would throw on null), and empty tokens
 * from repeated whitespace are filtered out so classList.remove('') cannot
 * throw a SyntaxError.
 *
 * @param {HTMLElement} element - the element whose classList is reduced.
 * @param {...(string|null|undefined)} classNames - class names; each string
 *   may contain several names separated by whitespace.
 */
function removeClassNamesFromElement(element, ...classNames) {
  classNames.forEach(className => {
    if (typeof className === 'string') {
      const classes = className.split(/\s+/).filter(cls => cls !== '');
      if (classes.length > 0) {
        element.classList.remove(...classes);
      }
    }
  });
}
// Public CommonJS API of the class-name helper module.
exports.addClassNamesToElement = addClassNamesToElement;
exports.removeClassNamesFromElement = removeClassNamesFromElement;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// Minified production build of the class-name helpers above (identical behavior).
'use strict';exports.addClassNamesToElement=function(b,...c){c.forEach(a=>{null!=a&&"string"===typeof a&&b.classList.add(...a.split(" "))})};exports.removeClassNamesFromElement=function(b,...c){c.forEach(a=>{b.classList.remove(...a.split(" "))})};
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| var list = require('@lexical/list'); | ||
| var CodeNode = require('lexical/CodeNode'); | ||
| var HeadingNode = require('lexical/HeadingNode'); | ||
| var LinkNode = require('lexical/LinkNode'); | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Clones the latest version of `node` via its constructor's `clone`, then
 * copies across the internal per-type fields that `clone` does not carry
 * over (parent pointer, children list, formatting, text mode, etc.).
 *
 * @param node a LexicalNode
 * @returns the populated clone
 */
function $cloneWithProperties(node) {
  const latest = node.getLatest();
  const clone = latest.constructor.clone(latest);
  clone.__parent = latest.__parent;
  // Pick the extra fields to mirror based on the node's concrete type.
  let fieldsToCopy = [];
  if (lexical.$isElementNode(latest) && lexical.$isElementNode(clone)) {
    clone.__children = Array.from(latest.__children);
    fieldsToCopy = ['__format', '__indent', '__dir'];
  } else if (lexical.$isTextNode(latest) && lexical.$isTextNode(clone)) {
    fieldsToCopy = ['__format', '__style', '__mode', '__detail'];
  } else if (lexical.$isDecoratorNode(latest) && lexical.$isDecoratorNode(clone)) {
    fieldsToCopy = ['__state'];
  }
  for (const field of fieldsToCopy) {
    clone[field] = latest[field];
  }
  return clone;
}
/**
 * Returns the child index of `node` within `parent`, preferring the parent's
 * clone in `nodeMap` when one exists (its __children list may already have
 * been trimmed and therefore differ from the live tree).
 */
function $getIndexFromPossibleClone(node, parent, nodeMap) {
  const parentClone = nodeMap.get(parent.getKey());
  return lexical.$isElementNode(parentClone)
    ? parentClone.__children.indexOf(node.getKey())
    : node.getIndexWithinParent();
}
/**
 * Returns the nearest ancestor of `node` that is not flagged with
 * excludeFromCopy(), or null when no such ancestor exists.
 */
function $getParentAvoidingExcludedElements(node) {
  let candidate = node.getParent();
  while (candidate !== null) {
    if (!candidate.excludeFromCopy()) {
      return candidate;
    }
    candidate = candidate.getParent();
  }
  return null;
}
/**
 * Walks from `leaf` up toward the root, cloning each ancestor into `nodeMap`
 * and trimming every clone so it only retains the selected side of the
 * branch. Stops when the root is reached.
 *
 * @param leaf           the node at one end of the selection
 * @param startingOffset character offset (for text) / child index (for
 *                       elements) of the selection endpoint
 * @param isLeftSide     true for the start-side branch (content BEFORE the
 *                       offset is cut); false for the end-side branch
 *                       (content AFTER the offset is cut)
 * @param range          output: list of top-level node keys (mutated)
 * @param nodeMap        output: key -> cloned node (mutated)
 */
function $copyLeafNodeBranchToRoot(leaf, startingOffset, isLeftSide, range, nodeMap) {
  let node = leaf;
  let offset = startingOffset;
  while (node !== null) {
    const parent = $getParentAvoidingExcludedElements(node);
    if (parent === null) {
      break;
    }
    if (!lexical.$isElementNode(node) || !node.excludeFromCopy()) {
      const key = node.getKey();
      // Reuse an existing clone (the node may already have been copied by
      // the other branch walk) rather than cloning twice.
      let clone = nodeMap.get(key);
      const needsClone = clone === undefined;
      if (needsClone) {
        clone = $cloneWithProperties(node);
        nodeMap.set(key, clone);
      }
      // Trim the clone: keep [offset..] on the left branch, [..offset]
      // on the right branch (child slice is end-inclusive: offset + 1).
      // Segmented/token text nodes are kept whole.
      if (lexical.$isTextNode(clone) && !clone.isSegmented() && !clone.isToken()) {
        clone.__text = clone.__text.slice(isLeftSide ? offset : 0, isLeftSide ? undefined : offset);
      } else if (lexical.$isElementNode(clone)) {
        clone.__children = clone.__children.slice(isLeftSide ? offset : 0, isLeftSide ? undefined : offset + 1);
      }
      if (lexical.$isRootNode(parent)) {
        if (needsClone) {
          // We only want to collect a range of top level nodes.
          // So if the parent is the root, we know this is a top level.
          range.push(key);
        }
        break;
      }
    }
    // Move one level up; the new offset is this node's index within the
    // parent (consulting the parent's possibly-trimmed clone).
    offset = $getIndexFromPossibleClone(node, parent, nodeMap);
    node = parent;
  }
}
/**
 * Builds a serializable snapshot of the content covered by `selection`:
 * a key -> cloned-node map plus the ordered list of top-level keys.
 * This is the payload stored on the clipboard for rich-text copy.
 *
 * @param selection a range selection
 * @returns {{nodeMap: Array, range: Array}} entries array (for JSON) and
 *          top-level key order
 */
function $cloneContents(selection) {
  const anchor = selection.anchor;
  const focus = selection.focus;
  const anchorOffset = anchor.getCharacterOffset();
  const focusOffset = focus.getCharacterOffset();
  const anchorNode = anchor.getNode();
  const focusNode = focus.getNode();
  const anchorNodeParent = anchorNode.getParentOrThrow(); // Handle a single text node extraction
  if (anchorNode === focusNode && lexical.$isTextNode(anchorNode) && (anchorNodeParent.canBeEmpty() || anchorNodeParent.getChildrenSize() > 1)) {
    const clonedFirstNode = $cloneWithProperties(anchorNode);
    // Normalize anchor/focus into start/end regardless of selection direction.
    const isBefore = focusOffset > anchorOffset;
    const startOffset = isBefore ? anchorOffset : focusOffset;
    const endOffset = isBefore ? focusOffset : anchorOffset;
    clonedFirstNode.__text = clonedFirstNode.__text.slice(startOffset, endOffset);
    const key = clonedFirstNode.getKey();
    return {
      nodeMap: [[key, clonedFirstNode]],
      range: [key]
    };
  }
  const nodes = selection.getNodes();
  if (nodes.length === 0) {
    return {
      nodeMap: [],
      range: []
    };
  } // Check if we can use the parent of the nodes, if the
  // parent can't be empty, then it's important that we
  // also copy that element node along with its children.
  let nodesLength = nodes.length;
  const firstNode = nodes[0];
  const firstNodeParent = firstNode.getParent();
  if (firstNodeParent !== null && !firstNodeParent.canBeEmpty()) {
    const parentChildren = firstNodeParent.__children;
    const parentChildrenLength = parentChildren.length;
    // Only pull in the parent when the selection covers ALL of its children,
    // in the same order.
    if (parentChildrenLength === nodesLength) {
      let areTheSame = true;
      for (let i = 0; i < parentChildren.length; i++) {
        if (parentChildren[i] !== nodes[i].__key) {
          areTheSame = false;
          break;
        }
      }
      if (areTheSame) {
        nodesLength++;
        nodes.push(firstNodeParent);
      }
    }
  }
  const lastNode = nodes[nodesLength - 1];
  const isBefore = anchor.isBefore(focus);
  const nodeMap = new Map();
  const range = []; // Do first node to root
  $copyLeafNodeBranchToRoot(firstNode, isBefore ? anchorOffset : focusOffset, true, range, nodeMap); // Copy all nodes between
  for (let i = 0; i < nodesLength; i++) {
    const node = nodes[i];
    const key = node.getKey();
    // Skip nodes already cloned by the branch walks and excluded elements.
    if (!nodeMap.has(key) && (!lexical.$isElementNode(node) || !node.excludeFromCopy())) {
      const clone = $cloneWithProperties(node);
      if (lexical.$isRootNode(node.getParent())) {
        range.push(node.getKey());
      }
      nodeMap.set(key, clone);
    }
  } // Do last node to root
  $copyLeafNodeBranchToRoot(lastNode, isBefore ? focusOffset : anchorOffset, false, range, nodeMap);
  return {
    nodeMap: Array.from(nodeMap.entries()),
    range
  };
}
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Returns the node a caret move in the given direction would land on, when
 * the focus sits at an element child index or at a text-node boundary;
 * otherwise returns null.
 *
 * @param focus      the selection focus point ({type, offset, getNode})
 * @param isBackward true when moving toward the document start
 */
function getPossibleDecoratorNode(focus, isBackward) {
  const offset = focus.offset;
  const focusNode = focus.getNode();
  if (focus.type === 'element') {
    // Element point: the adjacent child in the move direction.
    return focusNode.getChildAtIndex(isBackward ? offset - 1 : offset);
  }
  const atBoundary = isBackward
    ? offset === 0
    : offset === focusNode.getTextContentSize();
  if (!atBoundary) {
    return null;
  }
  // Text point at an edge: the sibling in the move direction.
  return isBackward ? focusNode.getPreviousSibling() : focusNode.getNextSibling();
}
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
// Heuristic: a <div> whose inline font-family mentions "monospace" is
// treated as pasted code (matches the original `match('monospace') !== null`).
const isCodeElement = div => /monospace/.test(div.style.fontFamily);
// Default DOM nodeName -> Lexical conversion table used when pasting HTML.
// Each entry returns a transform output with:
//   node:     the LexicalNode created for this DOM node, or null to hoist
//             its converted children up one level;
//   forChild: optional callback applied to every descendant LexicalNode
//             converted while this entry is on the ancestor path;
//   after:    optional post-processor over the converted child node list.
const DOM_NODE_NAME_TO_LEXICAL_NODE = {
  '#text': domNode => ({
    node: lexical.$createTextNode(domNode.textContent)
  }),
  a: domNode => {
    let node;
    if (domNode instanceof HTMLAnchorElement) {
      node = LinkNode.$createLinkNode(domNode.href);
    } else {
      // Not an HTMLAnchorElement (e.g. SVG <a>): fall back to plain text.
      node = lexical.$createTextNode(domNode.textContent);
    }
    return {
      node
    };
  },
  b: domNode => {
    // $FlowFixMe[incompatible-type] domNode is a <b> since we matched it by nodeName
    const b = domNode; // Google Docs wraps all copied HTML in a <b> with font-weight normal
    const hasNormalFontWeight = b.style.fontWeight === 'normal';
    return {
      forChild: lexicalNode => {
        if (lexical.$isTextNode(lexicalNode) && !hasNormalFontWeight) {
          lexicalNode.toggleFormat('bold');
        }
      },
      node: null
    };
  },
  br: () => ({
    node: lexical.$createLineBreakNode()
  }),
  div: domNode => {
    // $FlowFixMe[incompatible-type] domNode is a <div> since we matched it by nodeName
    const div = domNode;
    return {
      after: childLexicalNodes => {
        // Divs behave like line containers: add a line break between this
        // div's content and the following sibling's.
        const domParent = domNode.parentNode;
        if (domParent != null && domNode !== domParent.lastChild) {
          childLexicalNodes.push(lexical.$createLineBreakNode());
        }
        return childLexicalNodes;
      },
      node: isCodeElement(div) ? CodeNode.$createCodeNode() : null
    };
  },
  em: domNode => {
    return {
      forChild: lexicalNode => {
        if (lexical.$isTextNode(lexicalNode)) {
          lexicalNode.toggleFormat('italic');
        }
      },
      node: null
    };
  },
  h1: () => ({
    node: HeadingNode.$createHeadingNode('h1')
  }),
  h2: () => ({
    node: HeadingNode.$createHeadingNode('h2')
  }),
  h3: () => ({
    node: HeadingNode.$createHeadingNode('h3')
  }),
  h4: () => ({
    node: HeadingNode.$createHeadingNode('h4')
  }),
  h5: () => ({
    node: HeadingNode.$createHeadingNode('h5')
  }),
  // <i> is treated identically to <em>.
  i: domNode => {
    return {
      forChild: lexicalNode => {
        if (lexical.$isTextNode(lexicalNode)) {
          lexicalNode.toggleFormat('italic');
        }
      },
      node: null
    };
  },
  li: () => ({
    node: list.$createListItemNode()
  }),
  ol: () => ({
    node: list.$createListNode('ol')
  }),
  p: () => ({
    node: lexical.$createParagraphNode()
  }),
  pre: domNode => ({
    node: CodeNode.$createCodeNode()
  }),
  span: domNode => {
    // $FlowFixMe[incompatible-type] domNode is a <span> since we matched it by nodeName
    const span = domNode; // Google Docs uses span tags + font-weight for bold text
    const hasBoldFontWeight = span.style.fontWeight === '700';
    return {
      forChild: lexicalNode => {
        if (lexical.$isTextNode(lexicalNode) && hasBoldFontWeight) {
          lexicalNode.toggleFormat('bold');
        }
      },
      node: null
    };
  },
  strong: domNode => {
    return {
      forChild: lexicalNode => {
        if (lexical.$isTextNode(lexicalNode)) {
          lexicalNode.toggleFormat('bold');
        }
      },
      node: null
    };
  },
  table: domNode => {
    // $FlowFixMe[incompatible-type] domNode is a <table> since we matched it by nodeName
    const table = domNode;
    // GitHub renders code listings as tables with this class.
    const isGitHubCodeTable = table.classList.contains('js-file-line-container');
    return {
      node: isGitHubCodeTable ? CodeNode.$createCodeNode() : null
    };
  },
  td: domNode => {
    // $FlowFixMe[incompatible-type] domNode is a <td> since we matched it by nodeName
    const cell = domNode;
    const isGitHubCodeCell = cell.classList.contains('js-file-line');
    return {
      after: childLexicalNodes => {
        if (isGitHubCodeCell && cell.parentNode && cell.parentNode.nextSibling) {
          // Append newline between code lines
          childLexicalNodes.push(lexical.$createLineBreakNode());
        }
        return childLexicalNodes;
      },
      node: null
    };
  },
  u: domNode => {
    return {
      forChild: lexicalNode => {
        if (lexical.$isTextNode(lexicalNode)) {
          lexicalNode.toggleFormat('underline');
        }
      },
      node: null
    };
  },
  ul: () => ({
    node: list.$createListNode('ul')
  })
};
/**
 * Rebuilds LexicalNodes from a serialized {range, nodeMap} clipboard payload
 * (the shape produced by $cloneContents). Keys in `range` with no entry in
 * the map are skipped.
 *
 * @returns an array of top-level LexicalNodes in range order
 */
function $generateNodes(nodeRange) {
  const {
    range,
    nodeMap
  } = nodeRange;
  const parsedNodeMap = new Map(nodeMap);
  const nodes = [];
  for (const key of range) {
    const parsedNode = parsedNodeMap.get(key);
    if (parsedNode !== undefined) {
      nodes.push(lexical.$createNodeFromParse(parsedNode, parsedNodeMap));
    }
  }
  return nodes;
}
/**
 * Recursively converts a DOM node (and its subtree) into LexicalNodes using
 * `conversionMap`, with editor-level htmlTransforms taking precedence.
 *
 * @param node          the DOM node to convert
 * @param conversionMap nodeName -> transform table (see DOM_NODE_NAME_TO_LEXICAL_NODE)
 * @param editor        provides _config.htmlTransforms overrides
 * @param forChildMap   accumulated forChild callbacks from ancestors
 *                      (shared down the recursion)
 * @returns the LexicalNodes produced at this level
 */
function $createNodesFromDOM(node, conversionMap, editor, forChildMap = new Map()) {
  const nodeName = node.nodeName.toLowerCase();
  const customTransforms = editor._config.htmlTransforms || {};
  const transform = customTransforms[nodeName] || conversionMap[nodeName];
  const output = transform ? transform(node) : null;

  let lexicalNodes = [];
  let currentLexicalNode = null;
  let postTransform = null;
  if (output !== null) {
    postTransform = output.after;
    currentLexicalNode = output.node;
    if (currentLexicalNode !== null) {
      lexicalNodes.push(currentLexicalNode);
      // Apply ancestor forChild callbacks (collected before this node's own).
      for (const applyToChild of Array.from(forChildMap.values())) {
        applyToChild(currentLexicalNode);
      }
    }
    if (output.forChild != null) {
      forChildMap.set(nodeName, output.forChild);
    }
  }

  // Even without a transform for this node, its children must be processed.
  const children = node.childNodes;
  let childLexicalNodes = [];
  for (let i = 0; i < children.length; i++) {
    childLexicalNodes.push(...$createNodesFromDOM(children[i], conversionMap, editor, forChildMap));
  }
  if (postTransform != null) {
    childLexicalNodes = postTransform(childLexicalNodes);
  }

  if (currentLexicalNode == null) {
    // No LexicalNode for this DOM node: hoist children to this level.
    lexicalNodes = lexicalNodes.concat(childLexicalNodes);
  } else if (lexical.$isElementNode(currentLexicalNode)) {
    // ElementNodes adopt their converted children.
    currentLexicalNode.append(...childLexicalNodes);
  }
  return lexicalNodes;
}
/**
 * Converts every top-level child of `dom.body` into LexicalNodes via
 * $createNodesFromDOM and returns them as a flat array. Documents with no
 * body yield an empty array.
 */
function $generateNodesFromDOM(dom, conversionMap, editor) {
  const topLevelElements = dom.body ? Array.from(dom.body.childNodes) : [];
  const lexicalNodes = [];
  for (const element of topLevelElements) {
    const converted = $createNodesFromDOM(element, conversionMap, editor);
    if (converted !== null) {
      lexicalNodes.push(...converted);
    }
  }
  return lexicalNodes;
}
/**
 * Inserts clipboard content into `selection`, preferring the richest format
 * available: Lexical's own JSON payload first, then HTML, then plain text.
 *
 * @param dataTransfer clipboard DataTransfer
 * @param selection    the selection to insert into
 * @param editor       used for namespace matching and paste transforms
 */
function $insertDataTransferForRichText(dataTransfer, selection, editor) {
  const lexicalNodesString = dataTransfer.getData('application/x-lexical-editor');
  if (lexicalNodesString) {
    const namespace = editor._config.namespace;
    try {
      const lexicalClipboardData = JSON.parse(lexicalNodesString);
      // Only accept payloads copied from an editor with the same namespace.
      if (lexicalClipboardData.namespace === namespace) {
        const nodeRange = lexicalClipboardData.state;
        const nodes = $generateNodes(nodeRange);
        selection.insertNodes(nodes);
        return;
      }
    } catch (e) {// Malformed, missing nodes..
    }
  }
  const textHtmlMimeType = 'text/html';
  const htmlString = dataTransfer.getData(textHtmlMimeType);
  if (htmlString) {
    const parser = new DOMParser();
    const dom = parser.parseFromString(htmlString, textHtmlMimeType);
    const nodes = $generateNodesFromDOM(dom, DOM_NODE_NAME_TO_LEXICAL_NODE, editor); // Wrap text and inline nodes in paragraph nodes so we have all blocks at the top-level
    const topLevelBlocks = [];
    let currentBlock = null;
    for (let i = 0; i < nodes.length; i++) {
      const node = nodes[i];
      if (!lexical.$isElementNode(node) || node.isInline()) {
        // Consecutive inline/text nodes share one wrapping paragraph.
        if (currentBlock === null) {
          currentBlock = lexical.$createParagraphNode();
          topLevelBlocks.push(currentBlock);
        }
        if (currentBlock !== null) {
          currentBlock.append(node);
        }
      } else {
        topLevelBlocks.push(node);
        currentBlock = null;
      }
    }
    selection.insertNodes(topLevelBlocks);
    return;
  }
  // No rich formats available: fall back to plain text.
  $insertDataTransferForPlainText(dataTransfer, selection);
}
/**
 * Inserts the clipboard's plain-text payload into `selection` as raw text.
 * Does nothing when no text/plain data is present.
 */
function $insertDataTransferForPlainText(dataTransfer, selection) {
  const text = dataTransfer.getData('text/plain');
  if (text == null) {
    return;
  }
  selection.insertRawText(text);
}
/**
 * True when a character-wise caret move in the given direction would land on
 * a DecoratorNode, meaning the default browser behavior must be overridden.
 */
function $shouldOverrideDefaultCharacterSelection(selection, isBackward) {
  return lexical.$isDecoratorNode(
    getPossibleDecoratorNode(selection.focus, isBackward)
  );
}
/**
 * Paste handler for plain-text editors: suppresses the browser default and
 * inserts the clipboard's plain text inside an editor update.
 */
function onPasteForPlainText(event, editor) {
  event.preventDefault();
  editor.update(() => {
    lexical.$log('onPasteForPlainText');
    const selection = lexical.$getSelection();
    const clipboardData = event.clipboardData;
    if (selection !== null && clipboardData != null) {
      $insertDataTransferForPlainText(clipboardData, selection);
    }
  });
}
/**
 * Paste handler for rich-text editors: suppresses the browser default and
 * inserts the richest available clipboard format inside an editor update.
 */
function onPasteForRichText(event, editor) {
  event.preventDefault();
  editor.update(() => {
    lexical.$log('onPasteForRichText');
    const selection = lexical.$getSelection();
    const clipboardData = event.clipboardData;
    if (selection !== null && clipboardData != null) {
      $insertDataTransferForRichText(clipboardData, selection, editor);
    }
  });
}
/**
 * Cut handler for plain-text editors: performs the plain-text copy, then
 * deletes the selected text inside a follow-up editor update.
 */
function onCutForPlainText(event, editor) {
  onCopyForPlainText(event, editor);
  editor.update(() => {
    lexical.$log('onCutForPlainText');
    const selection = lexical.$getSelection();
    if (selection === null) {
      return;
    }
    selection.removeText();
  });
}
/**
 * Cut handler for rich-text editors: performs the rich-text copy, then
 * deletes the selected text inside a follow-up editor update.
 */
function onCutForRichText(event, editor) {
  onCopyForRichText(event, editor);
  editor.update(() => {
    lexical.$log('onCutForRichText');
    const selection = lexical.$getSelection();
    if (selection === null) {
      return;
    }
    selection.removeText();
  });
}
/**
 * Copy handler for plain-text editors. Writes text/html (from the DOM
 * selection's cloned contents) and text/plain to the clipboard. Does nothing
 * when the DOM selection is collapsed.
 */
function onCopyForPlainText(event, editor) {
  event.preventDefault();
  editor.update(() => {
    lexical.$log('onCopyForPlainText');
    const clipboardData = event.clipboardData;
    const selection = lexical.$getSelection();
    if (selection === null || clipboardData == null) {
      return;
    }
    const domSelection = window.getSelection();
    // If we haven't selected a range, then don't copy anything.
    if (domSelection.isCollapsed) {
      return;
    }
    const range = domSelection.getRangeAt(0);
    if (range) {
      // Serialize the selected DOM fragment to an HTML string.
      const container = document.createElement('div');
      container.appendChild(range.cloneContents());
      clipboardData.setData('text/html', container.innerHTML);
    }
    clipboardData.setData('text/plain', selection.getTextContent());
  });
}
/**
 * Copy handler for rich-text editors. Writes text/html, text/plain, and the
 * namespaced Lexical JSON payload (application/x-lexical-editor) to the
 * clipboard. Does nothing when the DOM selection is collapsed.
 */
function onCopyForRichText(event, editor) {
  event.preventDefault();
  editor.update(() => {
    lexical.$log('onCopyForRichText');
    const clipboardData = event.clipboardData;
    const selection = lexical.$getSelection();
    if (selection === null || clipboardData == null) {
      return;
    }
    const domSelection = window.getSelection();
    // If we haven't selected a range, then don't copy anything.
    if (domSelection.isCollapsed) {
      return;
    }
    const range = domSelection.getRangeAt(0);
    if (range) {
      // Serialize the selected DOM fragment to an HTML string.
      const container = document.createElement('div');
      container.appendChild(range.cloneContents());
      clipboardData.setData('text/html', container.innerHTML);
    }
    clipboardData.setData('text/plain', selection.getTextContent());
    // Lexical-native payload: lets same-namespace editors paste losslessly.
    clipboardData.setData('application/x-lexical-editor', JSON.stringify({
      namespace: editor._config.namespace,
      state: $cloneContents(selection)
    }));
  });
}
// Public CommonJS API of the clipboard/selection helper module.
exports.$createNodesFromDOM = $createNodesFromDOM;
exports.$insertDataTransferForPlainText = $insertDataTransferForPlainText;
exports.$insertDataTransferForRichText = $insertDataTransferForRichText;
exports.$shouldOverrideDefaultCharacterSelection = $shouldOverrideDefaultCharacterSelection;
exports.onCopyForPlainText = onCopyForPlainText;
exports.onCopyForRichText = onCopyForRichText;
exports.onCutForPlainText = onCutForPlainText;
exports.onCutForRichText = onCutForRichText;
exports.onPasteForPlainText = onPasteForPlainText;
exports.onPasteForRichText = onPasteForRichText;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// Minified production build of the clipboard/selection helpers above
// (B = $cloneWithProperties, C = $copyLeafNodeBranchToRoot, D = conversion
// table, E = $createNodesFromDOM, F/G = rich/plain insertDataTransfer,
// H/I = onCopyForPlainText/RichText). Identical behavior to the dev build.
'use strict';var e=require("lexical"),w=require("@lexical/list"),y=require("lexical/CodeNode"),z=require("lexical/HeadingNode"),A=require("lexical/LinkNode");
function B(a){a=a.getLatest();const b=a.constructor.clone(a);b.__parent=a.__parent;e.$isElementNode(a)&&e.$isElementNode(b)?(b.__children=Array.from(a.__children),b.__format=a.__format,b.__indent=a.__indent,b.__dir=a.__dir):e.$isTextNode(a)&&e.$isTextNode(b)?(b.__format=a.__format,b.__style=a.__style,b.__mode=a.__mode,b.__detail=a.__detail):e.$isDecoratorNode(a)&&e.$isDecoratorNode(b)&&(b.__state=a.__state);return b}
function C(a,b,c,f,h){for(var d=b;null!==a;){for(b=a.getParent();null!==b&&b.excludeFromCopy();)b=b.getParent();if(null===b)break;if(!e.$isElementNode(a)||!a.excludeFromCopy()){const k=a.getKey();let g=h.get(k);const l=void 0===g;l&&(g=B(a),h.set(k,g));!e.$isTextNode(g)||g.isSegmented()||g.isToken()?e.$isElementNode(g)&&(g.__children=g.__children.slice(c?d:0,c?void 0:d+1)):g.__text=g.__text.slice(c?d:0,c?void 0:d);if(e.$isRootNode(b)){l&&f.push(k);break}}d=h.get(b.getKey());d=e.$isElementNode(d)?
d.__children.indexOf(a.getKey()):a.getIndexWithinParent();a=b}}
const D={"#text":a=>({node:e.$createTextNode(a.textContent)}),a:a=>({node:a instanceof HTMLAnchorElement?A.$createLinkNode(a.href):e.$createTextNode(a.textContent)}),b:a=>{const b="normal"===a.style.fontWeight;return{forChild:c=>{e.$isTextNode(c)&&!b&&c.toggleFormat("bold")},node:null}},br:()=>({node:e.$createLineBreakNode()}),div:a=>({after:b=>{const c=a.parentNode;null!=c&&a!==c.lastChild&&b.push(e.$createLineBreakNode());return b},node:null!==a.style.fontFamily.match("monospace")?y.$createCodeNode():
null}),em:()=>({forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("italic")},node:null}),h1:()=>({node:z.$createHeadingNode("h1")}),h2:()=>({node:z.$createHeadingNode("h2")}),h3:()=>({node:z.$createHeadingNode("h3")}),h4:()=>({node:z.$createHeadingNode("h4")}),h5:()=>({node:z.$createHeadingNode("h5")}),i:()=>({forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("italic")},node:null}),li:()=>({node:w.$createListItemNode()}),ol:()=>({node:w.$createListNode("ol")}),p:()=>({node:e.$createParagraphNode()}),pre:()=>
({node:y.$createCodeNode()}),span:a=>{const b="700"===a.style.fontWeight;return{forChild:c=>{e.$isTextNode(c)&&b&&c.toggleFormat("bold")},node:null}},strong:()=>({forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("bold")},node:null}),table:a=>({node:a.classList.contains("js-file-line-container")?y.$createCodeNode():null}),td:a=>{const b=a.classList.contains("js-file-line");return{after:c=>{b&&a.parentNode&&a.parentNode.nextSibling&&c.push(e.$createLineBreakNode());return c},node:null}},u:()=>({forChild:a=>
{e.$isTextNode(a)&&a.toggleFormat("underline")},node:null}),ul:()=>({node:w.$createListNode("ul")})};
function E(a,b,c,f=new Map){let h=[],d=null;var k=a.nodeName.toLowerCase(),g=(c._config.htmlTransforms||{})[k]||b[k],l=g?g(a):null;g=null;if(null!==l){g=l.after;d=l.node;if(null!==d){h.push(d);const n=Array.from(f.values());for(let m=0;m<n.length;m++)n[m](d)}null!=l.forChild&&f.set(k,l.forChild)}a=a.childNodes;k=[];for(l=0;l<a.length;l++)k.push(...E(a[l],b,c,f));null!=g&&(k=g(k));null==d?h=h.concat(k):e.$isElementNode(d)&&d.append(...k);return h}
function F(a,b,c){var f=a.getData("application/x-lexical-editor");if(f){var h=c._config.namespace;try{const l=JSON.parse(f);if(l.namespace===h){const {range:n,nodeMap:m}=l.state;var d=new Map(m);f=[];for(h=0;h<n.length;h++){var k=d.get(n[h]);if(void 0!==k){var g=e.$createNodeFromParse(k,d);f.push(g)}}b.insertNodes(f);return}}catch(l){}}if(d=a.getData("text/html")){d=(new DOMParser).parseFromString(d,"text/html");a=[];d=d.body?Array.from(d.body.childNodes):[];k=d.length;for(g=0;g<k;g++)f=E(d[g],D,
c),null!==f&&(a=a.concat(f));c=a;a=[];d=null;for(k=0;k<c.length;k++)g=c[k],!e.$isElementNode(g)||g.isInline()?(null===d&&(d=e.$createParagraphNode(),a.push(d)),null!==d&&d.append(g)):(a.push(g),d=null);b.insertNodes(a)}else G(a,b)}function G(a,b){a=a.getData("text/plain");null!=a&&b.insertRawText(a)}
function H(a,b){a.preventDefault();b.update(()=>{e.$log("onCopyForPlainText");const c=a.clipboardData,f=e.$getSelection();if(null!==f&&null!=c){var h=window.getSelection();if(!h.isCollapsed){var d=h.getRangeAt(0);d&&(h=document.createElement("div"),d=d.cloneContents(),h.appendChild(d),c.setData("text/html",h.innerHTML));c.setData("text/plain",f.getTextContent())}}})}
function I(a,b){a.preventDefault();b.update(()=>{e.$log("onCopyForRichText");const c=a.clipboardData;var f=e.$getSelection();if(null!==f&&null!=c){var h=window.getSelection();if(!h.isCollapsed){var d=h.getRangeAt(0);d&&(h=document.createElement("div"),d=d.cloneContents(),h.appendChild(d),c.setData("text/html",h.innerHTML));c.setData("text/plain",f.getTextContent());h=b._config.namespace;d=c.setData;var k=JSON,g=k.stringify;{var l=f.anchor,n=f.focus;var m=l.getCharacterOffset();const x=n.getCharacterOffset();
var p=l.getNode(),t=n.getNode(),q=p.getParentOrThrow();if(p===t&&e.$isTextNode(p)&&(q.canBeEmpty()||1<q.getChildrenSize()))f=B(p),p=x>m,f.__text=f.__text.slice(p?m:x,p?x:m),m=f.getKey(),m={nodeMap:[[m,f]],range:[m]};else if(f=f.getNodes(),0===f.length)m={nodeMap:[],range:[]};else{p=f.length;t=f[0];q=t.getParent();if(null!==q&&!q.canBeEmpty()){var u=q.__children;if(u.length===p){var r=!0;for(var v=0;v<u.length;v++)if(u[v]!==f[v].__key){r=!1;break}r&&(p++,f.push(q))}}q=f[p-1];l=l.isBefore(n);n=new Map;
u=[];C(t,l?m:x,!0,u,n);for(t=0;t<p;t++)if(r=f[t],v=r.getKey(),!(n.has(v)||e.$isElementNode(r)&&r.excludeFromCopy())){const J=B(r);e.$isRootNode(r.getParent())&&u.push(r.getKey());n.set(v,J)}C(q,l?x:m,!1,u,n);m={nodeMap:Array.from(n.entries()),range:u}}}d.call(c,"application/x-lexical-editor",g.call(k,{namespace:h,state:m}))}}})}exports.$createNodesFromDOM=E;exports.$insertDataTransferForPlainText=G;exports.$insertDataTransferForRichText=F;
exports.$shouldOverrideDefaultCharacterSelection=function(a,b){var c=a.focus;a=c.offset;"element"===c.type?b=c.getNode().getChildAtIndex(b?a-1:a):(c=c.getNode(),b=b&&0===a||!b&&a===c.getTextContentSize()?b?c.getPreviousSibling():c.getNextSibling():null);return e.$isDecoratorNode(b)};exports.onCopyForPlainText=H;exports.onCopyForRichText=I;exports.onCutForPlainText=function(a,b){H(a,b);b.update(()=>{e.$log("onCutForPlainText");const c=e.$getSelection();null!==c&&c.removeText()})};
exports.onCutForRichText=function(a,b){I(a,b);b.update(()=>{e.$log("onCutForRichText");const c=e.$getSelection();null!==c&&c.removeText()})};exports.onPasteForPlainText=function(a,b){a.preventDefault();b.update(()=>{e.$log("onPasteForPlainText");const c=e.$getSelection(),f=a.clipboardData;null!=f&&null!==c&&G(f,c)})};exports.onPasteForRichText=function(a,b){a.preventDefault();b.update(()=>{e.$log("onPasteForRichText");const c=e.$getSelection(),f=a.clipboardData;null!=f&&null!==c&&F(f,c,b)})};
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| /** | ||
| * Copyright (c) Facebook, Inc. and its affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Prompts the user for a .lexical file and loads its serialized editorState
 * into `editor`, then clears the undo/redo history.
 * NOTE(review): JSON.parse errors propagate to the FileReader onload handler —
 * presumably intentional best-effort behavior; confirm before hardening.
 */
function importFile(editor) {
  readTextFileFromSystem(text => {
    const parsed = JSON.parse(text);
    const editorState = editor.parseEditorState(JSON.stringify(parsed.editorState));
    editor.setEditorState(editorState);
    editor.execCommand('clearHistory');
  });
}
/**
 * Opens a file picker restricted to .lexical files and reads the selected
 * file as UTF-8 text, invoking `callback` with the file's contents.
 *
 * @param {(text: string) => void} callback - receives the file text.
 */
function readTextFileFromSystem(callback) {
  const input = document.createElement('input');
  input.type = 'file';
  input.accept = '.lexical';
  input.addEventListener('change', e => {
    // $FlowFixMe
    const file = e.target.files[0];
    // Guard: a change event can fire with no file selected; the original
    // would pass undefined to readAsText and throw.
    if (file == null) {
      return;
    }
    const reader = new FileReader();
    // Attach the handler before starting the read so the load event can
    // never be missed.
    reader.onload = readerEvent => {
      // $FlowFixMe
      const content = readerEvent.target.result;
      callback(content);
    };
    reader.readAsText(file, 'UTF-8');
  });
  input.click();
}
/**
 * Serializes the editor's current state plus metadata (timestamp, source,
 * Lexical version) and triggers a browser download of a .lexical file.
 *
 * @param editor the LexicalEditor to export
 * @param config optional {source, fileName}; fileName defaults to the
 *               current ISO timestamp
 */
function exportFile(editor, config = Object.freeze({})) {
  const now = new Date();
  const documentJSON = {
    editorState: editor.getEditorState(),
    lastSaved: now.getTime(),
    source: config.source || 'Lexical',
    version: lexical.VERSION
  };
  const baseName = config.fileName || now.toISOString();
  exportBlob(documentJSON, `${baseName}.lexical`);
}
/**
 * Triggers a browser download of `data` (JSON-serialized) under `fileName`
 * via a hidden, temporary anchor pointing at an object URL; the URL and the
 * anchor are cleaned up afterwards.
 */
function exportBlob(data, fileName) {
  const anchor = document.createElement('a');
  const body = document.body;
  if (body === null) {
    return;
  }
  body.appendChild(anchor);
  anchor.style.display = 'none';
  const blob = new Blob([JSON.stringify(data)], {
    type: 'octet/stream',
  });
  const url = window.URL.createObjectURL(blob);
  anchor.href = url;
  anchor.download = fileName;
  anchor.click();
  window.URL.revokeObjectURL(url);
  anchor.remove();
}
// Public API of the file import/export helpers.
exports.exportFile = exportFile;
exports.importFile = importFile;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// Minified build of the file import/export helpers above (f is
// readTextFileFromSystem; exportBlob is inlined into exportFile).
'use strict';var e=require("lexical");function f(a){const b=document.createElement("input");b.type="file";b.accept=".lexical";b.addEventListener("change",c=>{c=c.target.files[0];const d=new FileReader;d.readAsText(c,"UTF-8");d.onload=g=>{a(g.target.result)}});b.click()}
exports.exportFile=function(a,b=Object.freeze({})){var c=new Date;a={editorState:a.getEditorState(),lastSaved:c.getTime(),source:b.source||"Lexical",version:e.VERSION};{b=`${b.fileName||c.toISOString()}.lexical`;c=document.createElement("a");const d=document.body;null!==d&&(d.appendChild(c),c.style.display="none",a=JSON.stringify(a),a=new Blob([a],{type:"octet/stream"}),a=window.URL.createObjectURL(a),c.href=a,c.download=b,c.click(),window.URL.revokeObjectURL(a),c.remove())}};
exports.importFile=function(a){f(b=>{b=JSON.parse(b);b=a.parseEditorState(JSON.stringify(b.editorState));a.setEditorState(b);a.execCommand("clearHistory")})};
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| var TableCellNode = require('lexical/TableCellNode'); | ||
| var TableNode = require('lexical/TableNode'); | ||
| var TableRowNode = require('lexical/TableRowNode'); | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Depth-first traversal starting at `startingNode`. `nextNode` is called for
 * every visited node; after the first call, its RETURN VALUE becomes the new
 * traversal cursor, so returning null terminates the walk early.
 */
function $dfs__DEPRECATED(startingNode, nextNode) {
  let current = startingNode;
  nextNode(current);
  while (current !== null) {
    if (lexical.$isElementNode(current) && current.getChildrenSize() > 0) {
      // Descend into the first child of a non-empty element.
      current = current.getFirstChild();
    } else {
      // Otherwise climb until a next sibling exists (or we run off the root).
      let next = null;
      while (next === null && current !== null) {
        next = current.getNextSibling();
        current = next === null ? current.getParent() : next;
      }
    }
    if (current !== null) {
      current = nextNode(current);
    }
  }
}
/**
 * Walks up the parent chain starting from `node` itself and returns the first
 * node that is an instance of `klass`, or null/undefined when the chain is
 * exhausted without a match.
 */
function $getNearestNodeOfType(node, klass) {
  let candidate = node;
  while (candidate != null && !(candidate instanceof klass)) {
    candidate = candidate.getParent();
  }
  return candidate;
}
/**
 * Converts a DOM node into a Lexical node using `conversionMap`, a dictionary
 * of lower-case tag names to factory functions. When the result is an element
 * node, the DOM subtree is converted recursively and appended. Returns null
 * when no conversion is registered for the tag.
 */
function $createLexicalNodeFromDOMNode(node, conversionMap) {
  const createFunction = conversionMap[node.nodeName.toLowerCase()];
  if (!createFunction) {
    return null;
  }
  const lexicalNode = createFunction(node);
  if (lexical.$isElementNode(lexicalNode)) {
    const childNodes = node.childNodes;
    for (let i = 0; i < childNodes.length; i++) {
      const convertedChild = $createLexicalNodeFromDOMNode(childNodes[i], conversionMap);
      if (convertedChild !== null) {
        lexicalNode.append(convertedChild);
      }
    }
  }
  return lexicalNode;
}
/**
 * Builds a TableNode of `rowCount` x `columnCount` cells. Every cell receives
 * an empty paragraph containing an empty text node so it is immediately
 * editable. When `includeHeader` is true, first-row cells are header cells.
 */
function $createTableNodeWithDimensions(rowCount, columnCount, includeHeader = true) {
  const tableNode = TableNode.$createTableNode();
  for (let row = 0; row < rowCount; row++) {
    const rowNode = TableRowNode.$createTableRowNode();
    for (let column = 0; column < columnCount; column++) {
      const cellNode = TableCellNode.$createTableCellNode(row === 0 && includeHeader);
      const paragraph = lexical.$createParagraphNode();
      paragraph.append(lexical.$createTextNode());
      cellNode.append(paragraph);
      rowNode.append(cellNode);
    }
    tableNode.append(rowNode);
  }
  return tableNode;
}
/**
 * Returns the first node — starting at `startingNode` and walking up the
 * parent chain, stopping before the root — for which `findFn` returns a
 * truthy value; null when nothing matches.
 */
function $findMatchingParent(startingNode, findFn) {
  let candidate = startingNode;
  while (candidate != null && candidate !== lexical.$getRoot()) {
    if (findFn(candidate)) {
      return candidate;
    }
    candidate = candidate.getParent();
  }
  return null;
}
/**
 * True when both the previous and next siblings of `node` are either absent,
 * line breaks, or simple text with a space on the side adjacent to `node`.
 */
function $areSiblingsNullOrSpace(node) {
  return $isPreviousSiblingNullOrSpace(node) && $isNextSiblingNullOrSpace(node);
}
/**
 * True when `node` has no previous sibling, the previous sibling is a line
 * break, or it is simple text whose content ends with a space.
 */
function $isPreviousSiblingNullOrSpace(node) {
  const sibling = node.getPreviousSibling();
  if (sibling === null || lexical.$isLineBreakNode(sibling)) {
    return true;
  }
  return lexical.$isTextNode(sibling) && sibling.isSimpleText() && sibling.getTextContent().endsWith(' ');
}
/**
 * True when `node` has no next sibling, the next sibling is a line break, or
 * it is simple text whose content starts with a space.
 */
function $isNextSiblingNullOrSpace(node) {
  const sibling = node.getNextSibling();
  if (sibling === null || lexical.$isLineBreakNode(sibling)) {
    return true;
  }
  return lexical.$isTextNode(sibling) && sibling.isSimpleText() && sibling.getTextContent().startsWith(' ');
}
// Public API of the node traversal/table helpers.
exports.$areSiblingsNullOrSpace = $areSiblingsNullOrSpace;
exports.$createLexicalNodeFromDOMNode = $createLexicalNodeFromDOMNode;
exports.$createTableNodeWithDimensions = $createTableNodeWithDimensions;
exports.$dfs__DEPRECATED = $dfs__DEPRECATED;
exports.$findMatchingParent = $findMatchingParent;
exports.$getNearestNodeOfType = $getNearestNodeOfType;
exports.$isNextSiblingNullOrSpace = $isNextSiblingNullOrSpace;
exports.$isPreviousSiblingNullOrSpace = $isPreviousSiblingNullOrSpace;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// Minified build of the node traversal/table helpers above
// (q = $createLexicalNodeFromDOMNode, r/t = previous/next sibling checks).
'use strict';var d=require("lexical"),g=require("lexical/TableCellNode"),h=require("lexical/TableNode"),k=require("lexical/TableRowNode");function q(a,b){let c=null;var e=b[a.nodeName.toLowerCase()];if(e&&(c=e(a),d.$isElementNode(c)))for(a=a.childNodes,e=0;e<a.length;e++){const f=q(a[e],b);null!==f&&c.append(f)}return c}function r(a){a=a.getPreviousSibling();return null===a||d.$isLineBreakNode(a)||d.$isTextNode(a)&&a.isSimpleText()&&a.getTextContent().endsWith(" ")}
function t(a){a=a.getNextSibling();return null===a||d.$isLineBreakNode(a)||d.$isTextNode(a)&&a.isSimpleText()&&a.getTextContent().startsWith(" ")}exports.$areSiblingsNullOrSpace=function(a){return r(a)&&t(a)};exports.$createLexicalNodeFromDOMNode=q;
exports.$createTableNodeWithDimensions=function(a,b,c=!0){const e=h.$createTableNode();for(let f=0;f<a;f++){const l=k.$createTableRowNode();for(let m=0;m<b;m++){const n=g.$createTableCellNode(0===f&&c),p=d.$createParagraphNode();p.append(d.$createTextNode());n.append(p);l.append(n)}e.append(l)}return e};
exports.$dfs__DEPRECATED=function(a,b){for(b(a);null!==a;){if(d.$isElementNode(a)&&0<a.getChildrenSize())a=a.getFirstChild();else{let c=null;for(;null===c&&null!==a;)c=a.getNextSibling(),a=null===c?a.getParent():c}null!==a&&(a=b(a))}};exports.$findMatchingParent=function(a,b){for(;a!==d.$getRoot()&&null!=a;){if(b(a))return a;a=a.getParent()}return null};exports.$getNearestNodeOfType=function(a,b){for(;null!=a&&!(a instanceof b);)a=a.getParent();return a};exports.$isNextSiblingNullOrSpace=t;
exports.$isPreviousSiblingNullOrSpace=r;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Read-only view mapping Lexical node keys to absolute integer offsets,
 * allowing selections to be captured and restored as [start, end] pairs.
 */
class OffsetView {
  // offsetMap: Map of node key -> internal offset-node record.
  // firstNode: head of the linked list of top-level offset nodes (or null).
  // blockOffsetSize: offset units consumed by an element block boundary.
  constructor(offsetMap, firstNode, blockOffsetSize = 1) {
    this._offsetMap = offsetMap;
    this._firstNode = firstNode;
    this._blockOffsetSize = blockOffsetSize;
  }
  // Converts a [start, end] offset pair back into a RangeSelection against
  // the current editor state. When `diffOffsetView` (a previous OffsetView)
  // is supplied, the offsets are first adjusted for nodes inserted or removed
  // since that view was built. Returns null whenever the offsets cannot be
  // resolved to live nodes.
  createSelectionFromOffsets(originalStart, originalEnd, diffOffsetView) {
    const firstNode = this._firstNode;
    if (firstNode === null) {
      return null;
    }
    let start = originalStart;
    let end = originalEnd;
    let startOffsetNode = $searchForNodeWithOffset(firstNode, start, this._blockOffsetSize);
    let endOffsetNode = $searchForNodeWithOffset(firstNode, end, this._blockOffsetSize);
    if (diffOffsetView !== undefined) {
      // Re-resolve each endpoint after adjusting it against the diff view.
      start = $getAdjustedOffsetFromDiff(start, startOffsetNode, diffOffsetView, this, this._blockOffsetSize);
      startOffsetNode = $searchForNodeWithOffset(firstNode, start, this._blockOffsetSize);
      end = $getAdjustedOffsetFromDiff(end, endOffsetNode, diffOffsetView, this, this._blockOffsetSize);
      endOffsetNode = $searchForNodeWithOffset(firstNode, end, this._blockOffsetSize);
    }
    if (startOffsetNode === null || endOffsetNode === null) {
      return null;
    }
    let startKey = startOffsetNode.key;
    let endKey = endOffsetNode.key;
    const startNode = lexical.$getNodeByKey(startKey);
    const endNode = lexical.$getNodeByKey(endKey);
    if (startNode === null || endNode === null) {
      return null;
    }
    let startOffset = 0;
    let endOffset = 0;
    let startType = 'element';
    let endType = 'element';
    if (startOffsetNode.type === 'text') {
      startOffset = start - startOffsetNode.start;
      startType = 'text'; // If we are at the edge of a text node and we
      // don't have a collapsed selection, then let's
      // try and correct the offset node.
      const sibling = startNode.getNextSibling();
      if (start !== end && startOffset === startNode.getTextContentSize() && lexical.$isTextNode(sibling)) {
        startOffset = 0;
        startKey = sibling.__key;
      }
    } else if (startOffsetNode.type === 'inline') {
      // Inline (non-text) nodes anchor the selection to their parent element.
      startKey = startNode.getParentOrThrow().getKey();
      startOffset = end > startOffsetNode.start ? startOffsetNode.end : startOffsetNode.start;
    }
    if (endOffsetNode.type === 'text') {
      endOffset = end - endOffsetNode.start;
      endType = 'text';
    } else if (endOffsetNode.type === 'inline') {
      endKey = endNode.getParentOrThrow().getKey();
      endOffset = end > endOffsetNode.start ? endOffsetNode.end : endOffsetNode.start;
    }
    const selection = lexical.$createRangeSelection();
    if (selection === null) {
      return null;
    }
    selection.anchor.set(startKey, startOffset, startType);
    selection.focus.set(endKey, endOffset, endType);
    return selection;
  }
  // Converts `selection` into a [start, end] offset pair relative to this
  // view. Endpoints that cannot be found in the offset map yield -1.
  getOffsetsFromSelection(selection) {
    const anchor = selection.anchor;
    const focus = selection.focus;
    const offsetMap = this._offsetMap;
    const anchorOffset = anchor.offset;
    const focusOffset = focus.offset;
    let start = -1;
    let end = -1;
    if (anchor.type === 'text') {
      const offsetNode = offsetMap.get(anchor.key);
      if (offsetNode !== undefined) {
        start = offsetNode.start + anchorOffset;
      }
    } else {
      // Element point: resolve the child at the point's index and use its
      // start (or end when the point sits past that child).
      const node = anchor.getNode().getDescendantByIndex(anchorOffset);
      const offsetNode = offsetMap.get(node.getKey());
      if (offsetNode !== undefined) {
        const isAtEnd = node.getIndexWithinParent() !== anchorOffset;
        start = isAtEnd ? offsetNode.end : offsetNode.start;
      }
    }
    if (focus.type === 'text') {
      const offsetNode = offsetMap.get(focus.key);
      if (offsetNode !== undefined) {
        end = offsetNode.start + focus.offset;
      }
    } else {
      const node = focus.getNode().getDescendantByIndex(focusOffset);
      const offsetNode = offsetMap.get(node.getKey());
      if (offsetNode !== undefined) {
        const isAtEnd = node.getIndexWithinParent() !== focusOffset;
        end = isAtEnd ? offsetNode.end : offsetNode.start;
      }
    }
    return [start, end];
  }
}
/**
 * Adjusts `offset` (expressed against `prevOffsetView`) so that it points at
 * the equivalent position in `offsetView`, by summing the size deltas of
 * `offsetNode`, its preceding siblings, and ancestors' preceding siblings
 * between the two views.
 */
function $getAdjustedOffsetFromDiff(offset, offsetNode, prevOffsetView, offsetView, blockOffsetSize) {
  const prevOffsetMap = prevOffsetView._offsetMap;
  const offsetMap = offsetView._offsetMap;
  const visited = new Set();
  let adjustedOffset = offset;
  let currentNode = offsetNode;
  while (currentNode !== null) {
    const key = currentNode.key;
    const prevNode = prevOffsetMap.get(key);
    const diff = currentNode.end - currentNode.start;
    visited.add(key);
    if (prevNode === undefined) {
      // Node is new in the current view: its whole size is added content.
      adjustedOffset += diff;
    } else {
      const prevDiff = prevNode.end - prevNode.start;
      if (prevDiff !== diff) {
        adjustedOffset += diff - prevDiff;
      }
    }
    const sibling = currentNode.prev;
    if (sibling !== null) {
      currentNode = sibling;
      continue;
    }
    // No earlier sibling: account for the preceding siblings of each
    // ancestor, then stop the backward walk.
    let parent = currentNode.parent;
    while (parent !== null) {
      let parentSibling = parent.prev;
      if (parentSibling !== null) {
        const parentSiblingKey = parentSibling.key;
        const prevParentSibling = prevOffsetMap.get(parentSiblingKey);
        const parentDiff = parentSibling.end - parentSibling.start;
        visited.add(parentSiblingKey);
        if (prevParentSibling === undefined) {
          adjustedOffset += parentDiff;
        } else {
          const prevParentDiff = prevParentSibling.end - prevParentSibling.start;
          if (prevParentDiff !== parentDiff) {
            adjustedOffset += parentDiff - prevParentDiff;
          }
        }
        // NOTE(review): this advances a local copy only; siblings beyond the
        // immediate `parent.prev` are never visited — confirm intended.
        parentSibling = parentSibling.prev;
      }
      parent = parent.parent;
    }
    break;
  } // Now traverse through the old offsets nodes and find any nodes we missed
  // above, because they were not in the latest offset node view (they have been
  // deleted).
  const prevFirstNode = prevOffsetView._firstNode;
  if (prevFirstNode !== null) {
    currentNode = $searchForNodeWithOffset(prevFirstNode, offset, blockOffsetSize);
    let alreadyVisistedParentOfCurrentNode = false;
    while (currentNode !== null) {
      if (!visited.has(currentNode.key)) {
        alreadyVisistedParentOfCurrentNode = true;
        break;
      }
      currentNode = currentNode.parent;
    }
    // NOTE(review): when the flag stays false, `currentNode` is already null
    // here, so the loop below never runs; when true, the branch is skipped.
    // This looks like dead code (or an inverted flag) — confirm upstream.
    if (!alreadyVisistedParentOfCurrentNode) {
      while (currentNode !== null) {
        const key = currentNode.key;
        if (!visited.has(key)) {
          const node = offsetMap.get(key);
          const prevDiff = currentNode.end - currentNode.start;
          if (node === undefined) {
            // Node was deleted since the previous view: subtract its size.
            adjustedOffset -= prevDiff;
          } else {
            const diff = node.end - node.start;
            if (prevDiff !== diff) {
              adjustedOffset += diff - prevDiff;
            }
          }
        }
        currentNode = currentNode.prev;
      }
    }
  }
  return adjustedOffset;
}
| function $searchForNodeWithOffset(firstNode, offset, blockOffsetSize) { | ||
| let currentNode = firstNode; | ||
| while (currentNode !== null) { | ||
| const end = currentNode.end + (currentNode.type !== 'element' || blockOffsetSize === 0 ? 1 : 0); | ||
| if (offset < end) { | ||
| const child = currentNode.child; | ||
| if (child !== null) { | ||
| currentNode = child; | ||
| continue; | ||
| } | ||
| return currentNode; | ||
| } | ||
| const sibling = currentNode.next; | ||
| if (sibling === null) { | ||
| break; | ||
| } | ||
| currentNode = sibling; | ||
| } | ||
| return null; | ||
| } | ||
/**
 * Creates a fresh, unlinked offset-node record. `next` and `prev` always
 * start as null; the caller wires up the sibling links afterwards.
 */
function $createInternalOffsetNode(child, type, start, end, key, parent) {
  return {
    child,
    end,
    key,
    next: null,
    parent,
    prev: null,
    start,
    type,
  };
}
/**
 * Builds the offset node for the Lexical node with `key`, advancing
 * `state.offset` as it goes (mutually recursive with $createOffsetChild for
 * element children), and records it in `offsetMap`. Throws if `key` is not
 * present in `nodeMap`.
 */
function $createOffsetNode(state, key, parent, nodeMap, offsetMap, blockOffsetSize) {
  const node = nodeMap.get(key);
  if (node === undefined) {
    {
      throw Error(`createOffsetModel: could not find node by key`);
    }
  }
  const start = state.offset;
  if (lexical.$isElementNode(node)) {
    const childKeys = node.__children;
    const blockIsEmpty = childKeys.length === 0;
    const child = blockIsEmpty ? null : $createOffsetChild(state, childKeys, null, nodeMap, offsetMap, blockOffsetSize); // If the prev node was not a block or the block is empty, we should
    // account for the user being able to selection the block (due to the \n).
    if (!state.prevIsBlock || blockIsEmpty) {
      state.prevIsBlock = true;
      state.offset += blockOffsetSize;
    }
    const offsetNode = $createInternalOffsetNode(child, 'element', start, start, key, parent);
    if (child !== null) {
      // Children were built with a null parent placeholder; fix it up here.
      child.parent = offsetNode;
    }
    const end = state.offset;
    offsetNode.end = end;
    offsetMap.set(key, offsetNode);
    return offsetNode;
  }
  state.prevIsBlock = false;
  const isText = lexical.$isTextNode(node); // $FlowFixMe: isText means __text is available
  // Text nodes span their text length; other leaves (inline) span 1 unit.
  const length = isText ? node.__text.length : 1;
  const end = state.offset += length;
  const offsetNode = $createInternalOffsetNode(null, isText ? 'text' : 'inline', start, end, key, parent);
  offsetMap.set(key, offsetNode);
  return offsetNode;
}
/**
 * Creates offset nodes for `children` (an array of node keys) and links them
 * into a doubly linked sibling list. Returns the head of the list, or null
 * when `children` is empty.
 */
function $createOffsetChild(state, children, parent, nodeMap, offsetMap, blockOffsetSize) {
  let head = null;
  let tail = null;
  for (const childKey of children) {
    const offsetNode = $createOffsetNode(state, childKey, parent, nodeMap, offsetMap, blockOffsetSize);
    if (tail === null) {
      head = offsetNode;
    } else {
      offsetNode.prev = tail;
      tail.next = offsetNode;
    }
    tail = offsetNode;
  }
  return head;
}
/**
 * Builds an OffsetView over `editorState` when given, otherwise over the
 * editor's pending (or committed) state. `blockOffsetSize` controls how many
 * offset units an element block boundary occupies.
 */
function $createOffsetView(editor, blockOffsetSize = 1, editorState) {
  const targetState = editorState || editor._pendingEditorState || editor._editorState;
  const nodeMap = targetState._nodeMap;
  // $FlowFixMe: root is always in the Map
  const rootNode = nodeMap.get('root');
  const offsetMap = new Map();
  const traversalState = {
    offset: 0,
    prevIsBlock: false,
  };
  const firstNode = $createOffsetChild(traversalState, rootNode.__children, null, nodeMap, offsetMap, blockOffsetSize);
  return new OffsetView(offsetMap, firstNode, blockOffsetSize);
}
// Public API of the offset-view module.
exports.$createOffsetView = $createOffsetView;
exports.OffsetView = OffsetView;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// Minified build of the offset-view module above (t = OffsetView,
// y = $searchForNodeWithOffset, z = $getAdjustedOffsetFromDiff,
// A = merged $createOffsetNode/$createOffsetChild).
'use strict';var q=require("lexical");
class t{constructor(b,a,c=1){this._offsetMap=b;this._firstNode=a;this._blockOffsetSize=c}createSelectionFromOffsets(b,a,c){var e=this._firstNode;if(null===e)return null;var f=y(e,b,this._blockOffsetSize);let h=y(e,a,this._blockOffsetSize);void 0!==c&&(b=z(b,f,c,this,this._blockOffsetSize),f=y(e,b,this._blockOffsetSize),a=z(a,h,c,this,this._blockOffsetSize),h=y(e,a,this._blockOffsetSize));if(null===f||null===h)return null;c=f.key;e=h.key;const m=q.$getNodeByKey(c),k=q.$getNodeByKey(e);if(null===m||
null===k)return null;let l=0,d=0,g="element",n="element";"text"===f.type?(l=b-f.start,g="text",f=m.getNextSibling(),b!==a&&l===m.getTextContentSize()&&q.$isTextNode(f)&&(l=0,c=f.__key)):"inline"===f.type&&(c=m.getParentOrThrow().getKey(),l=a>f.start?f.end:f.start);"text"===h.type?(d=a-h.start,n="text"):"inline"===h.type&&(e=k.getParentOrThrow().getKey(),d=a>h.start?h.end:h.start);b=q.$createRangeSelection();if(null===b)return null;b.anchor.set(c,l,g);b.focus.set(e,d,n);return b}getOffsetsFromSelection(b){var a=
b.anchor,c=b.focus,e=this._offsetMap;const f=a.offset;var h=c.offset;let m=b=-1;if("text"===a.type)a=e.get(a.key),void 0!==a&&(b=a.start+f);else{a=a.getNode().getDescendantByIndex(f);const k=e.get(a.getKey());void 0!==k&&(b=a.getIndexWithinParent()!==f?k.end:k.start)}"text"===c.type?(h=e.get(c.key),void 0!==h&&(m=h.start+c.offset)):(c=c.getNode().getDescendantByIndex(h),e=e.get(c.getKey()),void 0!==e&&(m=c.getIndexWithinParent()!==h?e.end:e.start));return[b,m]}}
function z(b,a,c,e,f){const h=c._offsetMap;e=e._offsetMap;const m=new Set;let k=b;for(;null!==a;){var l=a.key,d=h.get(l),g=a.end-a.start;m.add(l);void 0===d?k+=g:(l=d.end-d.start,l!==g&&(k+=g-l));g=a.prev;if(null!==g)a=g;else{for(a=a.parent;null!==a;)d=a.prev,null!==d&&(g=d.key,l=h.get(g),d=d.end-d.start,m.add(g),void 0===l?k+=d:(g=l.end-l.start,g!==d&&(k+=d-g))),a=a.parent;break}}c=c._firstNode;if(null!==c){a=y(c,b,f);for(b=!1;null!==a;){if(!m.has(a.key)){b=!0;break}a=a.parent}if(!b)for(;null!==
a;)b=a.key,m.has(b)||(f=e.get(b),b=a.end-a.start,void 0===f?k-=b:(f=f.end-f.start,b!==f&&(k+=f-b))),a=a.prev}return k}function y(b,a,c){for(;null!==b;){if(a<b.end+("element"!==b.type||0===c?1:0)){const e=b.child;if(null!==e){b=e;continue}return b}b=b.next;if(null===b)break}return null}
function A(b,a,c,e,f,h){let m=null,k=null;const l=a.length;for(let v=0;v<l;v++){{var d=b;var g=a[v];var n=c,r=e,w=f,u=h,p=r.get(g);if(void 0===p)throw Error("Minified Lexical error #3; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");const x=d.offset;if(q.$isElementNode(p)){const B=p.__children;r=(p=0===B.length)?null:A(d,B,null,r,w,u);if(!d.prevIsBlock||p)d.prevIsBlock=!0,d.offset+=u;n={child:r,end:x,key:g,next:null,parent:n,
prev:null,start:x,type:"element"};null!==r&&(r.parent=n);n.end=d.offset;w.set(g,n);g=n}else d.prevIsBlock=!1,u=q.$isTextNode(p),d={child:null,end:d.offset+=u?p.__text.length:1,key:g,next:null,parent:n,prev:null,start:x,type:u?"text":"inline"},w.set(g,d),g=d}null===k?m=g:(g.prev=k,k.next=g);k=g}return m}
exports.$createOffsetView=function(b,a=1,c){c=(c||b._pendingEditorState||b._editorState)._nodeMap;const e=c.get("root");b=new Map;c=A({offset:0,prevIsBlock:!1},e.__children,null,c,b,a);return new t(b,c,a)};exports.OffsetView=t;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Returns the full text content of the editor's root node. Must be invoked
 * inside an editor read/update context.
 */
function $textContent() {
  return lexical.$getRoot().getTextContent();
}
// Alias kept for backward compatibility with the curried naming convention.
const $textContentCurry = $textContent;
/**
 * True when the editor's text content is empty — after trimming whitespace
 * unless `trim` is false. Always false while the editor is composing (IME),
 * since composition text is transient.
 */
function $isTextContentEmpty(isEditorComposing, trim = true) {
  if (isEditorComposing) {
    return false;
  }
  const text = $textContent();
  return (trim ? text.trim() : text) === '';
}
/**
 * Returns a zero-argument closure over $isTextContentEmpty with the given
 * arguments, for deferred evaluation inside an editor read.
 */
function $isTextContentEmptyCurry(isEditorComposing, trim) {
  const check = () => $isTextContentEmpty(isEditorComposing, trim);
  return check;
}
/**
 * Whether the placeholder may be shown: the editor's text content must be
 * empty (untrimmed) and the root must hold at most one unindented paragraph
 * whose children are all text nodes.
 */
function $canShowPlaceholder(isComposing) {
  if (!$isTextContentEmpty(isComposing, false)) {
    return false;
  }
  const root = lexical.$getRoot();
  const children = root.getChildren();
  const childrenLength = children.length;
  if (childrenLength > 1) {
    return false;
  }
  for (let i = 0; i < childrenLength; i++) {
    const topBlock = children[i];
    if (lexical.$isElementNode(topBlock)) {
      if (topBlock.__type !== 'paragraph') {
        return false;
      }
      if (topBlock.__indent !== 0) {
        return false;
      }
      const topBlockChildren = topBlock.getChildren();
      const topBlockChildrenLength = topBlockChildren.length;
      for (let s = 0; s < topBlockChildrenLength; s++) {
        // Bug fix: this previously indexed with the OUTER counter `i`, so
        // only the paragraph's first child was ever inspected (repeatedly);
        // non-text children past index 0 slipped through.
        const child = topBlockChildren[s];
        if (!lexical.$isTextNode(child)) {
          return false;
        }
      }
    }
  }
  return true;
}
/**
 * Returns a zero-argument closure over $canShowPlaceholder for deferred
 * evaluation inside an editor read.
 */
function $canShowPlaceholderCurry(isEditorComposing) {
  const check = () => $canShowPlaceholder(isEditorComposing);
  return check;
}
// Public API of the text-content helpers.
exports.$canShowPlaceholder = $canShowPlaceholder;
exports.$canShowPlaceholderCurry = $canShowPlaceholderCurry;
exports.$isTextContentEmpty = $isTextContentEmpty;
exports.$isTextContentEmptyCurry = $isTextContentEmptyCurry;
exports.$textContent = $textContent;
exports.$textContentCurry = $textContentCurry;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// Minified build of the text-content helpers above (f = $textContent,
// g = $isTextContentEmpty, h = $canShowPlaceholder).
// Bug fix: h's inner loop previously tested d.$isTextNode(b[e]) — the OUTER
// loop index — so only the paragraph's first child was ever checked; it now
// uses the inner index k, matching the corrected readable source.
'use strict';var d=require("lexical");function f(){return d.$getRoot().getTextContent()}function g(a,c=!0){if(a)return!1;a=f();c&&(a=a.trim());return""===a}function h(a){if(!g(a,!1))return!1;a=d.$getRoot().getChildren();const c=a.length;if(1<c)return!1;for(let e=0;e<c;e++){var b=a[e];if(d.$isElementNode(b)){if("paragraph"!==b.__type||0!==b.__indent)return!1;b=b.getChildren();const l=b.length;for(let k=0;k<l;k++)if(!d.$isTextNode(b[k]))return!1}}return!0}exports.$canShowPlaceholder=h;
exports.$canShowPlaceholderCurry=function(a){return()=>h(a)};exports.$isTextContentEmpty=g;exports.$isTextContentEmptyCurry=function(a,c){return()=>g(a,c)};exports.$textContent=f;exports.$textContentCurry=f;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
// Module-level cache mapping serialized CSS text -> parsed style object,
// shared by getStyleObjectFromCSS and $patchNodeStyle below.
const cssToStyles = new Map();
/**
 * Clones the latest version of `node` via its class's static `clone()` and
 * copies across the internal fields that `clone()` does not carry: the parent
 * pointer plus the element-, text-, or decorator-specific properties.
 */
function $cloneWithProperties(node) {
  const latest = node.getLatest();
  const clone = latest.constructor.clone(latest);
  clone.__parent = latest.__parent;
  if (lexical.$isElementNode(latest) && lexical.$isElementNode(clone)) {
    // Copy the child-key list so the clone owns an independent array.
    clone.__children = Array.from(latest.__children);
    clone.__format = latest.__format;
    clone.__indent = latest.__indent;
    clone.__dir = latest.__dir;
  } else if (lexical.$isTextNode(latest) && lexical.$isTextNode(clone)) {
    clone.__format = latest.__format;
    clone.__style = latest.__style;
    clone.__mode = latest.__mode;
    clone.__detail = latest.__detail;
  } else if (lexical.$isDecoratorNode(latest) && lexical.$isDecoratorNode(clone)) {
    clone.__state = latest.__state;
  }
  // $FlowFixMe
  return clone;
}
/**
 * Index of `node` within `parent`, preferring the cloned parent's child list
 * in `nodeMap` (whose children may already have been sliced) and falling back
 * to the live tree.
 */
function $getIndexFromPossibleClone(node, parent, nodeMap) {
  const clonedParent = nodeMap.get(parent.getKey());
  return lexical.$isElementNode(clonedParent)
    ? clonedParent.__children.indexOf(node.getKey())
    : node.getIndexWithinParent();
}
/**
 * Nearest ancestor of `node` that is not marked `excludeFromCopy()`, or null
 * when the parent chain is exhausted.
 */
function $getParentAvoidingExcludedElements(node) {
  let ancestor = node.getParent();
  while (ancestor !== null && ancestor.excludeFromCopy()) {
    ancestor = ancestor.getParent();
  }
  return ancestor;
}
/**
 * Copies the branch from `leaf` up to (but excluding) the root into
 * `nodeMap`, slicing each clone at `startingOffset` so only the selected side
 * (`isLeftSide` = keep everything from the offset onward; otherwise keep
 * everything up to it) survives. Top-level clone keys are pushed onto
 * `range`. Mutates `nodeMap` and `range` in place.
 */
function $copyLeafNodeBranchToRoot(leaf, startingOffset, isLeftSide, range, nodeMap) {
  let node = leaf;
  let offset = startingOffset;
  while (node !== null) {
    const parent = $getParentAvoidingExcludedElements(node);
    if (parent === null) {
      break;
    }
    if (!lexical.$isElementNode(node) || !node.excludeFromCopy()) {
      const key = node.getKey();
      let clone = nodeMap.get(key);
      const needsClone = clone === undefined;
      if (needsClone) {
        clone = $cloneWithProperties(node);
        nodeMap.set(key, clone);
      }
      // Trim the clone's content on the selection side: text by character
      // offset, elements by child index (inclusive on the right side).
      if (lexical.$isTextNode(clone) && !clone.isSegmented() && !clone.isToken()) {
        clone.__text = clone.__text.slice(isLeftSide ? offset : 0, isLeftSide ? undefined : offset);
      } else if (lexical.$isElementNode(clone)) {
        clone.__children = clone.__children.slice(isLeftSide ? offset : 0, isLeftSide ? undefined : offset + 1);
      }
      if (lexical.$isRootNode(parent)) {
        if (needsClone) {
          // We only want to collect a range of top level nodes.
          // So if the parent is the root, we know this is a top level.
          range.push(key);
        }
        break;
      }
    }
    // The offset for the next level up is this node's index in its parent
    // (using the clone's possibly-sliced child list when available).
    offset = $getIndexFromPossibleClone(node, parent, nodeMap);
    node = parent;
  }
}
/**
 * Clones the content covered by `selection` into a serializable structure:
 * `nodeMap` is an array of [key, clonedNode] entries and `range` lists the
 * keys of the top-level cloned nodes in order.
 */
function $cloneContents(selection) {
  const anchor = selection.anchor;
  const focus = selection.focus;
  const anchorOffset = anchor.getCharacterOffset();
  const focusOffset = focus.getCharacterOffset();
  const anchorNode = anchor.getNode();
  const focusNode = focus.getNode();
  const anchorNodeParent = anchorNode.getParentOrThrow(); // Handle a single text node extraction
  if (anchorNode === focusNode && lexical.$isTextNode(anchorNode) && (anchorNodeParent.canBeEmpty() || anchorNodeParent.getChildrenSize() > 1)) {
    const clonedFirstNode = $cloneWithProperties(anchorNode);
    const isBefore = focusOffset > anchorOffset;
    const startOffset = isBefore ? anchorOffset : focusOffset;
    const endOffset = isBefore ? focusOffset : anchorOffset;
    clonedFirstNode.__text = clonedFirstNode.__text.slice(startOffset, endOffset);
    const key = clonedFirstNode.getKey();
    return {
      nodeMap: [[key, clonedFirstNode]],
      range: [key]
    };
  }
  const nodes = selection.getNodes();
  if (nodes.length === 0) {
    return {
      nodeMap: [],
      range: []
    };
  } // Check if we can use the parent of the nodes, if the
  // parent can't be empty, then it's important that we
  // also copy that element node along with its children.
  let nodesLength = nodes.length;
  const firstNode = nodes[0];
  const firstNodeParent = firstNode.getParent();
  if (firstNodeParent !== null && !firstNodeParent.canBeEmpty()) {
    const parentChildren = firstNodeParent.__children;
    const parentChildrenLength = parentChildren.length;
    if (parentChildrenLength === nodesLength) {
      // Only include the parent when the selection covers exactly its
      // children, in the same order.
      let areTheSame = true;
      for (let i = 0; i < parentChildren.length; i++) {
        if (parentChildren[i] !== nodes[i].__key) {
          areTheSame = false;
          break;
        }
      }
      if (areTheSame) {
        nodesLength++;
        nodes.push(firstNodeParent);
      }
    }
  }
  const lastNode = nodes[nodesLength - 1];
  const isBefore = anchor.isBefore(focus);
  const nodeMap = new Map();
  const range = []; // Do first node to root
  $copyLeafNodeBranchToRoot(firstNode, isBefore ? anchorOffset : focusOffset, true, range, nodeMap); // Copy all nodes between
  for (let i = 0; i < nodesLength; i++) {
    const node = nodes[i];
    const key = node.getKey();
    if (!nodeMap.has(key) && (!lexical.$isElementNode(node) || !node.excludeFromCopy())) {
      const clone = $cloneWithProperties(node);
      if (lexical.$isRootNode(node.getParent())) {
        range.push(node.getKey());
      }
      nodeMap.set(key, clone);
    }
  } // Do last node to root
  $copyLeafNodeBranchToRoot(lastNode, isBefore ? focusOffset : anchorOffset, false, range, nodeMap);
  return {
    nodeMap: Array.from(nodeMap.entries()),
    range
  };
}
/**
 * Looks up the style object previously cached for the given CSS string in
 * the module-level `cssToStyles` map (populated by $patchNodeStyle).
 * Returns null when the CSS string has never been cached.
 */
function getStyleObjectFromCSS(css) {
  const cached = cssToStyles.get(css);
  return cached || null;
}
/**
 * Serializes a style object ({property: value}) into an inline CSS string,
 * e.g. {color: 'red'} -> 'color: red;'. Properties with an empty-string
 * name are skipped, matching the original truthiness guard on the key.
 *
 * Fix: iterate own enumerable properties via Object.entries instead of
 * `for...in`, which would also walk inherited enumerable properties.
 */
function getCSSFromStyleObject(styles) {
  let css = '';
  for (const [property, value] of Object.entries(styles)) {
    if (property) {
      css += `${property}: ${value};`;
    }
  }
  return css;
}
/**
 * Merges `patch` on top of the node's current cached style object, writes
 * the resulting CSS string back onto the node, and records the merged
 * object in the module-level cssToStyles cache keyed by that CSS string.
 */
function $patchNodeStyle(node, patch) {
  const prevStyles = getStyleObjectFromCSS(node.getStyle());
  let newStyles;
  if (prevStyles) {
    newStyles = { ...prevStyles, ...patch };
  } else {
    // Nothing cached for this CSS yet; the patch is the whole style object.
    newStyles = patch;
  }
  const newCSSText = getCSSFromStyleObject(newStyles);
  node.setStyle(newCSSText);
  cssToStyles.set(newCSSText, newStyles);
}
/**
 * Applies the CSS style `patch` (via $patchNodeStyle) to every text node
 * covered by the range selection. Boundary text nodes that are only
 * partially covered are split with splitText() so only the selected slice
 * is restyled. Collapsed selections are a no-op.
 */
function $patchStyleText(selection, patch) {
  const selectedNodes = selection.getNodes();
  const selectedNodesLength = selectedNodes.length;
  const lastIndex = selectedNodesLength - 1;
  let firstNode = selectedNodes[0];
  let lastNode = selectedNodes[lastIndex];
  // Nothing is selected, so there is nothing to style.
  if (selection.isCollapsed()) {
    return;
  }
  const anchor = selection.anchor;
  const focus = selection.focus;
  const firstNodeText = firstNode.getTextContent();
  const firstNodeTextLength = firstNodeText.length;
  const focusOffset = focus.offset;
  let anchorOffset = anchor.offset;
  let startOffset;
  let endOffset;
  // Normalize offsets so start <= end regardless of selection direction.
  const isBefore = anchor.isBefore(focus);
  startOffset = isBefore ? anchorOffset : focusOffset;
  endOffset = isBefore ? focusOffset : anchorOffset;
  // This is the case where the user only selected the very end of the
  // first node so we don't want to include it in the formatting change.
  if (startOffset === firstNode.getTextContentSize()) {
    const nextSibling = firstNode.getNextSibling();
    if (lexical.$isTextNode(nextSibling)) {
      // We basically make the second node the firstNode, changing offsets accordingly.
      anchorOffset = 0;
      startOffset = 0;
      firstNode = nextSibling;
    }
  }
  // This is the case where we only selected a single node.
  if (firstNode.is(lastNode)) {
    if (lexical.$isTextNode(firstNode)) {
      startOffset = anchorOffset > focusOffset ? focusOffset : anchorOffset;
      endOffset = anchorOffset > focusOffset ? anchorOffset : focusOffset;
      // No actual text is selected, so do nothing.
      if (startOffset === endOffset) {
        return;
      }
      // The entire node is selected, so just format it.
      if (startOffset === 0 && endOffset === firstNodeTextLength) {
        $patchNodeStyle(firstNode, patch);
        firstNode.select(startOffset, endOffset);
      } else {
        // The node is partially selected, so split it into two nodes
        // and style the selected one.
        const splitNodes = firstNode.splitText(startOffset, endOffset);
        const replacement = startOffset === 0 ? splitNodes[0] : splitNodes[1];
        $patchNodeStyle(replacement, patch);
        replacement.select(0, endOffset - startOffset);
      }
    }
    // Multiple nodes selected.
  } else {
    if (lexical.$isTextNode(firstNode)) {
      if (startOffset !== 0) {
        // The entire first node isn't selected, so split it.
        [, firstNode] = firstNode.splitText(startOffset);
        startOffset = 0;
      }
      $patchNodeStyle(firstNode, patch);
    }
    if (lexical.$isTextNode(lastNode)) {
      const lastNodeText = lastNode.getTextContent();
      const lastNodeTextLength = lastNodeText.length;
      // If the entire last node isn't selected, split it.
      if (endOffset !== lastNodeTextLength) {
        [lastNode] = lastNode.splitText(endOffset);
      }
      if (endOffset !== 0) {
        $patchNodeStyle(lastNode, patch);
      }
    }
    // Style all the text nodes in between, skipping the boundary nodes
    // (already handled above) and nodes flagged as tokens.
    for (let i = 1; i < lastIndex; i++) {
      const selectedNode = selectedNodes[i];
      const selectedNodeKey = selectedNode.getKey();
      if (lexical.$isTextNode(selectedNode) && selectedNodeKey !== firstNode.getKey() && selectedNodeKey !== lastNode.getKey() && !selectedNode.isToken()) {
        $patchNodeStyle(selectedNode, patch);
      }
    }
  }
}
/**
 * Returns the value of `styleProperty` shared by every text node in the
 * selection. Falls back to `defaultValue` when no text node is selected,
 * and returns '' when the selected text nodes disagree on the value.
 */
function $getSelectionStyleValueForProperty(selection, styleProperty, defaultValue = '') {
  const nodes = selection.getNodes();
  const anchor = selection.anchor;
  const focus = selection.focus;
  const isBackward = selection.isBackward();
  const endOffset = isBackward ? focus.offset : anchor.offset;
  const endNode = isBackward ? focus.getNode() : anchor.getNode();
  let styleValue = null;
  for (let i = 0; i < nodes.length; i++) {
    const node = nodes[i];
    // If no actual characters in the end node are selected, we don't
    // include it in the selection for purposes of determining style value.
    const isUnselectedEndNode = i !== 0 && endOffset === 0 && node.is(endNode);
    if (isUnselectedEndNode || !lexical.$isTextNode(node)) {
      continue;
    }
    const nodeStyleValue = $getNodeStyleValueForProperty(node, styleProperty, defaultValue);
    if (styleValue === null) {
      styleValue = nodeStyleValue;
    } else if (styleValue !== nodeStyleValue) {
      // Multiple text nodes are in the selection and they don't all
      // agree on the value; report the ambiguous empty string.
      styleValue = '';
      break;
    }
  }
  return styleValue === null ? defaultValue : styleValue;
}
/**
 * Reads a single CSS property value from a node's cached style object,
 * returning `defaultValue` when the node's CSS has no cached object or
 * the property is missing/falsy.
 */
function $getNodeStyleValueForProperty(node, styleProperty, defaultValue) {
  const styleObject = getStyleObjectFromCSS(node.getStyle());
  if (styleObject === null) {
    return defaultValue;
  }
  return styleObject[styleProperty] || defaultValue;
}
/**
 * Moves (or, when shift is held, extends) the caret selection by the given
 * granularity (e.g. 'character') in the requested direction, delegating to
 * the selection's modify() method.
 */
function $moveCaretSelection(selection, isHoldingShift, isBackward, granularity) {
  const alter = isHoldingShift ? 'extend' : 'move';
  selection.modify(alter, isBackward, granularity);
}
/**
 * Returns true when the parent element of the selection's anchor node is
 * marked right-to-left.
 */
function $isParentElementRTL(selection) {
  const parent = selection.anchor.getNode().getParentOrThrow();
  return parent.getDirection() === 'rtl';
}
/**
 * Moves the caret by one character, flipping the logical direction when the
 * anchor's parent element is RTL so arrow-key movement stays visual.
 */
function $moveCharacter(selection, isHoldingShift, isBackward) {
  const isRTL = $isParentElementRTL(selection);
  const moveBackward = isBackward ? !isRTL : isRTL;
  $moveCaretSelection(selection, isHoldingShift, moveBackward, 'character');
}
/**
 * Expands the selection to cover the whole document: the anchor is placed
 * at offset 0 of the root's first descendant and the focus at the end of
 * its last descendant. Non-text, non-element descendants are replaced by
 * their parent element for point placement.
 */
function $selectAll(selection) {
  const { anchor, focus } = selection;
  const root = anchor.getNode().getTopLevelElementOrThrow().getParentOrThrow();
  let firstNode = root.getFirstDescendant();
  let lastNode = root.getLastDescendant();
  let firstType = 'element';
  let lastType = 'element';
  let lastOffset = 0;
  if (lexical.$isTextNode(firstNode)) {
    firstType = 'text';
  } else if (firstNode !== null && !lexical.$isElementNode(firstNode)) {
    // e.g. a decorator: anchor on its parent element instead.
    firstNode = firstNode.getParentOrThrow();
  }
  if (lexical.$isTextNode(lastNode)) {
    lastType = 'text';
    lastOffset = lastNode.getTextContentSize();
  } else if (lastNode !== null && !lexical.$isElementNode(lastNode)) {
    lastNode = lastNode.getParentOrThrow();
    lastOffset = lastNode.getChildrenSize();
  }
  if (firstNode && lastNode) {
    anchor.set(firstNode.getKey(), 0, firstType);
    focus.set(lastNode.getKey(), lastOffset, lastType);
  }
}
/**
 * Walks up from `startingNode` towards (but not including) the root,
 * removing each ancestor-chain node whose latest version has no children.
 */
function $removeParentEmptyElements(startingNode) {
  let current = startingNode;
  while (current !== null && !lexical.$isRootNode(current)) {
    // Grab the parent before a potential remove() detaches this node.
    const parentNode = current.getParent();
    if (current.getLatest().__children.length === 0) {
      current.remove();
    }
    current = parentNode;
  }
}
/**
 * Re-parents the leaf nodes of the selection into fresh elements produced
 * by `createElement` (e.g. converting paragraphs to headings). When
 * `wrappingElement` is given, the created elements are appended inside it
 * and the wrapper is inserted instead. Marks the selection dirty when done.
 */
function $wrapLeafNodesInElements(selection, createElement, wrappingElement) {
  const nodes = selection.getNodes();
  const nodesLength = nodes.length;
  // Empty selection: replace the anchor's element (or the anchor node
  // itself for element points) with a freshly created element holding
  // the same children.
  if (nodesLength === 0) {
    const anchor = selection.anchor;
    const target = anchor.type === 'text' ? anchor.getNode().getParentOrThrow() : anchor.getNode();
    const children = target.getChildren();
    let element = createElement();
    children.forEach(child => element.append(child));
    if (wrappingElement) {
      element = wrappingElement.append(element);
    }
    target.replace(element);
    return;
  }
  const firstNode = nodes[0];
  const elementMapping = new Map();
  const elements = [];
  // The below logic is to find the right target for us to
  // either insertAfter/insertBefore/append the corresponding
  // elements to. This is made more complicated due to nested
  // structures.
  let target = lexical.$isElementNode(firstNode) ? firstNode : firstNode.getParentOrThrow();
  while (target !== null) {
    const prevSibling = target.getPreviousSibling();
    if (prevSibling !== null) {
      target = prevSibling;
      break;
    }
    target = target.getParentOrThrow();
    if (lexical.$isRootNode(target)) {
      break;
    }
  }
  // Find any top level empty elements.
  const emptyElements = new Set();
  for (let i = 0; i < nodesLength; i++) {
    const node = nodes[i];
    if (lexical.$isElementNode(node) && node.getChildrenSize() === 0) {
      emptyElements.add(node.getKey());
    }
  }
  // Move out all leaf nodes into our elements array.
  // If we find a top level empty element, also move make
  // an element for that.
  const movedLeafNodes = new Set();
  for (let i = 0; i < nodesLength; i++) {
    const node = nodes[i];
    const parent = node.getParent();
    if (parent !== null && lexical.$isLeafNode(node) && !movedLeafNodes.has(node.getKey())) {
      const parentKey = parent.getKey();
      // One new element per source parent; siblings move together.
      if (elementMapping.get(parentKey) === undefined) {
        const targetElement = createElement();
        elements.push(targetElement);
        elementMapping.set(parentKey, targetElement);
        // Move node and its siblings to the new element.
        parent.getChildren().forEach(child => {
          targetElement.append(child);
          movedLeafNodes.add(child.getKey());
        });
        // The old parent (and now-empty ancestors) are cleaned up.
        $removeParentEmptyElements(parent);
      }
    } else if (emptyElements.has(node.getKey())) {
      elements.push(createElement());
      node.remove();
    }
  }
  if (wrappingElement) {
    for (let i = 0; i < elements.length; i++) {
      const element = elements[i];
      wrappingElement.append(element);
    }
  }
  // If our target is the root, let's see if we can re-adjust
  // so that the target is the first child instead.
  if (lexical.$isRootNode(target)) {
    const firstChild = target.getFirstChild();
    if (lexical.$isElementNode(firstChild)) {
      target = firstChild;
    }
    if (firstChild === null) {
      // Root is empty: append directly.
      if (wrappingElement) {
        target.append(wrappingElement);
      } else {
        for (let i = 0; i < elements.length; i++) {
          const element = elements[i];
          target.append(element);
        }
      }
    } else {
      // Insert ahead of the root's first child.
      if (wrappingElement) {
        firstChild.insertBefore(wrappingElement);
      } else {
        for (let i = 0; i < elements.length; i++) {
          const element = elements[i];
          firstChild.insertBefore(element);
        }
      }
    }
  } else {
    // Insert after the found sibling target; reverse order keeps the
    // elements in their original document order.
    if (wrappingElement) {
      target.insertAfter(wrappingElement);
    } else {
      for (let i = elements.length - 1; i >= 0; i--) {
        const element = elements[i];
        target.insertAfter(element);
      }
    }
  }
  selection.dirty = true;
}
/**
 * Returns true when `point` sits at the very end of its node: after the
 * last character for text points, or after the last child for element
 * points.
 */
function $isAtNodeEnd(point) {
  const node = point.getNode();
  if (point.type === 'text') {
    return point.offset === node.getTextContentSize();
  }
  return point.offset === node.getChildrenSize();
}
// Public API of the selection-helpers module.
exports.$cloneContents = $cloneContents;
exports.$getSelectionStyleValueForProperty = $getSelectionStyleValueForProperty;
exports.$isAtNodeEnd = $isAtNodeEnd;
exports.$isParentElementRTL = $isParentElementRTL;
exports.$moveCaretSelection = $moveCaretSelection;
exports.$moveCharacter = $moveCharacter;
exports.$patchStyleText = $patchStyleText;
exports.$selectAll = $selectAll;
exports.$wrapLeafNodesInElements = $wrapLeafNodesInElements;
exports.getStyleObjectFromCSS = getStyleObjectFromCSS;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// NOTE(review): Closure-minified production build of the selection helpers
// above. From the exports, the minified names appear to map to:
// v=$cloneWithProperties, w=$copyLeafNodeBranchToRoot, x=getStyleObjectFromCSS,
// y=$patchNodeStyle, z=$moveCaretSelection, A=$isParentElementRTL,
// B=$removeParentEmptyElements. Generated output — do not edit by hand;
// the mid-expression line breaks are compiler line wrapping.
'use strict';var l=require("lexical");const r=new Map;function v(a){a=a.getLatest();const c=a.constructor.clone(a);c.__parent=a.__parent;l.$isElementNode(a)&&l.$isElementNode(c)?(c.__children=Array.from(a.__children),c.__format=a.__format,c.__indent=a.__indent,c.__dir=a.__dir):l.$isTextNode(a)&&l.$isTextNode(c)?(c.__format=a.__format,c.__style=a.__style,c.__mode=a.__mode,c.__detail=a.__detail):l.$isDecoratorNode(a)&&l.$isDecoratorNode(c)&&(c.__state=a.__state);return c}
function w(a,c,b,g,k){for(var e=c;null!==a;){for(c=a.getParent();null!==c&&c.excludeFromCopy();)c=c.getParent();if(null===c)break;if(!l.$isElementNode(a)||!a.excludeFromCopy()){const d=a.getKey();let f=k.get(d);const h=void 0===f;h&&(f=v(a),k.set(d,f));!l.$isTextNode(f)||f.isSegmented()||f.isToken()?l.$isElementNode(f)&&(f.__children=f.__children.slice(b?e:0,b?void 0:e+1)):f.__text=f.__text.slice(b?e:0,b?void 0:e);if(l.$isRootNode(c)){h&&g.push(d);break}}e=k.get(c.getKey());e=l.$isElementNode(e)?
e.__children.indexOf(a.getKey()):a.getIndexWithinParent();a=c}}function x(a){return r.get(a)||null}function y(a,c){var b=x(a.getStyle());c=b?{...b,...c}:c;b="";for(g in c)g&&(b+=`${g}: ${c[g]};`);var g=b;a.setStyle(g);r.set(g,c)}function z(a,c,b,g){a.modify(c?"extend":"move",b,g)}function A(a){return"rtl"===a.anchor.getNode().getParentOrThrow().getDirection()}function B(a){for(;null!==a&&!l.$isRootNode(a);){const c=a.getLatest(),b=a.getParent();0===c.__children.length&&a.remove();a=b}}
exports.$cloneContents=function(a){var c=a.anchor,b=a.focus,g=c.getCharacterOffset();const k=b.getCharacterOffset();var e=c.getNode(),d=b.getNode(),f=e.getParentOrThrow();if(e===d&&l.$isTextNode(e)&&(f.canBeEmpty()||1<f.getChildrenSize()))return a=v(e),e=k>g,a.__text=a.__text.slice(e?g:k,e?k:g),g=a.getKey(),{nodeMap:[[g,a]],range:[g]};a=a.getNodes();if(0===a.length)return{nodeMap:[],range:[]};e=a.length;d=a[0];f=d.getParent();if(null!==f&&!f.canBeEmpty()){var h=f.__children;if(h.length===e){var m=
!0;for(var n=0;n<h.length;n++)if(h[n]!==a[n].__key){m=!1;break}m&&(e++,a.push(f))}}f=a[e-1];c=c.isBefore(b);b=new Map;h=[];w(d,c?g:k,!0,h,b);for(d=0;d<e;d++)if(m=a[d],n=m.getKey(),!(b.has(n)||l.$isElementNode(m)&&m.excludeFromCopy())){const t=v(m);l.$isRootNode(m.getParent())&&h.push(m.getKey());b.set(n,t)}w(f,c?k:g,!1,h,b);return{nodeMap:Array.from(b.entries()),range:h}};
exports.$getSelectionStyleValueForProperty=function(a,c,b=""){let g=null;const k=a.getNodes();var e=a.anchor,d=a.focus,f=a.isBackward();a=f?d.offset:e.offset;e=f?d.getNode():e.getNode();for(d=0;d<k.length;d++){var h=k[d];if((0===d||0!==a||!h.is(e))&&l.$isTextNode(h)){f=c;var m=b;h=h.getStyle();h=x(h);f=null!==h?h[f]||m:m;if(null===g)g=f;else if(g!==f){g="";break}}}return null===g?b:g};exports.$isAtNodeEnd=function(a){return"text"===a.type?a.offset===a.getNode().getTextContentSize():a.offset===a.getNode().getChildrenSize()};
exports.$isParentElementRTL=A;exports.$moveCaretSelection=z;exports.$moveCharacter=function(a,c,b){const g=A(a);z(a,c,b?!g:g,"character")};
exports.$patchStyleText=function(a,c){var b=a.getNodes();const g=b.length-1;let k=b[0],e=b[g];if(!a.isCollapsed()){var d=a.anchor,f=a.focus;a=k.getTextContent().length;var h=f.offset,m=d.offset;d=(f=d.isBefore(f))?m:h;f=f?h:m;if(d===k.getTextContentSize()){const n=k.getNextSibling();l.$isTextNode(n)&&(d=m=0,k=n)}if(k.is(e))l.$isTextNode(k)&&(d=m>h?h:m,f=m>h?m:h,d!==f&&(0===d&&f===a?(y(k,c),k.select(d,f)):(b=k.splitText(d,f),b=0===d?b[0]:b[1],y(b,c),b.select(0,f-d))));else for(l.$isTextNode(k)&&(0!==
d&&([,k]=k.splitText(d)),y(k,c)),l.$isTextNode(e)&&(a=e.getTextContent().length,f!==a&&([e]=e.splitText(f)),0!==f&&y(e,c)),a=1;a<g;a++)h=b[a],m=h.getKey(),l.$isTextNode(h)&&m!==k.getKey()&&m!==e.getKey()&&!h.isToken()&&y(h,c)}};
exports.$selectAll=function(a){const c=a.anchor;a=a.focus;var b=c.getNode().getTopLevelElementOrThrow().getParentOrThrow();let g=b.getFirstDescendant();b=b.getLastDescendant();let k="element",e="element",d=0;l.$isTextNode(g)?k="text":l.$isElementNode(g)||null===g||(g=g.getParentOrThrow());l.$isTextNode(b)?(e="text",d=b.getTextContentSize()):l.$isElementNode(b)||null===b||(b=b.getParentOrThrow(),d=b.getChildrenSize());g&&b&&(c.set(g.getKey(),0,k),a.set(b.getKey(),d,e))};
exports.$wrapLeafNodesInElements=function(a,c,b){const g=a.getNodes(),k=g.length;if(0===k){a=a.anchor;a="text"===a.type?a.getNode().getParentOrThrow():a.getNode();var e=a.getChildren();let p=c();e.forEach(u=>p.append(u));b&&(p=b.append(p));a.replace(p)}else{var d=g[0],f=new Map;e=[];for(d=l.$isElementNode(d)?d:d.getParentOrThrow();null!==d;){var h=d.getPreviousSibling();if(null!==h){d=h;break}d=d.getParentOrThrow();if(l.$isRootNode(d))break}h=new Set;for(var m=0;m<k;m++){var n=g[m];l.$isElementNode(n)&&
0===n.getChildrenSize()&&h.add(n.getKey())}var t=new Set;for(m=0;m<k;m++){var q=g[m];n=q.getParent();if(null!==n&&l.$isLeafNode(q)&&!t.has(q.getKey())){if(q=n.getKey(),void 0===f.get(q)){const p=c();e.push(p);f.set(q,p);n.getChildren().forEach(u=>{p.append(u);t.add(u.getKey())});B(n)}}else h.has(q.getKey())&&(e.push(c()),q.remove())}if(b)for(c=0;c<e.length;c++)b.append(e[c]);if(l.$isRootNode(d))if(c=d.getFirstChild(),l.$isElementNode(c)&&(d=c),null===c)if(b)d.append(b);else for(b=0;b<e.length;b++)d.append(e[b]);
else if(b)c.insertBefore(b);else for(b=0;b<e.length;b++)c.insertBefore(e[b]);else if(b)d.insertAfter(b);else for(b=e.length-1;0<=b;b--)d.insertAfter(e[b]);a.dirty=!0}};exports.getStyleObjectFromCSS=x;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
| 'use strict'; | ||
| var lexical = require('lexical'); | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| * | ||
| * | ||
| */ | ||
/**
 * Depth-first walk of the tree under `root`, accumulating text-node sizes
 * until `targetCharacters` is passed. Returns {node, offset} identifying
 * the text node containing that character position, or null when the tree
 * holds fewer characters.
 */
function $findTextIntersectionFromCharacters(root, targetCharacters) {
  let node = root.getFirstChild();
  let seenCharacters = 0;
  outer: while (node !== null) {
    if (lexical.$isElementNode(node)) {
      // Descend into elements first.
      const firstChild = node.getFirstChild();
      if (firstChild !== null) {
        node = firstChild;
        continue;
      }
    } else if (lexical.$isTextNode(node)) {
      const size = node.getTextContentSize();
      if (seenCharacters + size > targetCharacters) {
        return {
          node,
          offset: targetCharacters - seenCharacters
        };
      }
      seenCharacters += size;
    }
    // Advance: next sibling, else climb until an ancestor has one.
    const sibling = node.getNextSibling();
    if (sibling !== null) {
      node = sibling;
      continue;
    }
    let ancestor = node.getParent();
    while (ancestor !== null) {
      const ancestorSibling = ancestor.getNextSibling();
      if (ancestorSibling !== null) {
        node = ancestorSibling;
        continue outer;
      }
      ancestor = ancestor.getParent();
    }
    break;
  }
  return null;
}
// Return text content for child text nodes. Each non-text node is separated by input string.
| // Caution, this function creates a string and should not be used within a tight loop. | ||
| // Use $getNodeWithOffsetsFromJoinedTextNodesFromElementNode below to convert | ||
| // indexes in the return string back into their corresponding node and offsets. | ||
/**
 * Concatenates the text of an element node's children into one string.
 * Non-text children contribute `separator` instead of their content. The
 * walk stops at `stopAt.node`, including only its first `stopAt.offset`
 * characters; an offset past that node's length is an invariant violation.
 *
 * Caution: builds a string — avoid calling inside a tight loop.
 */
function $joinTextNodesInElementNode(elementNode, separator, stopAt) {
  const parts = [];
  for (const child of elementNode.getChildren()) {
    if (!lexical.$isTextNode(child)) {
      parts.push(separator);
      continue;
    }
    const childTextContent = child.getTextContent();
    if (!child.is(stopAt.node)) {
      parts.push(childTextContent);
      continue;
    }
    if (stopAt.offset > childTextContent.length) {
      throw Error(`Node ${child.__key} and selection point do not match.`);
    }
    parts.push(childTextContent.substr(0, stopAt.offset));
    break;
  }
  return parts.join('');
}
// This function converts the offsetInJoinedText to
| // a node and offset result or null if not found. | ||
| // This function is to be used in conjunction with joinTextNodesInElementNode above. | ||
| // The joinedTextContent should be return value from joinTextNodesInElementNode. | ||
| // | ||
| // The offsetInJoinedText is relative to the entire string which | ||
| // itself is relevant to the parent ElementNode. | ||
| // | ||
| // Example: | ||
| // Given a Paragraph with 2 TextNodes. The first is Hello, the second is World. | ||
| // The joinedTextContent would be "HelloWorld" | ||
| // The offsetInJoinedText might be for the letter "e" = 1 or "r" = 7. | ||
| // The return values would be {TextNode1, 1} or {TextNode2,2}, respectively. | ||
/**
 * Maps an offset inside the joined text (see $joinTextNodesInElementNode)
 * back to the text node and local offset it lands in. Each non-text child
 * accounts for `separatorLength` characters. Returns null when the offset
 * does not fall inside a text node within `joinedTextLength`.
 */
function $findNodeWithOffsetFromJoinedText(elementNode, joinedTextLength, offsetInJoinedText, separatorLength) {
  const children = elementNode.getChildren();
  let consumed = 0;
  for (let i = 0; i < children.length; ++i) {
    if (consumed >= joinedTextLength) {
      break;
    }
    const child = children[i];
    const isText = lexical.$isTextNode(child);
    const childLength = isText ? child.getTextContent().length : separatorLength;
    const nextConsumed = consumed + childLength;
    if (isText && consumed <= offsetInJoinedText && offsetInJoinedText < nextConsumed) {
      return {
        node: child,
        offset: offsetInJoinedText - consumed
      };
    }
    consumed = nextConsumed;
  }
  return null;
}
// Public API of the text-helpers module.
exports.$findNodeWithOffsetFromJoinedText = $findNodeWithOffsetFromJoinedText;
exports.$findTextIntersectionFromCharacters = $findTextIntersectionFromCharacters;
exports.$joinTextNodesInElementNode = $joinTextNodesInElementNode;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
// NOTE(review): Closure-minified production build of the text helpers above
// ($findNodeWithOffsetFromJoinedText, $findTextIntersectionFromCharacters,
// $joinTextNodesInElementNode). "Minified Lexical error #50" replaces the
// dev build's "Node ... and selection point do not match." message.
// Generated output — do not edit by hand.
'use strict';var h=require("lexical");exports.$findNodeWithOffsetFromJoinedText=function(c,f,a,b){c=c.getChildren();const l=c.length;let d=0;for(let g=0;g<l&&!(d>=f);++g){const k=c[g];var e=h.$isTextNode(k)?k.getTextContent().length:b;e=d+e;if(d<=a&&a<e&&h.$isTextNode(k))return{node:k,offset:a-d};d=e}return null};
exports.$findTextIntersectionFromCharacters=function(c,f){var a=c.getFirstChild();c=0;a:for(;null!==a;){if(h.$isElementNode(a)){var b=a.getFirstChild();if(null!==b){a=b;continue}}else if(h.$isTextNode(a)){b=a.getTextContentSize();if(c+b>f)return{node:a,offset:f-c};c+=b}b=a.getNextSibling();if(null!==b)a=b;else{for(a=a.getParent();null!==a;){b=a.getNextSibling();if(null!==b){a=b;continue a}a=a.getParent()}break}}return null};
exports.$joinTextNodesInElementNode=function(c,f,a){let b="";c=c.getChildren();const l=c.length;for(let d=0;d<l;++d){const e=c[d];if(h.$isTextNode(e)){const g=e.getTextContent();if(e.is(a.node)){if(a.offset>g.length)throw Error("Minified Lexical error #50; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");b+=e.getTextContent().substr(0,a.offset);break}else b+=g}else b+=f}return b};
+9
-1
@@ -1,1 +0,9 @@ | ||
// NOTE(review): minified production build of the class-name helpers.
// addClassNamesToElement skips null/non-string arguments and splits each
// class string on spaces before classList.add; removeClassNamesFromElement
// splits and removes. Generated output — do not edit by hand.
'use strict';exports.addClassNamesToElement=function(b,...c){c.forEach(a=>{null!=a&&"string"===typeof a&&b.classList.add(...a.split(" "))})};exports.removeClassNamesFromElement=function(b,...c){c.forEach(a=>{b.classList.remove(...a.split(" "))})};
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Package entry point: load the development or production bundle based on
// NODE_ENV at require time.
// Fix: statements are explicitly semicolon-terminated instead of relying on
// automatic semicolon insertion.
const LexicalElementHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalElementHelpers.dev.js')
    : require('./LexicalElementHelpers.prod.js');
module.exports = LexicalElementHelpers;
+9
-17
@@ -1,17 +0,9 @@ | ||
// NOTE(review): Closure-minified production build of the event/clipboard
// helpers. From the exports, the minified names appear to map to:
// E=$createNodesFromDOM, F=$insertDataTransferForRichText,
// G=$insertDataTransferForPlainText, H=onCopyForPlainText,
// I=onCopyForRichText; D is the default DOM-tag -> Lexical-node transform
// table and B/C mirror the clone helpers of the selection module.
// Generated output — do not edit by hand; line breaks are compiler wrapping.
'use strict';var e=require("lexical"),w=require("@lexical/list"),y=require("lexical/HeadingNode"),z=require("lexical/LinkNode"),A=require("lexical/CodeNode");
function B(a){a=a.getLatest();const b=a.constructor.clone(a);b.__parent=a.__parent;e.$isElementNode(a)&&e.$isElementNode(b)?(b.__children=Array.from(a.__children),b.__format=a.__format,b.__indent=a.__indent,b.__dir=a.__dir):e.$isTextNode(a)&&e.$isTextNode(b)?(b.__format=a.__format,b.__style=a.__style,b.__mode=a.__mode,b.__detail=a.__detail):e.$isDecoratorNode(a)&&e.$isDecoratorNode(b)&&(b.__state=a.__state);return b}
function C(a,b,c,f,h){for(var d=b;null!==a;){for(b=a.getParent();null!==b&&b.excludeFromCopy();)b=b.getParent();if(null===b)break;if(!e.$isElementNode(a)||!a.excludeFromCopy()){const k=a.getKey();let g=h.get(k);const l=void 0===g;l&&(g=B(a),h.set(k,g));!e.$isTextNode(g)||g.isSegmented()||g.isToken()?e.$isElementNode(g)&&(g.__children=g.__children.slice(c?d:0,c?void 0:d+1)):g.__text=g.__text.slice(c?d:0,c?void 0:d);if(e.$isRootNode(b)){l&&f.push(k);break}}d=h.get(b.getKey());d=e.$isElementNode(d)?
d.__children.indexOf(a.getKey()):a.getIndexWithinParent();a=b}}
const D={ul:()=>({node:w.$createListNode("ul")}),ol:()=>({node:w.$createListNode("ol")}),li:()=>({node:w.$createListItemNode()}),h1:()=>({node:y.$createHeadingNode("h1")}),h2:()=>({node:y.$createHeadingNode("h2")}),h3:()=>({node:y.$createHeadingNode("h3")}),h4:()=>({node:y.$createHeadingNode("h4")}),h5:()=>({node:y.$createHeadingNode("h5")}),p:()=>({node:e.$createParagraphNode()}),br:()=>({node:e.$createLineBreakNode()}),a:a=>({node:a instanceof HTMLAnchorElement?z.$createLinkNode(a.href):e.$createTextNode(a.textContent)}),
u:()=>({node:null,forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("underline")}}),b:a=>{const b="normal"===a.style.fontWeight;return{node:null,forChild:c=>{e.$isTextNode(c)&&!b&&c.toggleFormat("bold")}}},strong:()=>({node:null,forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("bold")}}),i:()=>({node:null,forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("italic")}}),em:()=>({node:null,forChild:a=>{e.$isTextNode(a)&&a.toggleFormat("italic")}}),td:a=>{const b=a.classList.contains("js-file-line");return{node:null,
after:c=>{b&&a.parentNode&&a.parentNode.nextSibling&&c.push(e.$createLineBreakNode());return c}}},table:a=>({node:a.classList.contains("js-file-line-container")?A.$createCodeNode():null}),span:a=>{const b="700"===a.style.fontWeight;return{node:null,forChild:c=>{e.$isTextNode(c)&&b&&c.toggleFormat("bold")}}},"#text":a=>({node:e.$createTextNode(a.textContent)}),pre:()=>({node:A.$createCodeNode()}),div:a=>({node:null!==a.style.fontFamily.match("monospace")?A.$createCodeNode():null,after:b=>{const c=
a.parentNode;null!=c&&a!==c.lastChild&&b.push(e.$createLineBreakNode());return b}})};
function E(a,b,c,f=new Map){let h=[],d=null;var k=a.nodeName.toLowerCase(),g=(c._config.htmlTransforms||{})[k]||b[k],l=g?g(a):null;g=null;if(null!==l){g=l.after;d=l.node;if(null!==d){h.push(d);const n=Array.from(f.values());for(let m=0;m<n.length;m++)n[m](d)}null!=l.forChild&&f.set(k,l.forChild)}a=a.childNodes;k=[];for(l=0;l<a.length;l++)k.push(...E(a[l],b,c,f));null!=g&&(k=g(k));null==d?h=h.concat(k):e.$isElementNode(d)&&d.append(...k);return h}
function F(a,b,c){var f=a.getData("application/x-lexical-editor");if(f){var h=c._config.namespace;try{const l=JSON.parse(f);if(l.namespace===h){const {range:n,nodeMap:m}=l.state;var d=new Map(m);f=[];for(h=0;h<n.length;h++){var k=d.get(n[h]);if(void 0!==k){var g=e.$createNodeFromParse(k,d);f.push(g)}}b.insertNodes(f);return}}catch(l){}}if(d=a.getData("text/html")){d=(new DOMParser).parseFromString(d,"text/html");a=[];d=d.body?Array.from(d.body.childNodes):[];k=d.length;for(g=0;g<k;g++)f=E(d[g],D,
c),null!==f&&(a=a.concat(f));c=a;a=[];d=null;for(k=0;k<c.length;k++)g=c[k],!e.$isElementNode(g)||g.isInline()?(null===d&&(d=e.$createParagraphNode(),a.push(d)),null!==d&&d.append(g)):(a.push(g),d=null);b.insertNodes(a)}else G(a,b)}function G(a,b){a=a.getData("text/plain");null!=a&&b.insertRawText(a)}
function H(a,b){a.preventDefault();b.update(()=>{e.$log("onCopyForPlainText");const c=a.clipboardData,f=e.$getSelection();if(null!==f&&null!=c){var h=window.getSelection();if(!h.isCollapsed){var d=h.getRangeAt(0);d&&(h=document.createElement("div"),d=d.cloneContents(),h.appendChild(d),c.setData("text/html",h.innerHTML));c.setData("text/plain",f.getTextContent())}}})}
function I(a,b){a.preventDefault();b.update(()=>{e.$log("onCopyForRichText");const c=a.clipboardData;var f=e.$getSelection();if(null!==f&&null!=c){var h=window.getSelection();if(!h.isCollapsed){var d=h.getRangeAt(0);d&&(h=document.createElement("div"),d=d.cloneContents(),h.appendChild(d),c.setData("text/html",h.innerHTML));c.setData("text/plain",f.getTextContent());h=b._config.namespace;d=c.setData;var k=JSON,g=k.stringify;{var l=f.anchor,n=f.focus;var m=l.getCharacterOffset();const x=n.getCharacterOffset();
var p=l.getNode(),t=n.getNode(),q=p.getParentOrThrow();if(p===t&&e.$isTextNode(p)&&(q.canBeEmpty()||1<q.getChildrenSize()))f=B(p),p=x>m,f.__text=f.__text.slice(p?m:x,p?x:m),m=f.getKey(),m={range:[m],nodeMap:[[m,f]]};else if(f=f.getNodes(),0===f.length)m={range:[],nodeMap:[]};else{p=f.length;t=f[0];q=t.getParent();if(null!==q&&!q.canBeEmpty()){var u=q.__children;if(u.length===p){var r=!0;for(var v=0;v<u.length;v++)if(u[v]!==f[v].__key){r=!1;break}r&&(p++,f.push(q))}}q=f[p-1];l=l.isBefore(n);n=new Map;
u=[];C(t,l?m:x,!0,u,n);for(t=0;t<p;t++)if(r=f[t],v=r.getKey(),!(n.has(v)||e.$isElementNode(r)&&r.excludeFromCopy())){const J=B(r);e.$isRootNode(r.getParent())&&u.push(r.getKey());n.set(v,J)}C(q,l?x:m,!1,u,n);m={range:u,nodeMap:Array.from(n.entries())}}}d.call(c,"application/x-lexical-editor",g.call(k,{namespace:h,state:m}))}}})}exports.$createNodesFromDOM=E;exports.$insertDataTransferForPlainText=G;exports.$insertDataTransferForRichText=F;
exports.$shouldOverrideDefaultCharacterSelection=function(a,b){var c=a.focus;a=c.offset;"element"===c.type?b=c.getNode().getChildAtIndex(b?a-1:a):(c=c.getNode(),b=b&&0===a||!b&&a===c.getTextContentSize()?b?c.getPreviousSibling():c.getNextSibling():null);return e.$isDecoratorNode(b)};exports.onCopyForPlainText=H;exports.onCopyForRichText=I;exports.onCutForPlainText=function(a,b){H(a,b);b.update(()=>{e.$log("onCutForPlainText");const c=e.$getSelection();null!==c&&c.removeText()})};
exports.onCutForRichText=function(a,b){I(a,b);b.update(()=>{e.$log("onCutForRichText");const c=e.$getSelection();null!==c&&c.removeText()})};exports.onPasteForPlainText=function(a,b){a.preventDefault();b.update(()=>{e.$log("onPasteForPlainText");const c=e.$getSelection(),f=a.clipboardData;null!=f&&null!==c&&G(f,c)})};exports.onPasteForRichText=function(a,b){a.preventDefault();b.update(()=>{e.$log("onPasteForRichText");const c=e.$getSelection(),f=a.clipboardData;null!=f&&null!==c&&F(f,c,b)})};
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Package entry point: load the development or production bundle based on
// NODE_ENV at require time.
// Fix: statements are explicitly semicolon-terminated instead of relying on
// automatic semicolon insertion.
const LexicalEventHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalEventHelpers.dev.js')
    : require('./LexicalEventHelpers.prod.js');
module.exports = LexicalEventHelpers;
+9
-3
@@ -1,3 +0,9 @@ | ||
'use strict';
var e = require('lexical');

// Open a native file picker restricted to `.lexical` files and invoke
// `callback` with the chosen file's text once it has been read.
function f(callback) {
  const input = document.createElement('input');
  input.type = 'file';
  input.accept = '.lexical';
  input.addEventListener('change', (event) => {
    const file = event.target.files[0];
    const reader = new FileReader();
    reader.readAsText(file, 'UTF-8');
    reader.onload = (loadEvent) => {
      callback(loadEvent.target.result);
    };
  });
  input.click();
}
| exports.exportFile=function(a,b=Object.freeze({})){var c=new Date;a=a.getEditorState();a={source:b.source||"Lexical",version:e.VERSION,lastSaved:c.getTime(),editorState:a};{b=`${b.fileName||c.toISOString()}.lexical`;c=document.createElement("a");const d=document.body;null!==d&&(d.appendChild(c),c.style.display="none",a=JSON.stringify(a),a=new Blob([a],{type:"octet/stream"}),a=window.URL.createObjectURL(a),c.href=a,c.download=b,c.click(),window.URL.revokeObjectURL(a),c.remove())}}; | ||
| exports.importFile=function(a){f(b=>{b=JSON.parse(b);b=a.parseEditorState(JSON.stringify(b.editorState));a.setEditorState(b);a.execCommand("clearHistory")})}; | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Re-export the bundle matching the current NODE_ENV.
const LexicalFileHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalFileHelpers.dev.js')
    : require('./LexicalFileHelpers.prod.js');

module.exports = LexicalFileHelpers;
+9
-5
@@ -1,5 +0,9 @@ | ||
'use strict';
var d = require('lexical');
var g = require('lexical/TableNode');
var h = require('lexical/TableRowNode');
var k = require('lexical/TableCellNode');

// Recursively convert a DOM node into a Lexical node using `conversionMap`
// (keyed by lowercase tag name). Returns null when no converter exists.
function q(domNode, conversionMap) {
  let lexicalNode = null;
  const createFn = conversionMap[domNode.nodeName.toLowerCase()];
  if (createFn) {
    lexicalNode = createFn(domNode);
    if (d.$isElementNode(lexicalNode)) {
      const childNodes = domNode.childNodes;
      for (let i = 0; i < childNodes.length; i++) {
        const childLexicalNode = q(childNodes[i], conversionMap);
        if (childLexicalNode !== null) {
          lexicalNode.append(childLexicalNode);
        }
      }
    }
  }
  return lexicalNode;
}

// True when the previous sibling is absent, a line break, or simple text
// ending with a space.
function r(node) {
  const prevSibling = node.getPreviousSibling();
  return (
    prevSibling === null ||
    d.$isLineBreakNode(prevSibling) ||
    (d.$isTextNode(prevSibling) &&
      prevSibling.isSimpleText() &&
      prevSibling.getTextContent().endsWith(' '))
  );
}
// True when the next sibling is absent, a line break, or simple text
// starting with a space.
function t(node) {
  const nextSibling = node.getNextSibling();
  return (
    nextSibling === null ||
    d.$isLineBreakNode(nextSibling) ||
    (d.$isTextNode(nextSibling) &&
      nextSibling.isSimpleText() &&
      nextSibling.getTextContent().startsWith(' '))
  );
}

// Both neighbours are null/line-break/space-adjacent simple text.
exports.$areSiblingsNullOrSpace = function (node) {
  return r(node) && t(node);
};
exports.$createLexicalNodeFromDOMNode = q;
// Build a table with the given row/column counts. Every cell receives an
// empty paragraph; first-row cells are header cells when `includeHeader`
// is true (the default).
exports.$createTableNodeWithDimensions = function (rowCount, columnCount, includeHeader = true) {
  const tableNode = g.$createTableNode();
  for (let row = 0; row < rowCount; row++) {
    const rowNode = h.$createTableRowNode();
    for (let column = 0; column < columnCount; column++) {
      const cellNode = k.$createTableCellNode(row === 0 && includeHeader);
      const paragraph = d.$createParagraphNode();
      paragraph.append(d.$createTextNode());
      cellNode.append(paragraph);
      rowNode.append(cellNode);
    }
    tableNode.append(rowNode);
  }
  return tableNode;
};
// Depth-first traversal calling `visitor` on each node. After the first
// call, the visitor's return value replaces the current node, letting it
// redirect (or, by returning null, stop) the walk.
exports.$dfs__DEPRECATED = function (startNode, visitor) {
  let node = startNode;
  visitor(node);
  while (node !== null) {
    if (d.$isElementNode(node) && node.getChildrenSize() > 0) {
      node = node.getFirstChild();
    } else {
      // Climb until some ancestor has a next sibling, or the tree ends.
      let sibling = null;
      while (sibling === null && node !== null) {
        sibling = node.getNextSibling();
        node = sibling === null ? node.getParent() : sibling;
      }
    }
    if (node !== null) {
      node = visitor(node);
    }
  }
};

// First ancestor (or the node itself) satisfying `predicate`; stops at
// the root and returns null when nothing matches.
exports.$findMatchingParent = function (node, predicate) {
  let current = node;
  while (current !== d.$getRoot() && current != null) {
    if (predicate(current)) {
      return current;
    }
    current = current.getParent();
  }
  return null;
};

// Walk up until an instance of `klass` is found; yields null/undefined
// when no such ancestor exists.
exports.$getNearestNodeOfType = function (node, klass) {
  let current = node;
  while (current != null && !(current instanceof klass)) {
    current = current.getParent();
  }
  return current;
};

exports.$isNextSiblingNullOrSpace = t;
exports.$isPreviousSiblingNullOrSpace = r;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Entry point: route to the dev or prod build depending on NODE_ENV.
const LexicalNodeHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalNodeHelpers.dev.js')
    : require('./LexicalNodeHelpers.prod.js');

module.exports = LexicalNodeHelpers;
+9
-9
@@ -1,9 +0,9 @@ | ||
// OffsetView (minified class `t`, exported as OffsetView): maps absolute
// character offsets to and from Lexical selections via an offset-node
// structure built by $createOffsetView (see `A` below). Code is minified
// and unchanged; only these annotations were added.
//
// constructor(offsetMap, firstNode, blockOffsetSize = 1):
//   stores the key -> offset-node map, the head of the offset-node list,
//   and the per-element-block offset size.
// createSelectionFromOffsets(start, end, diffOffsetView?):
//   locates the offset-nodes containing `start`/`end` with `y`; when
//   `diffOffsetView` is given, first re-bases both offsets through `z`
//   (presumably to translate offsets captured against an older view —
//   TODO confirm); resolves each to a (key, offset, 'text'|'element')
//   point, with adjustments for text-node boundaries and 'inline' nodes
//   (which resolve to their parent element), then returns a
//   RangeSelection from q.$createRangeSelection(), or null when either
//   offset or node lookup fails.
// getOffsetsFromSelection(selection):
//   inverse mapping; returns [startOffset, endOffset], leaving -1 for a
//   point whose key is absent from the offset map.
| 'use strict';var q=require("lexical"); | ||
| class t{constructor(b,a,c=1){this._offsetMap=b;this._firstNode=a;this._blockOffsetSize=c}createSelectionFromOffsets(b,a,c){var e=this._firstNode;if(null===e)return null;var f=y(e,b,this._blockOffsetSize);let h=y(e,a,this._blockOffsetSize);void 0!==c&&(b=z(b,f,c,this,this._blockOffsetSize),f=y(e,b,this._blockOffsetSize),a=z(a,h,c,this,this._blockOffsetSize),h=y(e,a,this._blockOffsetSize));if(null===f||null===h)return null;c=f.key;e=h.key;const m=q.$getNodeByKey(c),k=q.$getNodeByKey(e);if(null===m|| | ||
| null===k)return null;let l=0,d=0,g="element",n="element";"text"===f.type?(l=b-f.start,g="text",f=m.getNextSibling(),b!==a&&l===m.getTextContentSize()&&q.$isTextNode(f)&&(l=0,c=f.__key)):"inline"===f.type&&(c=m.getParentOrThrow().getKey(),l=a>f.start?f.end:f.start);"text"===h.type?(d=a-h.start,n="text"):"inline"===h.type&&(e=k.getParentOrThrow().getKey(),d=a>h.start?h.end:h.start);b=q.$createRangeSelection();if(null===b)return null;b.anchor.set(c,l,g);b.focus.set(e,d,n);return b}getOffsetsFromSelection(b){var a= | ||
| b.anchor,c=b.focus,e=this._offsetMap;const f=a.offset;var h=c.offset;let m=b=-1;if("text"===a.type)a=e.get(a.key),void 0!==a&&(b=a.start+f);else{a=a.getNode().getDescendantByIndex(f);const k=e.get(a.getKey());void 0!==k&&(b=a.getIndexWithinParent()!==f?k.end:k.start)}"text"===c.type?(h=e.get(c.key),void 0!==h&&(m=h.start+c.offset)):(c=c.getNode().getDescendantByIndex(h),e=e.get(c.getKey()),void 0!==e&&(m=c.getIndexWithinParent()!==h?e.end:e.start));return[b,m]}} | ||
// `z(offset, offsetNode, otherView, thisView, blockOffsetSize)`:
//   re-bases `offset` by walking backwards (via .prev/.parent) from
//   `offsetNode`, accumulating the size difference between each node's
//   span in `otherView._offsetMap` and its current span; a second pass
//   subtracts/adds spans for keys present only in `thisView`'s map.
//   NOTE(review): exact re-basing semantics inferred from the arithmetic
//   — confirm against the non-minified LexicalOffsetHelpers source.
// `y(firstNode, offset, blockOffsetSize)`:
//   linear search of the offset-node list: descends into `.child` and
//   advances along `.next` until it finds the node whose [start, end)
//   span (element nodes widened by 1 unless blockOffsetSize is 0)
//   contains `offset`; returns null when the list is exhausted.
| function z(b,a,c,e,f){const h=c._offsetMap;e=e._offsetMap;const m=new Set;let k=b;for(;null!==a;){var l=a.key,d=h.get(l),g=a.end-a.start;m.add(l);void 0===d?k+=g:(l=d.end-d.start,l!==g&&(k+=g-l));g=a.prev;if(null!==g)a=g;else{for(a=a.parent;null!==a;)d=a.prev,null!==d&&(g=d.key,l=h.get(g),d=d.end-d.start,m.add(g),void 0===l?k+=d:(g=l.end-l.start,g!==d&&(k+=d-g))),a=a.parent;break}}c=c._firstNode;if(null!==c){a=y(c,b,f);for(b=!1;null!==a;){if(!m.has(a.key)){b=!0;break}a=a.parent}if(!b)for(;null!== | ||
| a;)b=a.key,m.has(b)||(f=e.get(b),b=a.end-a.start,void 0===f?k-=b:(f=f.end-f.start,b!==f&&(k+=f-b))),a=a.prev}return k}function y(b,a,c){for(;null!==b;){if(a<b.end+("element"!==b.type||0===c?1:0)){const e=b.child;if(null!==e){b=e;continue}return b}b=b.next;if(null===b)break}return null} | ||
// `A(state, childKeys, parentOffsetNode, nodeMap, offsetMap, blockOffsetSize)`:
//   builds the doubly-linked offset-node list for `childKeys`, threading a
//   running character offset through `state.offset`. Element nodes recurse
//   into their `__children` (adding `blockOffsetSize` per block boundary,
//   tracked with `state.prevIsBlock`); text nodes advance by their text
//   length, other leaves ("inline") by 1. Each offset-node records
//   {child, prev, next, type, start, end, key, parent} and is registered in
//   `offsetMap`. Throws minified Lexical error #3 when a key is missing
//   from `nodeMap`. Returns the first offset-node of the list (or null).
| function A(b,a,c,e,f,h){let m=null,k=null;const l=a.length;for(let v=0;v<l;v++){{var d=b;var g=a[v];var n=c,r=e,w=f,u=h,p=r.get(g);if(void 0===p)throw Error("Minified Lexical error #3; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");const x=d.offset;if(q.$isElementNode(p)){const B=p.__children;r=(p=0===B.length)?null:A(d,B,null,r,w,u);if(!d.prevIsBlock||p)d.prevIsBlock=!0,d.offset+=u;n={child:r,prev:null,next:null,type:"element", | ||
| start:x,end:x,key:g,parent:n};null!==r&&(r.parent=n);n.end=d.offset;w.set(g,n);g=n}else d.prevIsBlock=!1,u=q.$isTextNode(p),d=d.offset+=u?p.__text.length:1,d={child:null,prev:null,next:null,type:u?"text":"inline",start:x,end:d,key:g,parent:n},w.set(g,d),g=d}null===k?m=g:(g.prev=k,k.next=g);k=g}return m} | ||
// Build an OffsetView for the editor's pending (or current, or supplied)
// state by walking the root's children and assigning character offsets.
exports.$createOffsetView = function (editor, blockOffsetSize = 1, editorState) {
  const targetState = editorState || editor._pendingEditorState || editor._editorState;
  const nodeMap = targetState._nodeMap;
  const root = nodeMap.get('root');
  const offsetMap = new Map();
  const firstNode = A(
    {offset: 0, prevIsBlock: false},
    root.__children,
    null,
    nodeMap,
    offsetMap,
    blockOffsetSize,
  );
  return new t(offsetMap, firstNode, blockOffsetSize);
};
exports.OffsetView = t;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Select the build variant for this environment.
const LexicalOffsetHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalOffsetHelpers.dev.js')
    : require('./LexicalOffsetHelpers.prod.js');

module.exports = LexicalOffsetHelpers;
+3
-3
@@ -16,6 +16,6 @@ { | ||
| "license": "MIT", | ||
| "version": "0.1.7", | ||
| "version": "0.1.8", | ||
| "peerDependencies": { | ||
| "lexical": "0.1.7", | ||
| "@lexical/list": "0.1.7" | ||
| "lexical": "0.1.8", | ||
| "@lexical/list": "0.1.8" | ||
| }, | ||
@@ -22,0 +22,0 @@ "repository": { |
+9
-2
@@ -1,2 +0,9 @@ | ||
'use strict';
var d = require('lexical');

// Text content of the whole editor root.
function f() {
  return d.$getRoot().getTextContent();
}

// True when the editor's text content is empty. `isComposing` short-
// circuits to false (never report "empty" mid-IME-composition); `trim`
// (default true) ignores surrounding whitespace.
function g(isComposing, trim = true) {
  if (isComposing) {
    return false;
  }
  let text = f();
  if (trim) {
    text = text.trim();
  }
  return text === '';
}

// True when the placeholder can be shown: no text content (whitespace
// included), at most one top-level child, and that child — if an element
// — is an unindented paragraph containing only text nodes.
function h(isComposing) {
  if (!g(isComposing, false)) {
    return false;
  }
  const children = d.$getRoot().getChildren();
  const childrenLength = children.length;
  if (childrenLength > 1) {
    return false;
  }
  for (let i = 0; i < childrenLength; i++) {
    const topBlock = children[i];
    if (d.$isElementNode(topBlock)) {
      if (topBlock.__type !== 'paragraph' || topBlock.__indent !== 0) {
        return false;
      }
      const paragraphChildren = topBlock.getChildren();
      for (let j = 0; j < paragraphChildren.length; j++) {
        // BUG FIX: the original indexed with the outer loop variable
        // (`paragraphChildren[e]`, always element 0), so only the first
        // paragraph child was ever inspected; every child must be a
        // text node for the placeholder to show.
        if (!d.$isTextNode(paragraphChildren[j])) {
          return false;
        }
      }
    }
  }
  return true;
}
exports.$canShowPlaceholder = h;
// Curried variants bind their arguments and defer evaluation, so they can
// be passed as zero-argument callbacks.
exports.$canShowPlaceholderCurry = function (isComposing) {
  return () => h(isComposing);
};
exports.$isTextContentEmpty = g;
exports.$isTextContentEmptyCurry = function (isComposing, trim) {
  return () => g(isComposing, trim);
};
exports.$textContent = f;
exports.$textContentCurry = f;
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Route to the matching build for this NODE_ENV.
const LexicalRootHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalRootHelpers.dev.js')
    : require('./LexicalRootHelpers.prod.js');

module.exports = LexicalRootHelpers;
+9
-13
@@ -1,13 +0,9 @@ | ||
// Minified LexicalSelectionHelpers.prod.js — annotations only; code unchanged.
// `r`: module-level cache mapping a serialized CSS string to its parsed
//   style object (written by `y`, read by `x`/getStyleObjectFromCSS).
// `v(node)`: shallow-clone `node` at its latest revision via the class's
//   static clone(), copying __parent plus the writable fields appropriate
//   to its kind — element: __children/__format/__indent/__dir; text:
//   __format/__style/__mode/__detail; decorator: __state.
| 'use strict';var l=require("lexical");const r=new Map;function v(a){a=a.getLatest();const c=a.constructor.clone(a);c.__parent=a.__parent;l.$isElementNode(a)&&l.$isElementNode(c)?(c.__children=Array.from(a.__children),c.__format=a.__format,c.__indent=a.__indent,c.__dir=a.__dir):l.$isTextNode(a)&&l.$isTextNode(c)?(c.__format=a.__format,c.__style=a.__style,c.__mode=a.__mode,c.__detail=a.__detail):l.$isDecoratorNode(a)&&l.$isDecoratorNode(c)&&(c.__state=a.__state);return c} | ||
// `w(node, offset, isLeftEdge, range, nodeMap)`: walk upward from one
//   selection edge, cloning each ancestor into `nodeMap` (via `v`) and
//   slicing the clone's __text / __children to keep only the selected
//   portion; keys of top-level (root-child) clones are pushed onto
//   `range`. Nodes flagged excludeFromCopy() are skipped. (Spans two
//   source lines; statement continues on the next line.)
// `x(css)`: cached CSS-string -> style-object lookup; null on cache miss.
// `y(textNode, patch)`: merge `patch` over the node's cached style
//   object, re-serialize to "prop: value;" CSS, apply with setStyle(),
//   and cache the result in `r`. (`g` is hoisted by `var`, so the
//   `for(g in c)` loop does not leak a global.)
// `z(selection, isHoldingShift, isBackward, granularity)`: thin wrapper
//   over selection.modify('extend'|'move', ...).
// `A(node)`: remove now-empty element ancestors, climbing to the root.
| function w(a,c,b,g,k){for(var e=c;null!==a;){for(c=a.getParent();null!==c&&c.excludeFromCopy();)c=c.getParent();if(null===c)break;if(!l.$isElementNode(a)||!a.excludeFromCopy()){const d=a.getKey();let f=k.get(d);const h=void 0===f;h&&(f=v(a),k.set(d,f));!l.$isTextNode(f)||f.isSegmented()||f.isToken()?l.$isElementNode(f)&&(f.__children=f.__children.slice(b?e:0,b?void 0:e+1)):f.__text=f.__text.slice(b?e:0,b?void 0:e);if(l.$isRootNode(c)){h&&g.push(d);break}}e=k.get(c.getKey());e=l.$isElementNode(e)? | ||
| e.__children.indexOf(a.getKey()):a.getIndexWithinParent();a=c}}function x(a){return r.get(a)||null}function y(a,c){var b=x(a.getStyle());c=b?{...b,...c}:c;b="";for(g in c)g&&(b+=`${g}: ${c[g]};`);var g=b;a.setStyle(g);r.set(g,c)}function z(a,c,b,g){a.modify(c?"extend":"move",b,g)}function A(a){for(;null!==a&&!l.$isRootNode(a);){const c=a.getLatest(),b=a.getParent();0===c.__children.length&&a.remove();a=b}} | ||
// `$cloneContents(selection)`: clone the selected content into a
//   serializable {range: [topLevelKeys], nodeMap: [[key, clone], ...]}
//   payload. Fast path: both edges inside the same text node (whose
//   parent can be empty or has siblings) — clone it and slice __text to
//   the selected span. General path: collect selection.getNodes(); when
//   the nodes exactly fill a parent that cannot be empty, include that
//   parent too; then trim both edge branches with `w` and clone every
//   remaining node (skipping excludeFromCopy() elements) via `v`.
| exports.$cloneContents=function(a){var c=a.anchor,b=a.focus,g=c.getCharacterOffset();const k=b.getCharacterOffset();var e=c.getNode(),d=b.getNode(),f=e.getParentOrThrow();if(e===d&&l.$isTextNode(e)&&(f.canBeEmpty()||1<f.getChildrenSize()))return a=v(e),e=k>g,a.__text=a.__text.slice(e?g:k,e?k:g),g=a.getKey(),{range:[g],nodeMap:[[g,a]]};a=a.getNodes();if(0===a.length)return{range:[],nodeMap:[]};e=a.length;d=a[0];f=d.getParent();if(null!==f&&!f.canBeEmpty()){var h=f.__children;if(h.length===e){var m= | ||
| !0;for(var n=0;n<h.length;n++)if(h[n]!==a[n].__key){m=!1;break}m&&(e++,a.push(f))}}f=a[e-1];c=c.isBefore(b);b=new Map;h=[];w(d,c?g:k,!0,h,b);for(d=0;d<e;d++)if(m=a[d],n=m.getKey(),!(b.has(n)||l.$isElementNode(m)&&m.excludeFromCopy())){const t=v(m);l.$isRootNode(m.getParent())&&h.push(m.getKey());b.set(n,t)}w(f,c?k:g,!1,h,b);return{range:h,nodeMap:Array.from(b.entries())}}; | ||
// `$getSelectionStyleValueForProperty(selection, property, default='')`:
//   read `property` from each selected text node's cached style (via
//   `x`); returns the shared value when all text nodes agree, '' when
//   they conflict, and the default when no text node contributes. The
//   leading edge node is skipped when the selection starts at offset 0
//   past it (the `(0===d||0!==a||!h.is(e))` guard).
// `$isAtNodeEnd(point)`: true when the point sits at the very end of its
//   node — text size for text points, child count for element points.
| exports.$getSelectionStyleValueForProperty=function(a,c,b=""){let g=null;const k=a.getNodes();var e=a.anchor,d=a.focus,f=a.isBackward();a=f?d.offset:e.offset;e=f?d.getNode():e.getNode();for(d=0;d<k.length;d++){var h=k[d];if((0===d||0!==a||!h.is(e))&&l.$isTextNode(h)){f=c;var m=b;h=h.getStyle();h=x(h);f=null!==h?h[f]||m:m;if(null===g)g=f;else if(g!==f){g="";break}}}return null===g?b:g};exports.$isAtNodeEnd=function(a){return"text"===a.type?a.offset===a.getNode().getTextContentSize():a.offset===a.getNode().getChildrenSize()}; | ||
exports.$moveCaretSelection = z;

// Move (or extend, when shift is held) the selection by one character,
// flipping the direction when the anchor's block is right-to-left.
exports.$moveCharacter = function (selection, isHoldingShift, isBackward) {
  const isRTL =
    selection.anchor.getNode().getParentOrThrow().getDirection() === 'rtl';
  z(selection, isHoldingShift, isBackward ? !isRTL : isRTL, 'character');
};
// `$patchStyleText(selection, patch)`: apply a style patch (via `y`) to
//   every text node in a non-collapsed selection. The first node is
//   advanced to its next text sibling when the selection starts exactly
//   at its end. Single-node case: style the whole node, or splitText()
//   the selected span out and style/select that piece. Multi-node case:
//   split and style the partial first/last text nodes, then style every
//   interior text node that is not a token. Re-selects the styled span.
| exports.$patchStyleText=function(a,c){var b=a.getNodes();const g=b.length-1;let k=b[0],e=b[g];if(!a.isCollapsed()){var d=a.anchor,f=a.focus;a=k.getTextContent().length;var h=f.offset,m=d.offset;d=(f=d.isBefore(f))?m:h;f=f?h:m;if(d===k.getTextContentSize()){const n=k.getNextSibling();l.$isTextNode(n)&&(d=m=0,k=n)}if(k.is(e))l.$isTextNode(k)&&(d=m>h?h:m,f=m>h?m:h,d!==f&&(0===d&&f===a?(y(k,c),k.select(d,f)):(b=k.splitText(d,f),b=0===d?b[0]:b[1],y(b,c),b.select(0,f-d))));else for(l.$isTextNode(k)&&(0!== | ||
| d&&([,k]=k.splitText(d)),y(k,c)),l.$isTextNode(e)&&(a=e.getTextContent().length,f!==a&&([e]=e.splitText(f)),0!==f&&y(e,c)),a=1;a<g;a++)h=b[a],m=h.getKey(),l.$isTextNode(h)&&m!==k.getKey()&&m!==e.getKey()&&!h.isToken()&&y(h,c)}}; | ||
// `$selectAll(selection)`: expand the selection to cover the anchor's
//   entire top-level container — anchor set to offset 0 of the first
//   descendant, focus to the end of the last descendant. Point types are
//   'text' for text nodes; non-element leaves fall back to their parent
//   element with a child-count offset.
| exports.$selectAll=function(a){const c=a.anchor;a=a.focus;var b=c.getNode().getTopLevelElementOrThrow().getParentOrThrow();let g=b.getFirstDescendant();b=b.getLastDescendant();let k="element",e="element",d=0;l.$isTextNode(g)?k="text":l.$isElementNode(g)||null===g||(g=g.getParentOrThrow());l.$isTextNode(b)?(e="text",d=b.getTextContentSize()):l.$isElementNode(b)||null===b||(b=b.getParentOrThrow(),d=b.getChildrenSize());g&&b&&(c.set(g.getKey(),0,k),a.set(b.getKey(),d,e))}; | ||
// `$wrapLeafNodesInElements(selection, createElement, wrappingElement?)`:
//   wrap the leaf nodes of the selection in fresh elements produced by
//   `createElement()`. Empty selection: replace the anchor's element with
//   a new element holding its children (nested in `wrappingElement` when
//   given). Otherwise: group leaves by parent — one new element per
//   parent, children moved across and the emptied parent pruned via `A`;
//   empty selected elements are replaced outright. The new elements (or
//   `wrappingElement`) are inserted relative to the node preceding the
//   selection, with special-casing when that position is the root.
//   Marks the selection dirty. Exports `x` as getStyleObjectFromCSS.
| exports.$wrapLeafNodesInElements=function(a,c,b){const g=a.getNodes(),k=g.length;if(0===k){a=a.anchor;a="text"===a.type?a.getNode().getParentOrThrow():a.getNode();var e=a.getChildren();let p=c();e.forEach(u=>p.append(u));b&&(p=b.append(p));a.replace(p)}else{var d=g[0],f=new Map;e=[];for(d=l.$isElementNode(d)?d:d.getParentOrThrow();null!==d;){var h=d.getPreviousSibling();if(null!==h){d=h;break}d=d.getParentOrThrow();if(l.$isRootNode(d))break}h=new Set;for(var m=0;m<k;m++){var n=g[m];l.$isElementNode(n)&& | ||
| 0===n.getChildrenSize()&&h.add(n.getKey())}var t=new Set;for(m=0;m<k;m++){var q=g[m];n=q.getParent();if(null!==n&&l.$isLeafNode(q)&&!t.has(q.getKey())){if(q=n.getKey(),void 0===f.get(q)){const p=c();e.push(p);f.set(q,p);n.getChildren().forEach(u=>{p.append(u);t.add(u.getKey())});A(n)}}else h.has(q.getKey())&&(e.push(c()),q.remove())}if(b)for(c=0;c<e.length;c++)b.append(e[c]);if(l.$isRootNode(d))if(c=d.getFirstChild(),l.$isElementNode(c)&&(d=c),null===c)if(b)d.append(b);else for(b=0;b<e.length;b++)d.append(e[b]); | ||
| else if(b)c.insertBefore(b);else for(b=0;b<e.length;b++)c.insertBefore(e[b]);else if(b)d.insertAfter(b);else for(b=e.length-1;0<=b;b--)d.insertAfter(e[b]);a.dirty=!0}};exports.getStyleObjectFromCSS=x; | ||
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Pick the dev or prod bundle based on NODE_ENV.
const LexicalSelectionHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalSelectionHelpers.dev.js')
    : require('./LexicalSelectionHelpers.prod.js');

module.exports = LexicalSelectionHelpers;
+9
-2
@@ -1,2 +0,9 @@ | ||
'use strict';
var g = require('lexical');

// Walk the tree below `root`, accumulating text-node sizes until the
// running total passes `targetCharacters`; returns {node, offset}
// locating that character inside its text node, or null when the tree
// holds fewer characters.
exports.$findTextIntersectionFromCharacters = function (root, targetCharacters) {
  let node = root.getFirstChild();
  let currentCharacters = 0;
  mainLoop: while (node !== null) {
    if (g.$isElementNode(node)) {
      const firstChild = node.getFirstChild();
      if (firstChild !== null) {
        node = firstChild;
        continue;
      }
    } else if (g.$isTextNode(node)) {
      const characters = node.getTextContentSize();
      if (currentCharacters + characters > targetCharacters) {
        return {node, offset: targetCharacters - currentCharacters};
      }
      currentCharacters += characters;
    }
    const sibling = node.getNextSibling();
    if (sibling !== null) {
      node = sibling;
    } else {
      // Climb until some ancestor has a next sibling, or give up.
      let parent = node.getParent();
      while (parent !== null) {
        const parentSibling = parent.getNextSibling();
        if (parentSibling !== null) {
          node = parentSibling;
          continue mainLoop;
        }
        parent = parent.getParent();
      }
      break;
    }
  }
  return null;
};
// Concatenate the text of `element`'s children up to — and including a
// prefix (`untilOffset` characters) of — `untilTextNode`; every non-text
// child contributes `separator` instead. Throws when `untilOffset`
// exceeds the target node's length.
exports.$joinTextNodesFromElementNode = function (element, separator, untilTextNode, untilOffset) {
  let result = '';
  const children = element.getChildren();
  const childrenLength = children.length;
  for (let i = 0; i < childrenLength; ++i) {
    const child = children[i];
    if (g.$isTextNode(child)) {
      const text = child.getTextContent();
      if (child.is(untilTextNode)) {
        if (untilOffset > text.length) {
          throw Error("Minified Lexical error #50; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");
        }
        result += text.substr(0, untilOffset);
        break;
      } else {
        result += text;
      }
    } else {
      result += separator;
    }
  }
  return result;
};
| /** | ||
| * Copyright (c) Meta Platforms, Inc. and affiliates. | ||
| * | ||
| * This source code is licensed under the MIT license found in the | ||
| * LICENSE file in the root directory of this source tree. | ||
| */ | ||
'use strict';

// Route to the build matching NODE_ENV.
const LexicalTextHelpers =
  process.env.NODE_ENV === 'development'
    ? require('./LexicalTextHelpers.dev.js')
    : require('./LexicalTextHelpers.prod.js');

module.exports = LexicalTextHelpers;
Major refactor
Supply chain risk: This package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: This package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Long strings
Supply chain risk: This package contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries; however, packages on npm should not minify code.
Found 4 instances in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries; however, packages on npm should not minify code.
Found 2 instances in 1 package
88408
308.31%27
145.45%1948
1994.62%8
-11.11%11
Infinity%1
Infinity%