New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

hast-util-to-nlcst

Package Overview
Dependencies
Maintainers
2
Versions
16
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

hast-util-to-nlcst - npm Package Compare versions

Comparing version 1.2.7 to 1.2.8

160

index.js
'use strict'
var vfileLocation = require('vfile-location')
var embedded = require('hast-util-embedded')
var convert = require('hast-util-is-element/convert')
var phrasing = require('hast-util-phrasing')
var textContent = require('hast-util-to-string')
var whitespace = require('hast-util-whitespace')
var toString = require('nlcst-to-string')
var position = require('unist-util-position')
var phrasing = require('hast-util-phrasing')
var embedded = require('hast-util-embedded')
var whitespace = require('hast-util-whitespace')
var textContent = require('hast-util-to-string')
var is = require('hast-util-is-element')
var vfileLocation = require('vfile-location')
module.exports = toNlcst
var source = ['code']
var ignore = ['script', 'style', 'svg', 'math', 'del']
var explicit = ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
var push = [].push
var flowAccepting = [
var source = convert(['code', dataNlcstSourced])
var ignore = convert(['script', 'style', 'svg', 'math', 'del', dataNlcstIgnore])
var explicit = convert(['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6'])
var flowAccepting = convert([
'body',

@@ -42,4 +44,7 @@ 'article',

'dialog'
]
])
// See: <https://html.spec.whatwg.org/multipage/dom.html#paragraphs>
var unravelInParagraph = convert(['a', 'ins', 'del', 'map'])
// Transform `tree` to nlcst.

@@ -70,4 +75,4 @@ function toNlcst(tree, file, Parser) {

location = vfileLocation(file)
doc = String(file)
location = vfileLocation(doc)
parser = 'parse' in Parser ? Parser : new Parser()

@@ -84,23 +89,18 @@

children: results,
position: {
start: location.toPosition(0),
end: location.toPosition(doc.length)
}
position: {start: location.toPoint(0), end: location.toPoint(doc.length)}
}
function find(node) {
var children = node.children
if (node.type === 'root') {
findAll(children)
} else if (is(node) && !ignored(node)) {
if (is(node, explicit)) {
findAll(node.children)
} else if (node.type === 'element' && !ignore(node)) {
if (explicit(node)) {
// Explicit paragraph.
add(node)
} else if (is(node, flowAccepting)) {
} else if (flowAccepting(node)) {
// Slightly simplified version of: <https://html.spec.whatwg.org/#paragraphs>.
implicit(flattenAll(children))
implicit(flattenAll(node.children))
} else {
// Dig deeper.
findAll(children)
findAll(node.children)
}

@@ -111,6 +111,5 @@ }

function findAll(children) {
var length = children.length
var index = -1
while (++index < length) {
while (++index < children.length) {
find(children[index])

@@ -120,17 +119,12 @@ }

// Unravel certain elements (links, insertions, deletions, image maps) into
// their children, per the HTML paragraph rules; other nodes pass through.
function flatten(node) {
  var unravel = is(node, ['a', 'ins', 'del', 'map'])
  return unravel ? flattenAll(node.children) : node
}
function flattenAll(children) {
var results = []
var length = children.length
var index = -1
while (++index < length) {
results = results.concat(flatten(children[index]))
while (++index < children.length) {
if (unravelInParagraph(children[index])) {
push.apply(results, flattenAll(children[index].children))
} else {
results.push(children[index])
}
}

@@ -144,3 +138,3 @@

if (result.length !== 0) {
if (result.length) {
results.push(parser.tokenizeParagraph(result))

@@ -151,15 +145,12 @@ }

function implicit(children) {
var length = children.length + 1
var index = -1
var viable = false
var start = -1
var viable
var child
while (++index < length) {
while (++index <= children.length) {
child = children[index]
if (child && phrasing(child)) {
if (start === -1) {
start = index
}
if (start === -1) start = index

@@ -179,3 +170,3 @@ if (!viable && !embedded(child) && !whitespace(child)) {

viable = false
viable = null
start = -1

@@ -188,30 +179,26 @@ }

function one(node) {
var type = node.type
var tagName = type === 'element' ? node.tagName : null
var replacement
var change
var replacement
if (type === 'text') {
change = true
if (node.type === 'text') {
replacement = parser.tokenize(node.value)
} else if (tagName === 'wbr') {
change = true
replacement = [parser.tokenizeWhiteSpace(' ')]
} else if (tagName === 'br') {
change = true
replacement = [parser.tokenizeWhiteSpace('\n')]
} else if (sourced(node)) {
change = true
replacement = [parser.tokenizeSource(textContent(node))]
} else if (type === 'root' || !ignored(node)) {
replacement = all(node.children)
} else {
return
} else if (node.type === 'element' && !ignore(node)) {
if (node.tagName === 'wbr') {
replacement = [parser.tokenizeWhiteSpace(' ')]
change = true
} else if (node.tagName === 'br') {
replacement = [parser.tokenizeWhiteSpace('\n')]
change = true
} else if (source(node)) {
replacement = [parser.tokenizeSource(textContent(node))]
change = true
} else {
replacement = all(node.children)
}
}
if (!change) {
return replacement
}
return patch(replacement, location, location.toOffset(position.start(node)))
return change
? patch(replacement, location, location.toOffset(position.start(node)))
: replacement
}

@@ -221,16 +208,10 @@

function all(children) {
var length = children && children.length
var results = []
var index = -1
var result = []
var child
while (++index < length) {
child = one(children[index])
if (child) {
result = result.concat(child)
}
while (++index < children.length) {
push.apply(results, one(children[index]) || [])
}
return result
return results
}

@@ -244,15 +225,12 @@

function patch(nodes, location, offset) {
var length = nodes.length
var index = -1
var start = offset
var children
var end
var node
var end
while (++index < length) {
while (++index < nodes.length) {
node = nodes[index]
children = node.children
if (children) {
patch(children, location, start)
if (node.children) {
patch(node.children, location, start)
}

@@ -263,4 +241,4 @@

node.position = {
start: location.toPosition(start),
end: location.toPosition(end)
start: location.toPoint(start),
end: location.toPoint(end)
}

@@ -275,10 +253,8 @@

function sourced(node) {
var props = node.properties
return is(node) && (is(node, source) || props.dataNlcst === 'source')
// Check whether an element is explicitly marked as source material
// via the `data-nlcst="source"` attribute.
function dataNlcstSourced(node) {
  var marker = node.properties.dataNlcst
  return marker === 'source'
}
function ignored(node) {
var props = node.properties
return is(node) && (is(node, ignore) || props.dataNlcst === 'ignore')
// Check whether an element is explicitly marked to be skipped
// via the `data-nlcst="ignore"` attribute.
function dataNlcstIgnore(node) {
  var marker = node.properties.dataNlcst
  return marker === 'ignore'
}
{
"name": "hast-util-to-nlcst",
"version": "1.2.7",
"version": "1.2.8",
"description": "hast utility to transform to nlcst",

@@ -40,6 +40,6 @@ "license": "MIT",

"unist-util-position": "^3.0.0",
"vfile-location": "^2.0.0"
"vfile-location": "^3.1.0"
},
"devDependencies": {
"browserify": "^16.0.0",
"browserify": "^17.0.0",
"is-hidden": "^1.0.0",

@@ -51,15 +51,15 @@ "negate": "^1.0.0",

"parse-latin": "^4.0.0",
"prettier": "^1.0.0",
"rehype": "^9.0.0",
"remark-cli": "^7.0.0",
"remark-preset-wooorm": "^6.0.0",
"tape": "^4.0.0",
"tinyify": "^2.0.0",
"prettier": "^2.0.0",
"rehype": "^11.0.0",
"remark-cli": "^9.0.0",
"remark-preset-wooorm": "^8.0.0",
"tape": "^5.0.0",
"tinyify": "^3.0.0",
"vfile": "^4.0.0",
"xo": "^0.27.0"
"xo": "^0.34.0"
},
"scripts": {
"format": "remark . -qfo && prettier --write \"**/*.js\" && xo --fix",
"build-bundle": "browserify . -s hastUtilToNlcst > hast-util-to-nlcst.js",
"build-mangle": "browserify . -s hastUtilToNlcst -p tinyify > hast-util-to-nlcst.min.js",
"format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix",
"build-bundle": "browserify . -s hastUtilToNlcst -o hast-util-to-nlcst.js",
"build-mangle": "browserify . -s hastUtilToNlcst -o hast-util-to-nlcst.min.js -p tinyify",
"build": "npm run build-bundle && npm run build-mangle",

@@ -87,2 +87,7 @@ "test-api": "node test",

"esnext": false,
"rules": {
"unicorn/explicit-length-check": "off",
"unicorn/no-fn-reference-in-iterator": "off",
"unicorn/prefer-optional-catch-binding": "off"
},
"ignores": [

@@ -89,0 +94,0 @@ "hast-util-to-nlcst.js"

@@ -140,2 +140,15 @@ # hast-util-to-nlcst

## Related
* [`mdast-util-to-nlcst`](https://github.com/syntax-tree/mdast-util-to-nlcst)
— transform mdast to nlcst
* [`mdast-util-to-hast`](https://github.com/syntax-tree/mdast-util-to-hast)
— transform mdast to hast
* [`hast-util-to-mdast`](https://github.com/syntax-tree/hast-util-to-mdast)
— transform hast to mdast
* [`hast-util-to-xast`](https://github.com/syntax-tree/hast-util-to-xast)
— transform hast to xast
* [`hast-util-sanitize`](https://github.com/syntax-tree/hast-util-sanitize)
— sanitize hast nodes
## Contribute

@@ -179,5 +192,5 @@

[chat-badge]: https://img.shields.io/badge/chat-spectrum-7b16ff.svg
[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg
[chat]: https://spectrum.chat/unified/syntax-tree
[chat]: https://github.com/syntax-tree/unist/discussions

@@ -190,7 +203,7 @@ [npm]: https://docs.npmjs.com/cli/install

[contributing]: https://github.com/syntax-tree/.github/blob/master/contributing.md
[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md
[support]: https://github.com/syntax-tree/.github/blob/master/support.md
[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md
[coc]: https://github.com/syntax-tree/.github/blob/master/code-of-conduct.md
[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md

@@ -197,0 +210,0 @@ [english]: https://github.com/wooorm/parse-english

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc