Comparing version 4.1.2 to 4.2.0
@@ -5,2 +5,9 @@ <!--remark setext--> | ||
4.2.0 / 2016-08-01 | ||
================== | ||
* Move `trough` over to its own module ([`81c67fb`](https://github.com/wooorm/unified/commit/81c67fb)) | ||
* Fix bug where exceptions in `done` were swallowed ([`2dfc932`](https://github.com/wooorm/unified/commit/2dfc932)) | ||
* Refactor code-style to use `xo` ([`01543d5`](https://github.com/wooorm/unified/commit/01543d5)) | ||
4.1.2 / 2016-06-13 | ||
@@ -7,0 +14,0 @@ ================== |
index.js (632 lines changed)
@@ -11,8 +11,632 @@ /** | ||
/* eslint-env commonjs */ | ||
/* Dependencies. */ | ||
var events = require('events'); | ||
var has = require('has'); | ||
var once = require('once'); | ||
var extend = require('extend'); | ||
var bail = require('bail'); | ||
var vfile = require('vfile'); | ||
var trough = require('trough'); | ||
/* | ||
* Expose. | ||
*/ | ||
module.exports = require('./lib/unified.js'); | ||
/* Expose an abstract processor. */ | ||
module.exports = unified().abstract(); | ||
/* Methods. */ | ||
var slice = [].slice; | ||
/* Process pipeline. */ | ||
var pipeline = trough() | ||
.use(function (p, ctx) { | ||
ctx.tree = p.parse(ctx.file, ctx.options); | ||
}) | ||
.use(function (p, ctx, next) { | ||
p.run(ctx.tree, ctx.file, function (err, tree, file) { | ||
if (err) { | ||
next(err); | ||
} else { | ||
ctx.tree = tree; | ||
ctx.file = file; | ||
next(); | ||
} | ||
}); | ||
}) | ||
.use(function (p, ctx) { | ||
ctx.file.contents = p.stringify(ctx.tree, ctx.file, ctx.options); | ||
}); | ||
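/* Illustrative only (not part of the module): for a concrete
 * processor `p`, `p.process()` runs the pipeline above, which is
 * roughly equivalent to:
 *
 *   ctx.tree = p.parse(ctx.file, ctx.options);
 *   p.run(ctx.tree, ctx.file, function (err, tree, file) {
 *     file.contents = p.stringify(tree, file, ctx.options);
 *   });
 */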
/** | ||
* Function to create the first processor. | ||
* | ||
* @return {Function} - First processor. | ||
*/ | ||
function unified() { | ||
var attachers = []; | ||
var transformers = trough(); | ||
var namespace = {}; | ||
var chunks = []; | ||
var emitter = new events.EventEmitter(); | ||
var ended = false; | ||
var concrete = true; | ||
var settings; | ||
var key; | ||
/** | ||
* Create a new processor based on the processor | ||
* in the current scope. | ||
* | ||
* @return {Processor} - New concrete processor based | ||
* on the descendant processor. | ||
*/ | ||
function processor() { | ||
var destination = unified(); | ||
var length = attachers.length; | ||
var index = -1; | ||
while (++index < length) { | ||
destination.use.apply(null, attachers[index]); | ||
} | ||
destination.data(extend(true, {}, namespace)); | ||
return destination; | ||
} | ||
/* Mix in methods. */ | ||
for (key in emitter) { | ||
processor[key] = emitter[key]; | ||
} | ||
/* Helpers. */ | ||
/** | ||
* Assert a parser is available. | ||
* | ||
* @param {string} name - Name of callee. | ||
*/ | ||
function assertParser(name) { | ||
if (!isParser(processor.Parser)) { | ||
throw new Error('Cannot `' + name + '` without `Parser`'); | ||
} | ||
} | ||
/** | ||
* Assert a compiler is available. | ||
* | ||
* @param {string} name - Name of callee. | ||
*/ | ||
function assertCompiler(name) { | ||
if (!isCompiler(processor.Compiler)) { | ||
throw new Error('Cannot `' + name + '` without `Compiler`'); | ||
} | ||
} | ||
/** | ||
* Assert the processor is concrete. | ||
* | ||
* @param {string} name - Name of callee. | ||
*/ | ||
function assertConcrete(name) { | ||
if (!concrete) { | ||
throw new Error( | ||
'Cannot ' + | ||
(name ? 'invoke `' + name + '` on' : 'pipe into') + | ||
' abstract processor.\n' + | ||
'To make the processor concrete, invoke it: ' + | ||
'use `processor()` instead of `processor`.' | ||
); | ||
} | ||
} | ||
/** | ||
* Assert `node` is a Unist node. | ||
* | ||
* @param {*} node - Value to check. | ||
*/ | ||
function assertNode(node) { | ||
if (!isNode(node)) { | ||
throw new Error('Expected node, got `' + node + '`'); | ||
} | ||
} | ||
/** | ||
* Assert, if no `done` is given, that `complete` is | ||
* `true`. | ||
* | ||
* @param {string} name - Name of callee. | ||
* @param {boolean} complete - Whether an async process | ||
* is complete. | ||
* @param {Function?} done - Optional handler of async | ||
* results. | ||
*/ | ||
function assertDone(name, complete, done) { | ||
if (!complete && !done) { | ||
throw new Error( | ||
'Expected `done` to be given to `' + name + '` ' + | ||
'as async plug-ins are used' | ||
); | ||
} | ||
} | ||
/* Throw as early as possible. | ||
* As events are triggered synchronously, the stack | ||
* is preserved. */ | ||
processor.on('pipe', function () { | ||
assertConcrete(); | ||
}); | ||
/** | ||
* Abstract: used to signal an abstract processor which | ||
* should be made concrete before use. | ||
* | ||
* For example, take unified itself. It’s abstract. | ||
* Plug-ins should not be added to it. Rather, it should | ||
* be made concrete (by invoking it) before modifying it. | ||
* | ||
* In essence, always invoke this when exporting a | ||
* processor. | ||
* | ||
* @return {Processor} - The operated on processor. | ||
*/ | ||
function abstract() { | ||
concrete = false; | ||
return processor; | ||
} | ||
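/* Usage sketch (illustrative; `someParser`, `someCompiler`, and
 * `somePlugin` are placeholders, not real modules):
 *
 *   // Expose an abstract processor:
 *   module.exports = unified().use(someParser).use(someCompiler).abstract();
 *
 *   // Consumers make it concrete by invoking it, then extend it:
 *   var concrete = require('some-processor')();
 *   concrete.use(somePlugin);
 */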
/** | ||
* Data management. | ||
* | ||
* Getter / setter for processor-specific information. | ||
* | ||
* @param {string} key - Key to get or set. | ||
* @param {*} value - Value to set. | ||
* @return {*} - Either the operated on processor in | ||
* setter mode; or the value stored as `key` in | ||
* getter mode. | ||
*/ | ||
function data(key, value) { | ||
assertConcrete('data'); | ||
if (typeof key === 'string') { | ||
/* Set `key`. */ | ||
if (arguments.length === 2) { | ||
namespace[key] = value; | ||
return processor; | ||
} | ||
/* Get `key`. */ | ||
return (has(namespace, key) && namespace[key]) || null; | ||
} | ||
/* Get space. */ | ||
if (!key) { | ||
return namespace; | ||
} | ||
/* Set space. */ | ||
namespace = key; | ||
return processor; | ||
} | ||
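/* Usage sketch (illustrative):
 *
 *   var p = processor();
 *   p.data('alpha', 'bravo'); // set a key, returns `p`
 *   p.data('alpha'); // => 'bravo'
 *   p.data(); // => the whole data object
 *   p.data({charlie: 'delta'}); // replace the data object, returns `p`
 */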
/** | ||
* Plug-in management. | ||
* | ||
* Pass it: | ||
* * an attacher and options; | ||
* * a list of attachers and options for all of them; | ||
* * a tuple of one attacher and options; | ||
* * a matrix: a list containing any of the above and | ||
* matrices. | ||
* | ||
* @param {...*} value - See description. | ||
* @return {Processor} - The operated on processor. | ||
*/ | ||
function use(value) { | ||
var args = slice.call(arguments, 0); | ||
var params = args.slice(1); | ||
var index; | ||
var length; | ||
var transformer; | ||
assertConcrete('use'); | ||
/* Multiple attachers. */ | ||
if ('length' in value && !isFunction(value)) { | ||
index = -1; | ||
length = value.length; | ||
if (!isFunction(value[0])) { | ||
/* Matrix of things. */ | ||
while (++index < length) { | ||
use(value[index]); | ||
} | ||
} else if (isFunction(value[1])) { | ||
/* List of things. */ | ||
while (++index < length) { | ||
use.apply(null, [value[index]].concat(params)); | ||
} | ||
} else { | ||
/* Arguments. */ | ||
use.apply(null, value); | ||
} | ||
return processor; | ||
} | ||
/* Store attacher. */ | ||
attachers.push(args); | ||
/* Single attacher. */ | ||
transformer = value.apply(null, [processor].concat(params)); | ||
if (isFunction(transformer)) { | ||
transformers.use(transformer); | ||
} | ||
return processor; | ||
} | ||
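/* Accepted forms (illustrative; `attacher`, `options`, and friends
 * are placeholders):
 *
 *   p.use(attacher);
 *   p.use(attacher, options);
 *   p.use([attacherA, attacherB], options); // list, same options for each
 *   p.use([attacher, options]); // tuple of one attacher and its options
 *   p.use([[attacherA, optionsA], [attacherB, optionsB]]); // matrix
 */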
/** | ||
* Parse a file (in string or VFile representation) | ||
* into a Unist node using the `Parser` on the | ||
* processor. | ||
* | ||
* @param {(string|VFile)?} [file] - File to process. | ||
* @param {Object?} [options] - Configuration. | ||
* @return {Node} - Unist node. | ||
*/ | ||
function parse(file, options) { | ||
assertConcrete('parse'); | ||
assertParser('parse'); | ||
return new processor.Parser(vfile(file), options, processor).parse(); | ||
} | ||
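/* Usage sketch (illustrative; `p` is a concrete processor with a
 * `Parser`):
 *
 *   var tree = p.parse('Some document');
 */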
/** | ||
* Run transforms on a Unist node representation of a file | ||
* (in string or VFile representation). | ||
* | ||
* @param {Node} node - Unist node. | ||
* @param {(string|VFile)?} [file] - File representation. | ||
* @param {Function?} [done] - Callback. | ||
* @return {Node} - The given or resulting Unist node. | ||
*/ | ||
function run(node, file, done) { | ||
var complete = false; | ||
var result; | ||
assertConcrete('run'); | ||
assertNode(node); | ||
result = node; | ||
if (!done && file && !isFile(file)) { | ||
done = file; | ||
file = null; | ||
} | ||
transformers.run(node, vfile(file), function (err, tree, file) { | ||
complete = true; | ||
result = tree || node; | ||
(done || bail)(err, tree, file); | ||
}); | ||
assertDone('run', complete, done); | ||
return result; | ||
} | ||
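/* Usage sketch (illustrative):
 *
 *   var newTree = p.run(tree); // synchronous transformers only
 *   p.run(tree, file, function (err, newTree, newFile) {
 *     // required form when asynchronous transformers are used
 *   });
 */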
/** | ||
* Stringify a Unist node representation of a file | ||
* (in string or VFile representation) into a string | ||
* using the `Compiler` on the processor. | ||
* | ||
* @param {Node} node - Unist node. | ||
* @param {(string|VFile)?} [file] - File representation. | ||
* @param {Object?} [options] - Configuration. | ||
* @return {string} - String representation. | ||
*/ | ||
function stringify(node, file, options) { | ||
assertConcrete('stringify'); | ||
assertCompiler('stringify'); | ||
assertNode(node); | ||
if (!options && file && !isFile(file)) { | ||
options = file; | ||
file = null; | ||
} | ||
return new processor.Compiler(vfile(file), options, processor).compile(node); | ||
} | ||
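/* Usage sketch (illustrative; `p` is a concrete processor with a
 * `Compiler`):
 *
 *   var doc = p.stringify(tree, file);
 */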
/** | ||
* Parse a file (in string or VFile representation) | ||
* into a Unist node using the `Parser` on the processor, | ||
* then run transforms on that node, and compile the | ||
* resulting node using the `Compiler` on the processor, | ||
* and store that result on the VFile. | ||
* | ||
* @param {(string|VFile)?} file - File representation. | ||
* @param {Object?} [options] - Configuration. | ||
* @param {Function?} [done] - Callback. | ||
* @return {VFile} - The given or resulting VFile. | ||
*/ | ||
function process(file, options, done) { | ||
var complete = false; | ||
assertConcrete('process'); | ||
assertParser('process'); | ||
assertCompiler('process'); | ||
if (!done && isFunction(options)) { | ||
done = options; | ||
options = null; | ||
} | ||
file = vfile(file); | ||
pipeline.run(processor, { | ||
file: file, | ||
options: options || {} | ||
}, function (err) { | ||
complete = true; | ||
if (!done) { | ||
bail(err); | ||
return; | ||
} | ||
try { | ||
done(err, file); | ||
} catch (err) { | ||
bail(err); | ||
} | ||
}); | ||
assertDone('process', complete, done); | ||
return file; | ||
} | ||
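/* Usage sketch (illustrative; `p` is a concrete processor with both
 * a `Parser` and a `Compiler`):
 *
 *   p.process('Some document', function (err, file) {
 *     if (err) throw err;
 *     console.log(file.contents);
 *   });
 *
 *   // When all plug-ins are synchronous, the callback may be omitted:
 *   var file = p.process('Some document');
 */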
/* Streams. */ | ||
/** | ||
* Write a chunk into memory. | ||
* | ||
* @param {(Buffer|string)?} chunk - Value to write. | ||
* @param {string?} [encoding] - Encoding. | ||
* @param {Function?} [callback] - Callback. | ||
* @return {boolean} - Whether the write was successful. | ||
*/ | ||
function write(chunk, encoding, callback) { | ||
assertConcrete('write'); | ||
if (isFunction(encoding)) { | ||
callback = encoding; | ||
encoding = null; | ||
} | ||
if (ended) { | ||
throw new Error('Did not expect `write` after `end`'); | ||
} | ||
chunks.push((chunk || '').toString(encoding || 'utf8')); | ||
if (callback) { | ||
callback(); | ||
} | ||
/* Signal successful write. */ | ||
return true; | ||
} | ||
/** | ||
* End the writing. Passes all arguments to a final | ||
* `write`. Starts the process, which will trigger | ||
* `error`, with a fatal error, if any; `data`, with | ||
* the generated document in `string` form, if | ||
* successful. If messages are triggered during the | ||
* process, those are triggered as `warning`s. | ||
* | ||
* @return {boolean} - Whether the last write was | ||
* successful. | ||
*/ | ||
function end() { | ||
assertConcrete('end'); | ||
assertParser('end'); | ||
assertCompiler('end'); | ||
write.apply(null, arguments); | ||
ended = true; | ||
process(chunks.join(''), settings, function (err, file) { | ||
var messages = file.messages; | ||
var length = messages.length; | ||
var index = -1; | ||
chunks = settings = null; | ||
/* Trigger messages as warnings, except for fatal error. */ | ||
while (++index < length) { | ||
if (messages[index] !== err) { | ||
processor.emit('warning', messages[index]); | ||
} | ||
} | ||
if (err) { | ||
/* Don’t enter an infinite error throwing loop. */ | ||
global.setTimeout(function () { | ||
processor.emit('error', err); | ||
}, 4); | ||
} else { | ||
processor.emit('data', file.contents); | ||
processor.emit('end'); | ||
} | ||
}); | ||
return true; | ||
} | ||
/** | ||
* Pipe the processor into a writable stream. | ||
* | ||
* Basically `Stream#pipe`, but inlined and | ||
* simplified to keep the bundled size down. | ||
* | ||
* @see https://github.com/nodejs/node/blob/master/lib/stream.js#L26 | ||
* | ||
* @param {Stream} dest - Writable stream. | ||
* @param {Object?} [options] - Processing | ||
* configuration. | ||
* @return {Stream} - The destination stream. | ||
*/ | ||
function pipe(dest, options) { | ||
var onend = once(function () { | ||
if (dest.end) { | ||
dest.end(); | ||
} | ||
}); | ||
assertConcrete('pipe'); | ||
settings = options || {}; | ||
/** | ||
* Handle data. | ||
* | ||
* @param {*} chunk - Data to pass through. | ||
*/ | ||
function ondata(chunk) { | ||
if (dest.writable) { | ||
dest.write(chunk); | ||
} | ||
} | ||
/** | ||
* Clean listeners. | ||
*/ | ||
function cleanup() { | ||
processor.removeListener('data', ondata); | ||
processor.removeListener('end', onend); | ||
processor.removeListener('error', onerror); | ||
processor.removeListener('end', cleanup); | ||
processor.removeListener('close', cleanup); | ||
dest.removeListener('error', onerror); | ||
dest.removeListener('close', cleanup); | ||
} | ||
/** | ||
* Close dangling pipes and handle unheard errors. | ||
* | ||
* @param {Error} err - Exception. | ||
*/ | ||
function onerror(err) { | ||
var handlers = processor._events.error; | ||
cleanup(); | ||
/* Cannot use `listenerCount` in node <= 0.12. */ | ||
if (!handlers || !handlers.length || handlers === onerror) { | ||
throw err; /* Unhandled stream error in pipe. */ | ||
} | ||
} | ||
processor.on('data', ondata); | ||
processor.on('error', onerror); | ||
processor.on('end', cleanup); | ||
processor.on('close', cleanup); | ||
/* If the 'end' option is not supplied, dest.end() will be | ||
* called when the 'end' or 'close' events are received. | ||
* Only dest.end() once. */ | ||
if (!dest._isStdio && settings.end !== false) { | ||
processor.on('end', onend); | ||
} | ||
dest.on('error', onerror); | ||
dest.on('close', cleanup); | ||
dest.emit('pipe', processor); | ||
return dest; | ||
} | ||
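/* Usage sketch (illustrative, mirroring the readme; `someProcessor`
 * is a placeholder for a concrete processor with a `Parser` and a
 * `Compiler`):
 *
 *   process.stdin
 *     .pipe(someProcessor())
 *     .pipe(process.stdout);
 */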
/* Data management. */ | ||
processor.data = data; | ||
/* Lock. */ | ||
processor.abstract = abstract; | ||
/* Plug-ins. */ | ||
processor.use = use; | ||
/* Streaming. */ | ||
processor.writable = true; | ||
processor.readable = true; | ||
processor.write = write; | ||
processor.end = end; | ||
processor.pipe = pipe; | ||
/* API. */ | ||
processor.parse = parse; | ||
processor.stringify = stringify; | ||
processor.run = run; | ||
processor.process = process; | ||
/* Expose. */ | ||
return processor; | ||
} | ||
/** | ||
* Check if `node` is a Unist node. | ||
* | ||
* @param {*} node - Value. | ||
* @return {boolean} - Whether `node` is a Unist node. | ||
*/ | ||
function isNode(node) { | ||
return node && typeof node.type === 'string' && node.type.length !== 0; | ||
} | ||
/** | ||
* Check if `file` is a VFile. | ||
* | ||
* @param {*} file - Value. | ||
* @return {boolean} - Whether `file` is a VFile. | ||
*/ | ||
function isFile(file) { | ||
return file && typeof file.contents === 'string'; | ||
} | ||
/** | ||
* Check if `fn` is a function. | ||
* | ||
* @param {*} fn - Value. | ||
* @return {boolean} - Whether `fn` is a function. | ||
*/ | ||
function isFunction(fn) { | ||
return typeof fn === 'function'; | ||
} | ||
/** | ||
* Check if `compiler` is a Compiler. | ||
* | ||
* @param {*} compiler - Value. | ||
* @return {boolean} - Whether `compiler` is a Compiler. | ||
*/ | ||
function isCompiler(compiler) { | ||
return isFunction(compiler) && compiler.prototype && isFunction(compiler.prototype.compile); | ||
} | ||
/** | ||
* Check if `parser` is a Parser. | ||
* | ||
* @param {*} parser - Value. | ||
* @return {boolean} - Whether `parser` is a Parser. | ||
*/ | ||
function isParser(parser) { | ||
return isFunction(parser) && parser.prototype && isFunction(parser.prototype.parse); | ||
} |
{ | ||
"name": "unified", | ||
"version": "4.1.2", | ||
"version": "4.2.0", | ||
"description": "Pluggable text processing interface", | ||
@@ -19,3 +19,5 @@ "license": "MIT", | ||
"extend": "^3.0.0", | ||
"has": "^1.0.1", | ||
"once": "^1.3.3", | ||
"trough": "^1.0.0", | ||
"vfile": "^1.0.0" | ||
@@ -26,6 +28,3 @@ }, | ||
}, | ||
"repository": { | ||
"type": "git", | ||
"url": "https://github.com/wooorm/unified.git" | ||
}, | ||
"repository": "https://github.com/wooorm/unified", | ||
"bugs": "https://github.com/wooorm/unified/issues", | ||
@@ -41,12 +40,8 @@ "author": "Titus Wormer <tituswormer@gmail.com> (http://wooorm.com)", | ||
"index.js", | ||
"lib/trough.js", | ||
"lib/unified.js" | ||
"lib" | ||
], | ||
"devDependencies": { | ||
"browserify": "^13.0.0", | ||
"eslint": "^2.0.0", | ||
"esmangle": "^1.0.0", | ||
"istanbul": "^0.4.0", | ||
"jscs": "^3.0.0", | ||
"jscs-jsdoc": "^2.0.0", | ||
"nyc": "^7.1.0", | ||
"remark-cli": "^1.0.0", | ||
@@ -58,3 +53,4 @@ "remark-comment-config": "^4.0.0", | ||
"remark-validate-links": "^4.0.0", | ||
"tape": "^4.4.0" | ||
"tape": "^4.4.0", | ||
"xo": "^0.16.0" | ||
}, | ||
@@ -66,9 +62,43 @@ "scripts": { | ||
"build": "npm run build-md && npm run build-bundle && npm run build-mangle", | ||
"lint-api": "eslint .", | ||
"lint-style": "jscs --reporter inline .", | ||
"lint": "npm run lint-api && npm run lint-style", | ||
"test-api": "node test.js", | ||
"test-coverage": "istanbul cover test.js", | ||
"lint": "xo", | ||
"test-api": "node test", | ||
"test-coverage": "nyc --reporter lcov tape test", | ||
"test": "npm run build && npm run lint && npm run test-coverage" | ||
}, | ||
"nyc": { | ||
"check-coverage": true, | ||
"lines": 100, | ||
"functions": 100, | ||
"branches": 100 | ||
}, | ||
"xo": { | ||
"space": true, | ||
"rules": { | ||
"guard-for-in": "off", | ||
"max-lines": "off" | ||
}, | ||
"ignores": [ | ||
"unified.js", | ||
"unified.min.js" | ||
] | ||
}, | ||
"remarkConfig": { | ||
"output": true, | ||
"plugins": { | ||
"comment-config": null, | ||
"lint": { | ||
"heading-increment": false, | ||
"no-duplicate-headings": false, | ||
"list-item-spacing": false | ||
}, | ||
"github": null, | ||
"toc": { | ||
"maxDepth": 3 | ||
}, | ||
"validate-links": null | ||
}, | ||
"settings": { | ||
"bullet": "*" | ||
} | ||
} | ||
} |
readme.md (142 lines changed)
# unified [![Build Status][travis-badge]][travis] [![Coverage Status][codecov-badge]][codecov] | ||
<!--lint disable heading-increment no-duplicate-headings--> | ||
> **unified** recently changed its interface. These changes have | ||
> yet to bubble through to other processors before all examples | ||
> start working. | ||
**unified** is an interface for processing text using syntax trees. | ||
It’s what powers [**remark**][remark], [**retext**][retext], and | ||
others, but it also allows for processing between multiple syntaxes. | ||
[**rehype**][rehype], but it also allows for processing between | ||
multiple syntaxes. | ||
@@ -29,11 +24,15 @@ ## Installation | ||
var markdown = require('remark-parse'); | ||
var lint = require('remark-lint'); | ||
var html = require('remark-html'); | ||
var toc = require('remark-toc'); | ||
var remark2rehype = require('remark-rehype'); | ||
var document = require('rehype-document'); | ||
var html = require('rehype-stringify'); | ||
process.stdin | ||
.pipe(unified()) | ||
.use(markdown) | ||
.use(lint) | ||
.use(html) | ||
.pipe(process.stdout); | ||
.pipe(unified()) | ||
.use(markdown) | ||
.use(toc) | ||
.use(remark2rehype) | ||
.use(document) | ||
.use(html) | ||
.pipe(process.stdout); | ||
``` | ||
@@ -75,7 +74,7 @@ | ||
▲ └──────────────┘ ▼ | ||
└───────┐ ┌───────┘ | ||
│ │ | ||
┌───────┐ ┌──────┐ ┌────────┐ | ||
│ Input │ ── Parser ─▶ │ Tree │ ─ Compiler ▶ │ Output │ | ||
└───────┘ └──────┘ └────────┘ | ||
└────────┐ ┌────────┘ | ||
│ │ | ||
┌────────┐ │ │ ┌──────────┐ | ||
Input ──▶ │ Parser │ ──▶ Tree ──▶ │ Compiler │ ──▶ Output | ||
└────────┘ └──────────┘ | ||
``` | ||
@@ -146,7 +145,7 @@ | ||
process.stdin | ||
.pipe(unified()) | ||
.use(parse) | ||
.use(toc) | ||
.use(stringify) | ||
.pipe(process.stdout); | ||
.pipe(unified()) | ||
.use(parse) | ||
.use(toc) | ||
.use(stringify) | ||
.pipe(process.stdout); | ||
``` | ||
@@ -186,17 +185,19 @@ | ||
var lint = require('remark-lint'); | ||
var html = require('remark-html'); | ||
var remark2retext = require('remark-retext'); | ||
var english = require('retext-english'); | ||
var equality = require('retext-equality'); | ||
var remark2rehype = require('remark-rehype'); | ||
var html = require('rehype-stringify'); | ||
var report = require('vfile-reporter'); | ||
unified() | ||
.use(markdown) | ||
.use(lint) | ||
.use(remark2retext, unified().use(english).use(equality)) | ||
.use(html) | ||
.process('## Hey guys', function (err, file) { | ||
console.log(report(file)); | ||
console.log(file.contents); | ||
}); | ||
.use(markdown) | ||
.use(lint) | ||
.use(remark2retext, unified().use(english).use(equality)) | ||
.use(remark2rehype) | ||
.use(html) | ||
.process('## Hey guys', function (err, file) { | ||
console.error(report(err || file)); | ||
console.log(file.toString()); | ||
}); | ||
``` | ||
@@ -207,21 +208,28 @@ | ||
```txt | ||
<stdin> | ||
1:1-1:12 warning First heading level should be `1` first-heading-level | ||
1:8-1:12 warning `guys` may be insensitive, use `people`, `persons`, `folks` instead | ||
1:8-1:12 warning `guys` may be insensitive, use `people`, `persons`, `folks` instead gals-men | ||
⚠ 2 warnings | ||
⚠ 3 warnings | ||
<h2>Hey guys</h2> | ||
``` | ||
###### Bridge | ||
###### Processing between syntaxes | ||
**unified** bridges transform the syntax tree from one flavour to | ||
another. Then, they apply another processor’s transformations on | ||
that tree. And then, if possible, they mutate the origin tree based | ||
on changes made to the destination tree. Finally, the origin process | ||
continues running. | ||
The processors can be combined in two modes. | ||
See [**unified-bridge**][unified-bridge] for more information. | ||
**Bridge** mode transforms the syntax tree from one flavour to another. | ||
Then, transformations are applied on that tree. Finally, the origin | ||
processor continues transforming the original syntax tree. | ||
**Mutate** mode transforms the syntax tree from one flavour to another. | ||
Then, the origin processor continues transforming the destination syntax | ||
tree. | ||
In the previous example (“Programming interface”), `remark-retext` is | ||
used in bridge mode: the origin syntax tree is kept after retext is | ||
finished; whereas `remark-rehype` is used in mutate mode: it sets a | ||
new syntax tree and discards the original. | ||
* [**remark-retext**][remark-retext]. | ||
* [**remark-rehype**][remark-rehype]. | ||
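As a rough sketch of the two modes described above (reusing the plug-in names from the earlier example; not a complete program):

```js
unified()
  .use(markdown)
  // Bridge mode: run the given retext processor on a derived tree,
  // then continue with the original mdast tree.
  .use(remark2retext, unified().use(english).use(equality))
  // Mutate mode: replace the mdast tree with a hast tree; later
  // plug-ins operate on that new tree.
  .use(remark2rehype)
  .use(html);
```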
@@ -283,5 +291,3 @@ ## API | ||
parser, the compiler, or linking the processor to other processors; | ||
* It transforms the [**syntax tree**][node] representation of a file; | ||
* It modifies metadata of a file. | ||
@@ -312,7 +318,10 @@ | ||
Transformers modify the syntax tree or metadata of a file. | ||
A transformer is a (generator) function which is invoked each time | ||
a file is passed through the transform phase. If an error occurs | ||
(either because it’s thrown, returned, rejected, or passed to | ||
[`next`][next]), the process stops. | ||
A transformer is a function which is invoked each time a file is | ||
passed through the transform phase. If an error occurs (either | ||
because it’s thrown, returned, rejected, or passed to [`next`][next]), | ||
the process stops. | ||
The transformation process in **unified** is handled by [`trough`][trough], | ||
see its documentation for the exact semantics of transformers. | ||
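For instance, a minimal attacher returning a transformer could look like this (the names are illustrative and not part of the unified API):

```js
function plugin(processor, options) {
  // The attacher runs once, when `use` is invoked.
  return function transformer(tree, file, next) {
    // The transformer runs for every file passed through the
    // transform phase: inspect or change `tree` and `file` here.
    next(); // or return an `Error`, a new tree, or a `Promise`
  };
}
```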
###### Parameters | ||
@@ -327,7 +336,5 @@ | ||
* `Error` — Can be returned to stop the process; | ||
* [**Node**][node] — Can be returned, in which case further | ||
transformations and the `stringify` step are performed on the new | ||
tree; | ||
* `Promise` — If a promise is returned, the function is asynchronous, | ||
@@ -438,8 +445,5 @@ and **must** be resolved (optionally with a [**Node**][node]) or | ||
* `file` ([**VFile**][file]); | ||
* `value` (`string`) — String representation of a file; | ||
* `options` (`Object`, optional) — Configuration for both the parser | ||
and compiler; | ||
* `done` ([`Function`][process-done], optional). | ||
@@ -467,3 +471,3 @@ | ||
Write data the the in-memory buffer. | ||
Write data to the in-memory buffer. | ||
@@ -491,7 +495,5 @@ ###### Parameters | ||
file; | ||
* `error` (`Error`) | ||
— When the process was unsuccessful, triggered with the fatal | ||
error; | ||
* `warning` ([`VFileMessage`][vfilemessage]) | ||
@@ -577,3 +579,3 @@ — Each message created by the plug-ins in the process is triggered | ||
The following example, `index.js`, shows how [**remark**][remark] | ||
The following example, `index.js`, shows how [**rehype**][rehype] | ||
prevents extensions to itself: | ||
@@ -583,4 +585,4 @@ | ||
var unified = require('unified'); | ||
var parse = require('remark-parse'); | ||
var stringify = require('remark-stringify'); | ||
var parse = require('rehype-parse'); | ||
var stringify = require('rehype-stringify'); | ||
@@ -595,9 +597,9 @@ module.exports = unified().use(parse).use(stringify).abstract(); | ||
```js | ||
var remark = require('remark'); | ||
var rehype = require('rehype'); | ||
process.stdin.pipe(remark()).pipe(process.stdout); | ||
process.stdin.pipe(rehype()).pipe(process.stdout); | ||
``` | ||
The below example, `b.js`, shows a similar-looking example which | ||
operates on the abstract [**remark**][remark] interface. If this | ||
operates on the abstract [**rehype**][rehype] interface. If this | ||
behaviour was allowed it would result in unexpected behaviour, so | ||
@@ -607,5 +609,5 @@ an error is thrown. **This is invalid**: | ||
```js | ||
var remark = require('remark'); | ||
var rehype = require('rehype'); | ||
process.stdin.pipe(remark).pipe(process.stdout); | ||
process.stdin.pipe(rehype).pipe(process.stdout); | ||
``` | ||
@@ -651,3 +653,3 @@ | ||
[unified-bridge]: https://github.com/wooorm/unified-bridge | ||
[rehype]: https://github.com/wooorm/rehype | ||
@@ -658,3 +660,3 @@ [remark]: https://github.com/wooorm/remark | ||
[rehype]: https://github.com/wooorm/rehype | ||
[hast]: https://github.com/wooorm/hast | ||
@@ -665,6 +667,6 @@ [mdast]: https://github.com/wooorm/mdast | ||
[hast]: https://github.com/wooorm/hast | ||
[unist]: https://github.com/wooorm/unist | ||
[remark-rehype]: https://github.com/wooorm/remark-rehype | ||
[remark-retext]: https://github.com/wooorm/remark-retext | ||
@@ -723,1 +725,3 @@ | ||
[pipe]: #processorpipestream-options | ||
[trough]: https://github.com/wooorm/trough#function-fninput-next |
License Policy Violation
License: this package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.

Major refactor
Supply chain risk: the package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package.

No repository
Supply chain risk: the package does not have a linked source code repository. Without this field, a package will have no reference to the location of the source code used to generate the package.
Found 1 instance in 1 package.
+ Added has@^1.0.1
+ Added trough@^1.0.0
+ Added has@1.0.4 (transitive)
+ Added trough@1.0.5 (transitive)