documentary
Comparing version 1.34.11 to 1.35.0
#!/usr/bin/env node | ||
const { _source, _output, _toc, _watch, _push, _version, _extract, _h1, | ||
_reverse, _generate, _noCache, _namespace, _help, argsConfig, _wiki, | ||
_types, _focus, _debug } = require('./get-args'); | ||
_types, _focus, _debug, _annotate } = require('./get-args'); | ||
@@ -66,3 +66,3 @@ if (_debug) { | ||
reverse: _reverse, noCache: _noCache, rootNamespace: _namespace, | ||
wiki: _wiki, types: _types, focus: _focus, | ||
wiki: _wiki, types: _types, focus: _focus, annotate: _annotate, | ||
} | ||
@@ -69,0 +69,0 @@ let files |
@@ -63,2 +63,7 @@ const { argufy } = require('../../stdlib'); | ||
}, | ||
'annotate': { | ||
description: 'Place resolved URLs to all documented types into the\n`typedefs.json` file and reference it in `package.json`.', | ||
boolean: true, | ||
short: 'a', | ||
}, | ||
'generate': { | ||
@@ -161,2 +166,8 @@ description: '[Deprecated] Places typedefs definitions into JavaScript\nfiles from types.xml. Use `typal` instead.', | ||
/** | ||
* Place resolved URLs to all documented types into the | ||
 * `typedefs.json` file and reference it in `package.json`. | ||
*/ | ||
const _annotate = /** @type {boolean} */ (args['annotate']) | ||
/** | ||
* [Deprecated] Places typedefs definitions into JavaScript | ||
@@ -202,2 +213,3 @@ files from types.xml. Use `typal` instead. | ||
module.exports._debug = _debug | ||
module.exports._annotate = _annotate | ||
module.exports._generate = _generate | ||
@@ -204,0 +216,0 @@ module.exports._extract = _extract |
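The new `annotate` flag (`-a`) added above is exported from `get-args` and forwarded into the run options as `annotate: _annotate` in the binary. As a rough sketch of what that enables when calling the compiled run function directly — the require path, source layout and Wiki location here are assumptions for illustration, not part of this diff:

```js
// Hypothetical usage sketch; the import path is an assumption, not confirmed by this diff.
const run = require('documentary/build/bin/run');

(async () => {
  const files = await run({
    source: 'documentary',   // documentation source directory (assumed layout)
    wiki: '../project.wiki', // compile pages for the GitHub Wiki at this location
    annotate: true,          // new in 1.35.0: write typedefs.json and reference it in package.json
  })
  console.log('Processed files:', files)
})()
```

With `annotate` set, the run function calls `Annotate(wiki, typedefs.types)` before compiling the pages, as shown in the next file.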
@@ -5,9 +5,66 @@ const { whichStream } = require('../../stdlib'); | ||
const { readDirStructure } = require('../../stdlib'); | ||
const { join } = require('path'); | ||
const { join, dirname, relative, parse, sep } = require('path'); | ||
const { parse: parseUrl } = require('url'); | ||
const Stream = require('stream'); | ||
const { getToc } = require('../lib/Toc'); | ||
const Documentary = require('../lib/Documentary'); | ||
const { getStream } = require('../lib'); | ||
const { getStream, getLink } = require('../lib'); | ||
const { getTypedefs } = require('../lib/Typedefs'); | ||
const { writeFileSync } = require('fs'); | ||
const Annotate = (wiki, types) => { | ||
const packageJson = require(join(process.cwd(), 'package.json')) | ||
const { repository } = packageJson | ||
let github | ||
if (typeof repository == 'string') { | ||
const [portal, name] = repository.split(':') | ||
if (name && portal != 'github') { | ||
throw new Error('Only GitHub is supported for repositories in package.json.') | ||
} else if (name) github = name | ||
else github = repository | ||
} else { | ||
const { url } = repository | ||
const { host, pathname } = parseUrl(url) | ||
if (host != 'github.com') throw new Error('Only GitHub is supported for repositories in package.json.') | ||
github = pathname.replace(/\.git$/, '').replace(/^\//, '') | ||
} | ||
github = `https://github.com/${github}` | ||
let t = null | ||
if (wiki) { | ||
t = types.filter(({ import: i }) => !i).reduce((acc, type) => { | ||
const { name, appearsIn } = type | ||
const [ai] = appearsIn.map((file) => { | ||
let rel = relative(dirname(file), file) | ||
const [bn] = rel.split(sep) | ||
const { name: n } = parse(bn) | ||
return n | ||
}) | ||
const link = getLink(name, 'type') | ||
const r = `${ai}#${link}` | ||
const rr = `${github}/wiki/${r}` | ||
acc[`${type.originalNs}${type.name}`] = { | ||
link: rr, | ||
description: type.description, | ||
} | ||
return acc | ||
}, {}) | ||
} | ||
if (t) { | ||
let current = {} | ||
try { | ||
current = require(join(process.cwd(), 'typedefs.json')) // load an existing typedefs.json from the package root, if present | ||
} catch (err) { /* no existing typedefs.json yet */ } | ||
writeFileSync('typedefs.json', JSON.stringify({ ...current, ...t }, null, 2)) | ||
const newPackageJson = Object.entries(packageJson).reduce((acc, [k, v]) => { | ||
acc[k] = v | ||
if (k == 'repository') { | ||
acc['typedefs'] = 'typedefs.json' | ||
} | ||
return acc | ||
}, {}) | ||
writeFileSync('package.json', JSON.stringify(newPackageJson, null, 2) + '\n') | ||
console.log('Updated package.json to point to typedefs.json') | ||
} | ||
} | ||
/** | ||
@@ -28,2 +85,3 @@ * Run the documentary and save the results. | ||
source, output, reverse, justToc, h1, noCache, rootNamespace, wiki, | ||
annotate, | ||
} = options | ||
@@ -41,6 +99,10 @@ | ||
const { types, locations } = await getTypedefs(stream, rootNamespace, typesLocations, { | ||
wiki, source, | ||
const typedefs = await getTypedefs(stream, rootNamespace, typesLocations, { | ||
wiki, source, recordOriginalNs: annotate, | ||
}) | ||
if (annotate) { | ||
Annotate(wiki, typedefs.types) | ||
} | ||
let assets = [] | ||
@@ -65,4 +127,3 @@ if (wiki) { | ||
...options, | ||
locations, | ||
types, | ||
typedefs, | ||
wiki, | ||
@@ -77,3 +138,3 @@ output: o, | ||
} else { | ||
const doc = await runPage({ source, reverse, locations, types, noCache, h1, justToc, output }) | ||
const doc = await runPage({ source, reverse, typedefs, noCache, h1, justToc, output }) | ||
assets = doc.assets | ||
@@ -85,8 +146,12 @@ if (output) { | ||
return [...Object.keys(locations), ...assets] | ||
return [...Object.keys(typedefs.locations), ...assets] | ||
} | ||
/** | ||
* @param {Object} opts | ||
* @param {import('../lib/Typedefs').default} opts.typedefs | ||
*/ | ||
const runPage = async (opts) => { | ||
const { | ||
source, reverse, locations, types, noCache, h1, justToc, | ||
source, reverse, locations, typedefs, noCache, h1, justToc, | ||
output = '-', wiki, | ||
@@ -97,3 +162,3 @@ } = opts | ||
const doc = new Documentary({ | ||
locations, types, noCache, objectMode: true, wiki, output, source, | ||
locations, typedefs, noCache, objectMode: true, wiki, output, source, | ||
cacheLocation: process.env.DOCUMENTARY_CACHE_LOCATION, | ||
@@ -100,0 +165,0 @@ }) |
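The `Annotate` helper above builds one entry per non-imported type, keyed by its original namespace plus name, pointing at the Wiki page and anchor where the type is documented, and merges the result into any existing `typedefs.json`. A hypothetical result is shown below; the repository, page and anchor names are illustrative only, and the exact anchor format comes from `getLink`:

```json
{
  "_example.Config": {
    "link": "https://github.com/example-org/example/wiki/API#type-config",
    "description": "Options for the example program."
  }
}
```

`package.json` is then rewritten with a `typedefs` field pointing at `typedefs.json`, inserted immediately after the `repository` key.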
@@ -16,2 +16,5 @@ const { h } = require('preact'); | ||
}, | ||
'include-typedefs'({ documentary }) { | ||
return documentary.removeLine() | ||
}, | ||
} | ||
@@ -18,0 +21,0 @@ |
@@ -48,6 +48,8 @@ const { h } = require('preact'); | ||
locations, allTypes, cut: { code: cutCode }, | ||
_args: { wiki, source }, currentFile, | ||
_args: { wiki, source }, currentFile, _typedefs, | ||
} = documentary | ||
const file = wiki ? source : currentFile | ||
const at = [...allTypes, ..._typedefs.included] | ||
documentary.setPretty(false) | ||
@@ -81,3 +83,3 @@ let [location] = children | ||
if (!type.isMethod) { | ||
const res = type.toMarkdown(allTypes, opts) | ||
const res = type.toMarkdown(at, opts) | ||
if (level) res.LINE = res.LINE.replace(/t-type/, `${'#'.repeat(level)}-type`) | ||
@@ -87,3 +89,3 @@ return res | ||
const LINE = Method({ documentary, level, method: type, noArgTypesInToc }) | ||
const table = makeMethodTable(type, allTypes, opts) | ||
const table = makeMethodTable(type, at, opts) | ||
return { LINE, table, examples: type.examples } | ||
@@ -96,3 +98,3 @@ }) | ||
const j = importsToMd.map(i => i.toMarkdown(allTypes, { flatten })) | ||
const j = importsToMd.map(i => i.toMarkdown(at, { flatten })) | ||
@@ -104,3 +106,3 @@ const ttt = tt.map((s, i) => { | ||
const ch = isObject ? h(Narrow,{...type,key:i, | ||
documentary:documentary, allTypes:allTypes, opts:opts, | ||
documentary:documentary, allTypes:at, opts:opts, | ||
slimFunctions:slimFunctions | ||
@@ -107,0 +109,0 @@ }) : type |
@@ -74,5 +74,4 @@ const { Replaceable, makeMarkers, makeCutRule, makePasteRule } = require('../../stdlib'); | ||
/** | ||
* @param {DocumentaryOptions} options Options for the Documentary constructor. | ||
* @param {!Object<string, !Array<_typal.Type>} [options.locations] The source locations of types, e.g., types/index.xml. | ||
* @param {!Array<_typal.Type>} [options.types] All extracted types across every processed file. | ||
* @param {Object} options Options for the Documentary constructor. | ||
* @param {import('./Typedefs').default} [options.typedefs] Pre-extracted typedefs. | ||
* @param {string} [options.wiki] If processing Wiki, specifies the output location. | ||
@@ -88,3 +87,3 @@ * @param {string} [options.source] The location of the source file or directory from which the documentation is compiled. | ||
const { | ||
locations = {}, types: allTypes = /** @type {!Array<_typal.Type>} */ ([]), | ||
typedefs, | ||
cwd = CWD, cacheLocation = join(cwd, '.documentary/cache'), | ||
@@ -168,3 +167,3 @@ noCache, disableDtoc, objectMode = true /* deprec */, | ||
replacement(match, location, typeName) { | ||
const types = locations[location] | ||
const types = this.locations[location] | ||
if (!types) { | ||
@@ -176,3 +175,3 @@ LOG('No types for location %s.', location) | ||
const res = t.map((type) => { | ||
const { LINE, table: tb } = type.toMarkdown(allTypes) | ||
const { LINE, table: tb } = type.toMarkdown(this.allTypes) | ||
return `${LINE}${tb}` | ||
@@ -259,11 +258,29 @@ }).join('\n\n') | ||
} | ||
/** | ||
* The source locations of types, e.g., types/index.xml. | ||
*/ | ||
this.locations = locations | ||
/** | ||
* All extracted types across every processed file. | ||
*/ | ||
this.allTypes = allTypes | ||
this._typedefs = typedefs | ||
if (this._typedefs) { | ||
const imports = this._typedefs.types.filter(({ import: i }) => i) | ||
this._typedefs.included.forEach(({ description, fullName: k, link }) => { | ||
const i = imports.find(({ fullName }) => fullName == k) | ||
if (!i) return | ||
if (!i.link) i.link = link | ||
if (!i.description) i.description = description | ||
}) | ||
} | ||
} | ||
/** | ||
* The source locations of types, e.g., types/index.xml. | ||
*/ | ||
get locations() { | ||
if (this._typedefs) return this._typedefs.locations | ||
return {} | ||
} | ||
/** | ||
* All extracted types across every processed file. | ||
*/ | ||
get allTypes() { | ||
if (this._typedefs) { | ||
return this._typedefs.types | ||
} | ||
return [] | ||
} | ||
@@ -270,0 +287,0 @@ /** |
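The constructor above now stores the `Typedefs` instance and back-fills imported types whose `link` or `description` is missing from the records gathered via `include-typedefs`. A self-contained sketch of that merge, using a hypothetical type name and URL:

```js
// Illustrative only: mirrors the back-fill loop in the Documentary constructor above.
const imports = [
  { fullName: '_example.Config', link: undefined, description: undefined },
]
const included = [
  {
    fullName: '_example.Config',
    link: 'https://github.com/example-org/example/wiki/API#type-config',
    description: 'Options for the example program.',
  },
]
included.forEach(({ description, fullName: k, link }) => {
  const i = imports.find(({ fullName }) => fullName == k)
  if (!i) return
  if (!i.link) i.link = link
  if (!i.description) i.description = description
})
console.log(imports[0].link) // now resolves to the included Wiki URL
```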
@@ -9,2 +9,5 @@ const { createReadStream, lstatSync } = require('fs'); | ||
/** | ||
* @param {string} title | ||
*/ | ||
const getLink = (title, prefix = '') => { | ||
@@ -11,0 +14,0 @@ const codes = {} |
const { Replaceable, replace } = require('../../stdlib'); | ||
const { collect } = require('../../stdlib'); | ||
const { relative, sep, join } = require('path'); | ||
const { relative, sep, join, resolve } = require('path'); | ||
const { typedefMdRe } = require('./rules/typedef-md'); | ||
@@ -13,42 +13,2 @@ const { read } = require('./'); | ||
const nodeAPI = { | ||
'http.IncomingMessage': { | ||
link: 'https://nodejs.org/api/http.html#http_class_http_incomingmessage', | ||
desc: 'A readable stream that receives data from the client in chunks. The first argument of the http.Server.on("request") event.', | ||
}, | ||
'http.Server': { | ||
link: 'https://nodejs.org/api/http.html#http_class_http_server', | ||
desc: 'An HTTP server that extends net.Server to handle network requests.', | ||
}, | ||
'http.ServerResponse': { | ||
// link: 'https://nodejs.org/api/http.html#http_response_socket', | ||
link: 'https://nodejs.org/api/http.html#http_class_http_serverresponse', | ||
desc: 'A writable stream that communicates data to the client. The second argument of the http.Server.on("request") event.', | ||
}, | ||
'http.OutgoingHttpHeaders': { | ||
link: 'https://nodejs.org/api/http.html', | ||
desc: 'The headers hash map for making requests, including such properties as Content-Encoding, Content-Type, etc.', | ||
}, | ||
'http.IncomingHttpHeaders': { | ||
link: 'https://nodejs.org/api/http.html', | ||
desc: 'The hash map of headers that are set by the server (e.g., when accessed via IncomingMessage.headers)', | ||
}, | ||
'url.URL': { | ||
link: 'https://nodejs.org/api/url.html#url_class_url', | ||
desc: 'Browser-compatible URL class, implemented by following the `WHATWG` URL Standard.', | ||
}, | ||
'net.Socket': { | ||
link: 'https://nodejs.org/api/net.html#net_class_net_socket', | ||
desc: 'A two-way communication channel between clients and servers.', | ||
}, | ||
'stream.Stream': { | ||
link: 'https://nodejs.org/api/stream.html#stream', | ||
desc: 'Handles streaming data in Node.js.', | ||
}, | ||
'events.EventEmitter': { | ||
link: 'https://nodejs.org/api/events.html#events_class_eventemitter', | ||
desc: 'Emits named events that cause listeners to be called.', | ||
}, | ||
} | ||
/** | ||
@@ -58,3 +18,3 @@ * A Typedefs class will detect and store in a map all type definitions embedded into the documentation. | ||
class Typedefs extends Replaceable { | ||
constructor(rootNamespace, { wiki, source } = {}) { | ||
constructor(rootNamespace, { wiki, source, recordOriginalNs } = {}) { | ||
super([ | ||
@@ -87,2 +47,8 @@ { | ||
const { types, imports } = parseFile(xml, rootNamespace, location) | ||
if (recordOriginalNs) { | ||
const { types: types2 } = parseFile(xml) | ||
types2.forEach(({ ns }, i) => { | ||
types[i].originalNs = ns | ||
}) | ||
} | ||
@@ -105,2 +71,7 @@ this.emit('types', { | ||
this.types = [] | ||
/** @type {!Array<{fullName: string, link: string, description:string }>} */ | ||
this.included = [] | ||
/** | ||
* The locations of read types. | ||
*/ | ||
this.locations = {} | ||
@@ -193,3 +164,3 @@ this.on('types', ({ location, types, typeName, file, link = '' }) => { | ||
* @param {Stream} stream | ||
* @param {string} [namespace] | ||
* @param {string} [namespace] The root namespace. | ||
* @param {Array<string>} [typesLocations] | ||
@@ -224,6 +195,21 @@ * @param {Object} [options] | ||
}, | ||
'include-typedefs'({ children }) { | ||
let [loc] = children | ||
loc = loc.trim() || 'typedefs.json' | ||
const data = require(resolve(loc)) | ||
Object.entries(data).forEach(([k, { description, link }]) => { | ||
const n = `${namespace}.` | ||
if (namespace && k.startsWith(n)) k = k.replace(n, '') | ||
const t = { | ||
fullName: k, | ||
link, | ||
description, | ||
} | ||
this.included.push(t) | ||
}) | ||
}, | ||
}) | ||
// const r = new Replaceable(c) | ||
stream.pipe(new Transform({ | ||
const t = new Transform({ | ||
async transform({ data, file }, enc, next) { | ||
@@ -235,3 +221,6 @@ if (!data && !file) | ||
const d = await replace(new Replaceable(c), data) | ||
// Competent components | ||
const r = new Replaceable(c) | ||
r.included = typedefs.included | ||
const d = await replace(r, data) | ||
this.push({ data: d, file }) | ||
@@ -241,3 +230,7 @@ next() | ||
objectMode: true, | ||
})).pipe(typedefs) | ||
}) | ||
t.write({ data: `<include-typedefs> | ||
${resolve(__dirname, '../../typedefs.json')} | ||
</include-typedefs>`, file: 'fake.md' }) | ||
stream.pipe(t).pipe(typedefs) | ||
@@ -257,12 +250,3 @@ await collect(typedefs) | ||
} | ||
if (type.import) { | ||
const api = nodeAPI[type.fullName] | ||
if (api) { | ||
if (!type.link) type.link = api.link | ||
if (!type.description) type.description = api.desc | ||
} | ||
} | ||
}) | ||
// const { types, locations } = typedefs | ||
@@ -269,0 +253,0 @@ return typedefs |
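Putting the `include-typedefs` rule above to use, a documentation source page can reference any `typedefs.json` on disk: a whitespace-only body falls back to `typedefs.json`, and relative paths are resolved against the current working directory. The dependency path below is a hypothetical example:

```markdown
<include-typedefs>
</include-typedefs>

<include-typedefs>
  node_modules/@example/dep/typedefs.json
</include-typedefs>
```

The matching renderer component simply removes the line from the output, and the stream also auto-includes documentary's own `typedefs.json`, which is now shipped with the package (see the `files` entry in `package.json` below).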
## 19 December 2019 | ||
### [1.35.0](https://github.com/artdecocode/documentary/compare/v1.34.11...v1.35.0) | ||
- [feature] Implement `<include-typedefs>` component to include annotations and `-a` flag to generate them from Wikis. | ||
- [feature] Add `<type-link>` component to link to a type on another page. | ||
### [1.34.11](https://github.com/artdecocode/documentary/compare/v1.34.10...v1.34.11) | ||
@@ -4,0 +9,0 @@ |
{ | ||
"name": "documentary", | ||
"version": "1.34.11", | ||
"version": "1.35.0", | ||
"description": "Documentation Compiler To Generate The Table Of Contents, Embed Examples With Their Output, Make Markdown Tables, Maintain Typedefs For JavaScript And README, Watch Changes To Push, Use Macros And Prettify API Titles.", | ||
@@ -39,3 +39,4 @@ "main": "build", | ||
"web-components.json", | ||
"stdlib" | ||
"stdlib", | ||
"typedefs.json" | ||
], | ||
@@ -42,0 +43,0 @@ "repository": { |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package.