Comparing version 1.13.4 to 1.14.0
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,5 +5,6 @@ * @module compiler | ||
const path = require("path"); | ||
const {isFunction} = require("./utils"); | ||
import * as path from "node:path"; | ||
import {isFunction} from "./utils.js"; | ||
/** | ||
@@ -60,2 +59,2 @@ * @description Template compiler. | ||
module.exports = Compiler; | ||
export default Compiler; |
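All of the source files in this release get the same CommonJS-to-ESM conversion, so the per-file diffs below mostly repeat one pattern. A minimal before/after sketch of that pattern (the class body is a placeholder, not the actual Compiler source):

```
// Before (1.13.4, CommonJS):
//   "use strict";
//   const path = require("path");
//   const {isFunction} = require("./utils");
//   ...
//   module.exports = Compiler;

// After (1.14.0, ESM): built-ins use the `node:` prefix, local imports need an
// explicit `.js` extension, and `module.exports` becomes `export default`.
import * as path from "node:path";
import {isFunction} from "./utils.js";

class Compiler {
  // ...class body unchanged by the migration...
}

export default Compiler;
```

`"use strict";` disappears everywhere because ES modules are always in strict mode.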
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,4 +5,4 @@ * @module decorator | ||
const {File} = require("./types"); | ||
const {isFunction, isString, getFullDocPath} = require("./utils"); | ||
import {File} from "./types.js"; | ||
import {isFunction, isString, getFullDocPath} from "./utils.js"; | ||
@@ -107,2 +105,2 @@ /** | ||
module.exports = Decorator; | ||
export default Decorator; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,3 +5,3 @@ * @module generator | ||
const {isArray, isFunction} = require("./utils"); | ||
import {isArray, isFunction} from "./utils.js"; | ||
@@ -62,2 +60,2 @@ /** | ||
module.exports = Generator; | ||
export default Generator; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,23 +5,29 @@ * @module hikaru | ||
const path = require("path"); | ||
import * as path from "node:path"; | ||
const fse = require("fs-extra"); | ||
const YAML = require("yaml"); | ||
const nunjucks = require("nunjucks"); | ||
const {marked} = require("marked"); | ||
import fse from "fs-extra"; | ||
import YAML from "yaml"; | ||
import nunjucks from "nunjucks"; | ||
import {marked} from "marked"; | ||
const Logger = require("./logger"); | ||
const Watcher = require("./watcher"); | ||
const Renderer = require("./renderer"); | ||
const Compiler = require("./compiler"); | ||
const Processor = require("./processor"); | ||
const Generator = require("./generator"); | ||
const Decorator = require("./decorator"); | ||
const Translator = require("./translator"); | ||
const Router = require("./router"); | ||
const types = require("./types"); | ||
import Logger from "./logger.js"; | ||
import Watcher from "./watcher.js"; | ||
import Renderer from "./renderer.js"; | ||
import Compiler from "./compiler.js"; | ||
import Processor from "./processor.js"; | ||
import Generator from "./generator.js"; | ||
import Decorator from "./decorator.js"; | ||
import Translator from "./translator.js"; | ||
import Router from "./router.js"; | ||
import * as types from "./types.js"; | ||
import * as utils from "./utils.js"; | ||
const {Site, File} = types; | ||
const utils = require("./utils"); | ||
const { | ||
hikaruDir, | ||
loadJSON, | ||
loadYAML, | ||
loadYAMLSync, | ||
isString, | ||
isObject, | ||
isReadableSync, | ||
matchFiles, | ||
@@ -120,5 +124,2 @@ paginate, | ||
}\`...`); | ||
this.logger.debug(`Hikaru is copying \`${ | ||
this.logger.cyan(path.join(siteDir, "package.json")) | ||
}\`...`); | ||
this.logger.debug(`Hikaru is creating \`${ | ||
@@ -137,16 +138,5 @@ this.logger.cyan(path.join(siteDir, "srcs", path.sep)) | ||
fse.copy( | ||
path.join(__dirname, "..", "dists", "site-config.yaml"), | ||
path.join(hikaruDir, "dists", "site-config.yaml"), | ||
siteConfigPath | ||
); | ||
fse.readFile( | ||
path.join(__dirname, "..", "dists", "package.json") | ||
).then((text) => { | ||
const json = JSON.parse(text); | ||
// Set package name to site dir name. | ||
json["name"] = path.relative(path.join("..", siteDir), siteDir); | ||
return fse.outputFile( | ||
path.join(siteDir, "package.json"), | ||
JSON.stringify(json, null, " ") | ||
); | ||
}); | ||
fse.mkdirp(path.join(siteDir, "srcs")); | ||
@@ -260,7 +250,7 @@ fse.mkdirp(path.join(siteDir, "docs")); | ||
let defaultSiteConfigPath = path.join(siteDir, "site-config.yaml"); | ||
if (!fse.existsSync(defaultSiteConfigPath)) { | ||
if (!isReadableSync(defaultSiteConfigPath)) { | ||
this.logger.warn(`Hikaru suggests you to rename \`${ | ||
this.logger.cyan(path.join(siteDir, "siteConfig.yml")) | ||
}\` to \`${ | ||
this.logger.cyan(path.join(siteDir, "site-config.yaml")) | ||
}\` because it's deprecated!`); | ||
@@ -276,7 +266,4 @@ defaultSiteConfigPath = path.join(siteDir, "siteConfig.yml"); | ||
try { | ||
siteConfig = YAML.parse( | ||
// Only site config and theme config can use readFileSync | ||
// because they are basic. | ||
fse.readFileSync(siteConfigPath, "utf8") | ||
); | ||
// Only site config and theme config may block (load synchronously) because they are basic. | ||
siteConfig = loadYAMLSync(siteConfigPath); | ||
} catch (error) { | ||
@@ -327,7 +314,7 @@ this.logger.warn("Hikaru cannot load site config!"); | ||
let defaultThemeConfigPath = path.join(siteDir, "theme-config.yaml"); | ||
if (!fse.existsSync(defaultThemeConfigPath)) { | ||
if (!isReadableSync(defaultThemeConfigPath)) { | ||
this.logger.warn(`Hikaru suggests you to rename \`${ | ||
this.logger.cyan(path.join(siteDir, "themeConfig.yml")) | ||
}\` to \`${ | ||
this.logger.cyan(path.join(siteDir, "theme-config.yaml")) | ||
}\` because it's deprecated!`); | ||
@@ -343,7 +330,4 @@ defaultThemeConfigPath = path.join(siteDir, "themeConfig.yml"); | ||
try { | ||
themeConfig = YAML.parse( | ||
// Only site config and theme config can use readFileSync | ||
// because they are basic. | ||
fse.readFileSync(themeConfigPath, "utf8") | ||
); | ||
// Only site config and theme config may block (load synchronously) because they are basic. | ||
themeConfig = loadYAMLSync(themeConfigPath); | ||
} catch (error) { | ||
@@ -376,3 +360,3 @@ if (error["code"] === "ENOENT") { | ||
try { | ||
rawFileDependencies = YAML.parse(await fse.readFile(filepath, "utf8")); | ||
rawFileDependencies = await loadYAML(filepath); | ||
} catch (error) { | ||
@@ -434,20 +418,42 @@ // Should work if theme author does not provide such a file. | ||
const sitePkgPath = path.join(this.site["siteDir"], "package.json"); | ||
if (!fse.existsSync(sitePkgPath)) { | ||
let sitePkgJSON; | ||
try { | ||
sitePkgJSON = await loadJSON(sitePkgPath); | ||
} catch (error) { | ||
return null; | ||
} | ||
const plugins = JSON.parse( | ||
await fse.readFile(sitePkgPath, "utf8") | ||
)["dependencies"]; | ||
if (plugins == null) { | ||
if (sitePkgJSON["dependencies"] == null) { | ||
return null; | ||
} | ||
return Promise.all(Object.keys(plugins).filter((name) => { | ||
const plugins = Object.keys(sitePkgJSON["dependencies"]).filter((name) => { | ||
return /^hikaru-/.test(name); | ||
}).map((name) => { | ||
const modulePath = path.join(this.site["siteDir"], "node_modules", name); | ||
}); | ||
return Promise.all(plugins.map(async (name) => { | ||
this.logger.debug(`Hikaru is loading plugin \`${ | ||
this.logger.blue(name) | ||
}\`...`); | ||
// Use absolute path to load from siteDir instead of program dir. | ||
return require(path.resolve(modulePath))({ | ||
const pluginDir = path.join(this.site["siteDir"], "node_modules", name); | ||
// Unlike `require()`, `import()` does not check entries in `package.json` | ||
// if you pass a path, so we do this manually. | ||
let pluginPkgJSON; | ||
try { | ||
pluginPkgJSON = await loadJSON(path.join(pluginDir, "package.json")); | ||
} catch (error) { | ||
// Plugin should be a valid package. | ||
return null; | ||
} | ||
let pluginPath; | ||
if (pluginPkgJSON["exports"] != null && | ||
isString(pluginPkgJSON["exports"])) { | ||
// Could be an Object, but we don't accept this as a plugin. | ||
pluginPath = path.resolve(pluginDir, pluginPkgJSON["exports"]); | ||
} else if (pluginPkgJSON["main"] != null) { | ||
// If exists, main is always a string. | ||
pluginPath = path.resolve(pluginDir, pluginPkgJSON["main"]); | ||
} else { | ||
pluginPath = path.resolve(pluginDir, "index.js"); | ||
} | ||
// import is a keyword, not a function. | ||
const module = await import(pluginPath); | ||
return module["default"]({ | ||
"logger": this.logger, | ||
@@ -487,3 +493,3 @@ "watcher": this.watcher, | ||
})); | ||
return Promise.all(scripts.map((filepath) => { | ||
return Promise.all(scripts.map(async (filepath) => { | ||
this.logger.debug(`Hikaru is loading script \`${ | ||
@@ -493,3 +499,5 @@ this.logger.cyan(filepath) | ||
// Use absolute path to load from siteDir instead of program dir. | ||
return require(path.resolve(filepath))({ | ||
// import is a keyword, not a function. | ||
const module = await import(path.resolve(filepath)); | ||
return module["default"]({ | ||
"logger": this.logger, | ||
@@ -709,3 +717,3 @@ "watcher": this.watcher, | ||
// fallback to read from disk. | ||
if (!fse.existsSync(srcPath)) { | ||
if (!isReadableSync(srcPath)) { | ||
return null; | ||
@@ -798,6 +806,6 @@ } | ||
this.processor.register("content resolving", (site) => { | ||
this.processor.register("content resolving", async (site) => { | ||
const all = site["posts"].concat(site["pages"]); | ||
// It turns out that single threaded resolving works faster, | ||
// and takes less memory, because Node.js Workers needs to copy message. | ||
// and takes less memory, because Node.js Workers need to copy messages. | ||
for (const p of all) { | ||
@@ -814,3 +822,3 @@ const node = parseNode(p["content"]); | ||
resolveImages(node, site["siteConfig"]["rootDir"], p["docPath"]); | ||
resolveCodeBlocks(node, site["siteConfig"]["highlight"]); | ||
await resolveCodeBlocks(node, site["siteConfig"]["highlight"]); | ||
p["content"] = serializeNode(node); | ||
@@ -937,2 +945,2 @@ if (p["content"].indexOf("<!--more-->") !== -1) { | ||
module.exports = Hikaru; | ||
export default Hikaru; |
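The most involved change in this file is plugin and script loading: `require()` is replaced by dynamic `import()`, which does not resolve a directory's entry file from its `package.json`, so the diff resolves it by hand (string `exports`, then `main`, then `index.js`). A condensed sketch of that flow, assuming `fs-extra` is available; the helper name `loadPlugin` is illustrative, not from the source:

```
import * as path from "node:path";
import fse from "fs-extra";

// Illustrative helper: resolve a plugin's entry file manually, because
// `import()` given a directory path does not consult its `package.json`.
const loadPlugin = async (pluginDir, context) => {
  const pkgJSON = JSON.parse(
    await fse.readFile(path.join(pluginDir, "package.json"), "utf8")
  );
  let entry = "index.js";
  if (typeof pkgJSON["exports"] === "string") {
    // `exports` may also be an Object, which is not accepted here.
    entry = pkgJSON["exports"];
  } else if (pkgJSON["main"] != null) {
    // When present, `main` is always a string.
    entry = pkgJSON["main"];
  }
  // `import()` returns a Promise of the module namespace object; plugins are
  // expected to export a default function that receives the Hikaru context.
  const module = await import(path.resolve(pluginDir, entry));
  return module["default"](context);
};
```

Site scripts go through the same `await import(path.resolve(filepath))` call, which is why the surrounding `map()` callbacks became `async`.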
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,12 +5,13 @@ * @module index | ||
const {Command} = require("commander"); | ||
const pkg = require("../package.json"); | ||
const Hikaru = require("./hikaru"); | ||
import {Command} from "commander"; | ||
import Hikaru from "./hikaru.js"; | ||
import {pkgJSON, getVersion} from "./utils.js"; | ||
const command = new Command(); | ||
command | ||
.version(pkg["version"], "-v, --version", "Print version number.") | ||
.version(getVersion(), "-v, --version", "Print version number.") | ||
.usage("<subcommand> [options] [dir]") | ||
.description(pkg["description"]) | ||
.description(pkgJSON["description"]) | ||
// Overwrite default help option description. | ||
@@ -92,4 +91,6 @@ .helpOption("-h, --help", "Print help infomation.") | ||
*/ | ||
module.exports = (argv = process.argv) => { | ||
const hikaru = (argv = process.argv) => { | ||
command.parse(argv); | ||
}; | ||
export default hikaru; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,3 +5,3 @@ * @module logger | ||
const {isFunction} = require("./utils"); | ||
import {isFunction} from "./utils.js"; | ||
@@ -136,2 +134,2 @@ /** | ||
module.exports = Logger; | ||
export default Logger; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,3 +5,3 @@ * @module processor | ||
const {isFunction} = require("./utils"); | ||
import {isFunction} from "./utils.js"; | ||
@@ -56,2 +54,2 @@ /** | ||
module.exports = Processor; | ||
export default Processor; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,6 +5,7 @@ * @module renderer | ||
const path = require("path"); | ||
const {File} = require("./types"); | ||
const {getFullSrcPath, getFullDocPath, isFunction} = require("./utils"); | ||
import * as path from "node:path"; | ||
import {File} from "./types.js"; | ||
import {getFullSrcPath, getFullDocPath, isFunction} from "./utils.js"; | ||
/** | ||
@@ -102,2 +101,2 @@ * @description File renderer. | ||
module.exports = Renderer; | ||
export default Renderer; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,6 +5,7 @@ * @module router | ||
const fse = require("fs-extra"); | ||
const http = require("http"); | ||
const {Site, File} = require("./types"); | ||
const { | ||
import fse from "fs-extra"; | ||
import * as http from "node:http"; | ||
import {Site, File} from "./types.js"; | ||
import { | ||
isArray, | ||
@@ -30,3 +29,3 @@ isString, | ||
parseFrontMatter | ||
} = require("./utils"); | ||
} from "./utils.js"; | ||
@@ -397,2 +396,2 @@ /** | ||
module.exports = Router; | ||
export default Router; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,4 +5,4 @@ * @module translator | ||
const {format} = require("util"); | ||
const {isObject, isArray, isString} = require("./utils"); | ||
import {format} from "node:util"; | ||
import {isObject, isArray, isString} from "./utils.js"; | ||
@@ -108,2 +106,2 @@ /** | ||
module.exports = Translator; | ||
export default Translator; |
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -190,3 +188,3 @@ * @module types | ||
module.exports = { | ||
export { | ||
Site, | ||
@@ -193,0 +191,0 @@ File, |
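types.js now uses named exports, which the rest of the code consumes either as a namespace or by destructuring (see the hikaru.js imports above). A tiny sketch of both styles:

```
// Namespace import plus destructuring, as hikaru.js does:
import * as types from "./types.js";
const {Site, File} = types;

// Or pick the names directly, as utils.js does:
import {Category, Tag, TOC} from "./types.js";
```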
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,18 +5,61 @@ * @module utils | ||
const path = require("path"); | ||
import * as path from "node:path"; | ||
const YAML = require("yaml"); | ||
const parse5 = require("parse5"); | ||
const readdirp = require("readdirp"); | ||
const picomatch = require("picomatch"); | ||
// OMG you are adding new dependency! Why not implement it yourself? | ||
// Calm down, it has no dependency so just give it a chance. | ||
// And its code is a little bit long. | ||
const {isBinaryFile, isBinaryFileSync} = require("isbinaryfile"); | ||
import fse from "fs-extra"; | ||
import YAML from "yaml"; | ||
import * as parse5 from "parse5"; | ||
import readdirp from "readdirp"; | ||
import picomatch from "picomatch"; | ||
// This works better than what I wrote and it has no dependency. | ||
import {isBinaryFile, isBinaryFileSync} from "isbinaryfile"; | ||
const {Site, File, Category, Tag, TOC} = require("./types"); | ||
const pkg = require("../package.json"); | ||
const extMIME = require("../dists/ext-mime.json"); | ||
import {Site, File, Category, Tag, TOC} from "./types.js"; | ||
/** | ||
* @description Hikaru's package dir. | ||
*/ | ||
const hikaruDir = path.resolve( | ||
path.dirname(new URL(import.meta.url).pathname), | ||
"../" | ||
); | ||
/** | ||
* @param {String} path | ||
* @return {Promise<Object>} | ||
*/ | ||
const loadJSON = async (path) => { | ||
return JSON.parse(await fse.readFile(path, "utf8")); | ||
}; | ||
/** | ||
* @param {String} path | ||
* @return {Object} | ||
*/ | ||
const loadJSONSync = (path) => { | ||
return JSON.parse(fse.readFileSync(path, "utf8")); | ||
}; | ||
/** | ||
* @private | ||
* @description This is Hikaru's package.json, it is used internally. | ||
*/ | ||
const pkgJSON = loadJSONSync(path.join(hikaruDir, "package.json")); | ||
/** | ||
* @param {String} path | ||
* @return {Promise<Object>} | ||
*/ | ||
const loadYAML = async (path) => { | ||
return YAML.parse(await fse.readFile(path, "utf8")); | ||
}; | ||
/** | ||
* @param {String} path | ||
* @return {Object} | ||
*/ | ||
const loadYAMLSync = (path) => { | ||
return YAML.parse(fse.readFileSync(path, "utf8")); | ||
}; | ||
/** | ||
* @param {*} o | ||
@@ -75,2 +116,19 @@ * @return {Boolean} | ||
/** | ||
* @description Node.js marks `fs.exists()` as deprecated and suggests using | ||
* `fs.access()` instead, but that throws an error rather than returning a | ||
* boolean, so this is a wrapper for it. | ||
* @see https://nodejs.org/api/fs.html#fsaccesssyncpath-mode | ||
* @param {String} path | ||
* @return {Boolean} | ||
*/ | ||
const isReadableSync = (path) => { | ||
try { | ||
fse.accessSync(path, fse.constants.R_OK); | ||
return true; | ||
} catch (error) { | ||
return false; | ||
} | ||
}; | ||
/** | ||
* @description Escape HTML chars. | ||
@@ -227,2 +285,7 @@ * @param {String} str | ||
/** | ||
* @private | ||
* @description This is only used for serve. | ||
*/ | ||
let extMIME = null; | ||
/** | ||
* @description Detect Content-Type via filename. | ||
@@ -233,2 +296,5 @@ * @param {String} docPath | ||
const getContentType = (docPath) => { | ||
if (extMIME == null) { | ||
extMIME = loadJSONSync(path.join(hikaruDir, "hikaru", "ext-mime.json")); | ||
} | ||
return extMIME[path.extname(docPath)] || "application/octet-stream"; | ||
@@ -649,4 +715,3 @@ }; | ||
/** | ||
* @description Quick and not so dirty way to replace a Node with given HTML string, | ||
* if more then one node parsed from string, only use the first one. | ||
* @description Quick and not so dirty way to replace a Node with given HTML string. | ||
* @param {Object} node parse5 Node to replace. | ||
@@ -656,7 +721,15 @@ * @param {String} html | ||
const replaceNode = (node, html) => { | ||
if (node["parentNode"] != null && html != null) { | ||
const newNode = parseNode(node["parentNode"], html); | ||
if (newNode["childNodes"].length > 0) { | ||
newNode["childNodes"][0]["parentNode"] = node["parentNode"]; | ||
Object.assign(node, newNode["childNodes"][0]); | ||
const parentNode = node["parentNode"]; | ||
if (parentNode != null && html != null) { | ||
const newNode = parseNode(html); | ||
if (newNode["childNodes"] != null && newNode["childNodes"].length > 0) { | ||
const index = parentNode["childNodes"].indexOf(node); | ||
parentNode["childNodes"].splice( | ||
index, | ||
1, | ||
...newNode["childNodes"].map((childNode) => { | ||
childNode["parentNode"] = parentNode; | ||
return childNode; | ||
}) | ||
); | ||
} | ||
@@ -735,5 +808,4 @@ } | ||
const setNodeText = (node, html) => { | ||
// Add HTML to childNodes via parsing and replacing | ||
// to keep tree reference, and skip the parse5-generated | ||
// `#document-fragment` node. | ||
// Add HTML to childNodes via parsing and replacing to keep tree reference, | ||
// and skip the parse5-generated `#document-fragment` node. | ||
// Text nodes have no childNode. | ||
@@ -743,5 +815,5 @@ // Only append to nodes that already have childNodes. | ||
// Don't forget to replace childNode's parentNode. | ||
node["childNodes"] = parseNode(node, html)["childNodes"].map((c) => { | ||
c["parentNode"] = node; | ||
return c; | ||
node["childNodes"] = parseNode(html)["childNodes"].map((childNode) => { | ||
childNode["parentNode"] = node; | ||
return childNode; | ||
}); | ||
@@ -793,12 +865,12 @@ } | ||
/** | ||
* @description Update headers' ID for bootstrap scrollspy. | ||
* @description Update headings' ID for bootstrap scrollspy. | ||
* @param {Object} node parse5 Node. | ||
*/ | ||
const resolveHeadingIDs = (node) => { | ||
const headerNames = ["h1", "h2", "h3", "h4", "h5", "h6"]; | ||
const headerIDs = {}; | ||
const headerNodes = nodesFilter(node, (node) => { | ||
return headerNames.includes(node["tagName"]); | ||
const headingNames = ["h1", "h2", "h3", "h4", "h5", "h6"]; | ||
const headingIDs = {}; | ||
const headingNodes = nodesFilter(node, (node) => { | ||
return headingNames.includes(node["tagName"]); | ||
}); | ||
for (const node of headerNodes) { | ||
for (const node of headingNodes) { | ||
const text = getNodeText(node); | ||
@@ -811,11 +883,11 @@ if (text != null) { | ||
); | ||
const id = headerIDs[encoded] == null | ||
const id = headingIDs[encoded] == null | ||
? encoded | ||
: `${encoded}-${headerIDs[encoded]++}`; | ||
: `${encoded}-${headingIDs[encoded]++}`; | ||
// If we have `abc`, `abc` and `abc-1`, | ||
// we must save the `abc-1` generated by the second `abc`, | ||
// to prevent 2 `abc-1` for the last `abc-1`. | ||
headerIDs[id] = 1; | ||
headingIDs[id] = 1; | ||
setNodeAttr(node, "id", id); | ||
setNodeText(node, `<a class="header-link" href="#${id}"></a>${text}`); | ||
setNodeText(node, `<a class="heading-link header-link" href="#${id}"></a>${text}`); | ||
} | ||
@@ -828,18 +900,18 @@ } | ||
/** | ||
* @description Generate TOC from HTML headers. | ||
* @description Generate TOC from HTML headings. | ||
* @param {Object} node parse5 Node. | ||
*/ | ||
const genTOC = (node) => { | ||
const headerNames = ["h1", "h2", "h3", "h4", "h5", "h6"]; | ||
const headingNames = ["h1", "h2", "h3", "h4", "h5", "h6"]; | ||
const toc = []; | ||
const headerNodes = nodesFilter(node, (node) => { | ||
return headerNames.includes(node["tagName"]); | ||
const headingNodes = nodesFilter(node, (node) => { | ||
return headingNames.includes(node["tagName"]); | ||
}); | ||
for (const node of headerNodes) { | ||
for (const node of headingNodes) { | ||
let level = toc; | ||
while ( | ||
level.length > 0 && | ||
headerNames.indexOf( | ||
headingNames.indexOf( | ||
level[level.length - 1]["name"] | ||
) < headerNames.indexOf(node["tagName"]) | ||
) < headingNames.indexOf(node["tagName"]) | ||
) { | ||
@@ -899,3 +971,3 @@ level = level[level.length - 1]["subs"]; | ||
* marked.js and CommonMark tends to do URL encode by themselves. | ||
* I should skip `encodeURI()` here and do it for header ID only. | ||
* I should skip `encodeURI()` here and do it for heading ID only. | ||
* See <https://github.com/markedjs/marked/issues/1285>. | ||
@@ -958,7 +1030,4 @@ */ | ||
hljsAliases.set(lang, lang); | ||
// Then register it. | ||
const hljsModule = require(`highlight.js/lib/languages/${lang}`); | ||
hljs.registerLanguage(lang, hljsModule); | ||
// And add its aliases. | ||
const aliases = hljsModule(hljs)["aliases"]; | ||
// Then add its aliases. | ||
const aliases = hljs.getLanguage(lang)["aliases"]; | ||
if (aliases != null) { | ||
@@ -977,4 +1046,5 @@ for (const alias of aliases) { | ||
* @param {Object} [hlOpts] Highlight options. | ||
* @return {Promise<null>} Importing hljs is async. | ||
*/ | ||
const resolveCodeBlocks = (node, hlOpts = {}) => { | ||
const resolveCodeBlocks = async (node, hlOpts = {}) => { | ||
// Enable hljs prefix and gutter by default. | ||
@@ -988,3 +1058,5 @@ hlOpts = Object.assign({"hljs": true, "gutter": true}, hlOpts); | ||
if (hljs == null) { | ||
hljs = require("highlight.js"); | ||
// import is a keyword, not a function. | ||
const module = await import("highlight.js"); | ||
hljs = module["default"]; | ||
} | ||
@@ -1093,3 +1165,3 @@ hljs.configure(hlOpts); | ||
const getVersion = () => { | ||
return pkg["version"]; | ||
return pkgJSON["version"]; | ||
}; | ||
@@ -1118,3 +1190,9 @@ | ||
module.exports = { | ||
export { | ||
hikaruDir, | ||
loadJSON, | ||
loadJSONSync, | ||
loadYAML, | ||
loadYAMLSync, | ||
pkgJSON, | ||
isString, | ||
@@ -1128,2 +1206,3 @@ isArray, | ||
isBinaryFileSync, | ||
isReadableSync, | ||
escapeHTML, | ||
@@ -1130,0 +1209,0 @@ matchFiles, |
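Two utils changes are worth calling out: config reading is funneled through the new `loadJSON`/`loadYAML` helpers, and highlight.js is now pulled in lazily with a dynamic `import()`, which is why `resolveCodeBlocks` became `async`. A stripped-down sketch of that lazy-load pattern (the module-level cache variable mirrors the `hljs` used above; the highlighting walk itself is omitted):

```
// Cache the highlight.js module after the first load so later calls are cheap.
let hljs = null;

const resolveCodeBlocks = async (node, hlOpts = {}) => {
  // Enable hljs prefix and gutter by default, as in the diff above.
  hlOpts = Object.assign({"hljs": true, "gutter": true}, hlOpts);
  if (hljs == null) {
    // Dynamic `import()` resolves to the module namespace object, so the
    // CommonJS-style export of highlight.js lives under `default`.
    const module = await import("highlight.js");
    hljs = module["default"];
  }
  hljs.configure(hlOpts);
  // ...walk `node` and highlight each code block here...
};
```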
@@ -1,3 +0,1 @@ | ||
"use strict"; | ||
/** | ||
@@ -7,9 +5,9 @@ * @module watcher | ||
const path = require("path"); | ||
import * as path from "node:path"; | ||
const isGlob = require("is-glob"); | ||
const chokidar = require("chokidar"); | ||
const picomatch = require("picomatch"); | ||
import isGlob from "is-glob"; | ||
import chokidar from "chokidar"; | ||
import picomatch from "picomatch"; | ||
const {isArray, isString} = require("./utils"); | ||
import {isArray, isString} from "./utils.js"; | ||
@@ -269,2 +267,2 @@ /** | ||
module.exports = Watcher; | ||
export default Watcher; |
{ | ||
"name": "hikarujs", | ||
"version": "1.13.4", | ||
"version": "1.14.0", | ||
"description": "A static site generator that generates routes based on directories naturally.", | ||
"main": "hikaru/index.js", | ||
"type": "module", | ||
"bin": { | ||
"hikaru": "bin/hikaru" | ||
"hikaru": "bin/hikaru.js" | ||
}, | ||
"scripts": { | ||
"build:docs": "jsdoc -c .jsdoc.json -R README.md -u tutorials/ && echo \"hikaru.alynx.one\" > docs/CNAME && touch docs/.nojekyll && git add -A docs/ && git commit -m \"Updated docs.\"", | ||
"pref": "0x -o bin/hikaru build", | ||
"test": "standardx bin/* hikaru/*.js tests/*.js && mocha tests/index.js", | ||
"pref": "0x -o bin/hikaru.js build", | ||
"test": "standardx bin/*.js hikaru/*.js tests/*.js && mocha tests/index.js", | ||
"preversion": "npm test", | ||
@@ -33,23 +33,23 @@ "version": "npm run build:docs" | ||
"chokidar": "^3.5.3", | ||
"commander": "^9.0.0", | ||
"fs-extra": "^10.0.0", | ||
"highlight.js": "^11.5.0", | ||
"commander": "^9.4.1", | ||
"fs-extra": "^11.1.0", | ||
"highlight.js": "^11.7.0", | ||
"is-glob": "^4.0.3", | ||
"isbinaryfile": "^4.0.8", | ||
"marked": "^4.0.12", | ||
"isbinaryfile": "^5.0.0", | ||
"marked": "^4.2.4", | ||
"nunjucks": "^3.2.3", | ||
"parse5": "^6.0.1", | ||
"parse5": "^7.1.2", | ||
"picomatch": "^2.3.1", | ||
"yaml": "^1.10.2" | ||
"yaml": "^2.1.3" | ||
}, | ||
"devDependencies": { | ||
"0x": "^5.1.2", | ||
"chai": "^4.3.6", | ||
"jsdoc": "^3.6.10", | ||
"mocha": "^9.2.0", | ||
"0x": "^5.4.1", | ||
"chai": "^4.3.7", | ||
"jsdoc": "^4.0.0", | ||
"mocha": "^10.2.0", | ||
"standardx": "^7.0.0" | ||
}, | ||
"engines": { | ||
"node": ">=12.20.0" | ||
"node": ">=14.16.0" | ||
} | ||
} |
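Setting `"type": "module"` is what makes every file above load as ESM, and it is why the bin entry was renamed to `bin/hikaru.js`: the CLI stub itself has to be an ES module now. That stub is not shown in this diff, so the following is only a plausible sketch based on the `bin` and `main` fields and the default export of index.js:

```
#!/usr/bin/env node
// Hypothetical bin/hikaru.js: import the default export of hikaru/index.js
// (the CLI wrapper around commander) and hand it the process arguments.
import hikaru from "../hikaru/index.js";

hikaru(process.argv);
```

Note that on the Node.js releases current when this change shipped, `require()` of an ES module fails with `ERR_REQUIRE_ESM`, so downstream code also has to use `import`.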
@@ -14,12 +14,10 @@ Hikaru | ||
Hikaru is a command line program (not a module) and you can install it from NPM: | ||
Hikaru is a command line program (not a module) and you can install it from NPM. Since Hikaru v1.14.0, it is recommended to install it locally in your site dir and run it with `npx`: | ||
``` | ||
# npm i -g hikarujs | ||
$ npm i -s hikarujs && npx hikaru i | ||
``` | ||
If you are an Arch Linux user, you can also install package `hikarujs` from [AUR](https://aur.archlinux.org/packages/hikarujs/). | ||
**Hikaru works on Node.js v14.16.0 LTS or later.** | ||
**Hikaru works on Node.js v12.20.0 LTS or later.** | ||
# Setup site | ||
@@ -29,3 +27,4 @@ | ||
$ mkdir hikaru-site && cd hikaru-site | ||
$ hikaru init && npm install | ||
$ npm install --save hikarujs hikaru-generator-feed hikaru-generator-sitemap hikaru-generator-search | ||
$ npx hikaru init --debug | ||
``` | ||
@@ -49,3 +48,3 @@ | ||
## Edit site config | ||
## Edit config | ||
@@ -56,3 +55,3 @@ ``` | ||
Set `themeDir` to `themes/aria` | ||
Set `themeDir` to `themes/aria`: | ||
@@ -63,4 +62,9 @@ ```yaml | ||
**Don't forget to copy your theme config to site's dir and edit it as its README file.** | ||
Copy theme config to site dir and edit it: | ||
``` | ||
$ cp themes/aria/theme-config.yaml theme-config.yaml | ||
$ $EDITOR site-config.yaml | ||
``` | ||
# Create src file | ||
@@ -97,3 +101,3 @@ | ||
``` | ||
$ hikaru serve | ||
$ npx hikaru serve --debug | ||
``` | ||
@@ -104,3 +108,3 @@ | ||
``` | ||
$ hikaru build | ||
$ npx hikaru build --debug | ||
``` | ||
@@ -142,2 +146,6 @@ | ||
## Common JS or ES Module | ||
I personally like CommonJS, but more and more libraries use ES modules, so Hikaru now uses ES modules. | ||
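In practice the switch mostly affects site scripts and plugins, which are now loaded with `import()` and read from the module's `default` export (see the hikaru.js diff above). A hypothetical site script under the new scheme, assuming the injected context exposes `logger` and `processor` alongside the `watcher` shown in the loader code:

```
// scripts/example.js (hypothetical): must be an ES module with a default
// export, because Hikaru reads `module["default"]` after `import()`.
export default ({logger, processor}) => {
  processor.register("example processing", async (site) => {
    // Purely illustrative: report how many posts were collected.
    logger.debug(`Example script sees ${site["posts"].length} posts.`);
  });
};
```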
# More | ||
@@ -156,10 +164,1 @@ | ||
[Apache-2.0](LICENSE) | ||
# For Stylus and nib | ||
Since Hikaru v1.2.11, nib was dropped because [it was the only dependency with Node.js 14 warnings](https://github.com/stylus/nib/issues/347), and many nib features can be done in newer CSS, so it was split out into a separate plugin. | ||
Since Hikaru v1.10.0, Stylus was also dropped; Hikaru no longer ships any CSS preprocessors. | ||
If you need Stylus and nib for your theme, please install [hikaru-renderer-stylus-nib](https://github.com/AlynxZhou/hikaru-renderer-stylus-nib/) to your site. | ||
+ Added entities@4.5.0 (transitive)
+ Added fs-extra@11.3.0 (transitive)
+ Added isbinaryfile@5.0.4 (transitive)
+ Added parse5@7.2.1 (transitive)
+ Added yaml@2.7.0 (transitive)
- Removed fs-extra@10.1.0 (transitive)
- Removed isbinaryfile@4.0.10 (transitive)
- Removed parse5@6.0.1 (transitive)
- Removed yaml@1.10.2 (transitive)
Updated commander@^9.4.1
Updated fs-extra@^11.1.0
Updated highlight.js@^11.7.0
Updated isbinaryfile@^5.0.0
Updated marked@^4.2.4
Updated parse5@^7.1.2
Updated yaml@^2.1.3