@lbu/insight
Advanced tools
Comparing version 0.0.9 to 0.0.10
@@ -5,6 +5,10 @@ import { newLogger } from "./src/logger.js"; | ||
export { newLogger } from "./src/logger.js"; | ||
export { addProcessor, parseExec } from "./src/parser.js"; | ||
export { newLogParserContext, executeLogParser } from "./src/parser.js"; | ||
/**
 * Standard log instance, shared by all importers of this module.
 * Depth 4 (instead of the logger default of 3) so nested context objects
 * are still rendered in full.
 * @type {Logger}
 */
export const log = newLogger({
  depth: 4,
});
{ | ||
"name": "@lbu/insight", | ||
"version": "0.0.9", | ||
"version": "0.0.10", | ||
"description": "Simple logger in NDJSON format", | ||
"main": "index.js", | ||
"main": "./index.js", | ||
"exports": "./index.js", | ||
"type": "module", | ||
@@ -29,5 +30,5 @@ "keywords": [ | ||
"engines": { | ||
"node": ">=12" | ||
"node": ">=14" | ||
}, | ||
"gitHead": "d847630e049071c7c2385eef8377ba976ddd0e2a" | ||
"gitHead": "35e227dd5217c7eda76fcc69adaabb19207e3008" | ||
} |
@@ -21,3 +21,3 @@ # @lbu/insight | ||
- Various utilities like loading .env files, executing other processes and a
  basic string templating system
@@ -24,0 +24,0 @@ ## Docs |
import { writeNDJSON, writePretty } from "./writer.js"; | ||
// Lazily-evaluated fallbacks for newLogger options. These are functions so
// that process.env.NODE_ENV and process.stdout are read at logger-creation
// time, not at module-load time.
const defaultOptions = {
  isProduction: () => process.env.NODE_ENV === "production",
  stream: () => process.stdout,
  // BUG FIX: `() => {}` is an arrow function with an empty *block* body and
  // returns undefined; the documented default ctx is `{}`, so wrap the
  // object literal in parentheses.
  ctx: () => ({}),
  depth: () => 3,
};
/**
 * @typedef {object} LoggerOptions
 * @property {boolean} [pretty=false]
 * @property {number} [depth=3]
 * @property {WriteStream} [stream=process.stdout]
 * @property {object} [ctx={}]
 */

/**
 * @typedef {function(args: ...*): undefined} LogFn
 */

/**
 * @typedef {object} Logger
 * @property {function(opts: LoggerOptions): Logger} derive
 * @property {function(): boolean} isProduction
 * @property {LogFn} info
 * @property {LogFn} error
 */
/**
 * Create a new logger.
 * Defaults are merged with the provided options; `pretty` defaults to true
 * outside production (NODE_ENV !== "production"), which selects the
 * human-readable writer over NDJSON.
 * @param {LoggerOptions} [options]
 * @return {Logger}
 */
export function newLogger(options) {
  // Caller options override the defaults; Object.assign tolerates an
  // undefined `options` argument.
  const _internal = Object.assign(
    {
      pretty: process.env.NODE_ENV !== "production",
      depth: 3,
      stream: process.stdout,
      ctx: {},
    },
    options,
  );

  return {
    // Non-pretty (NDJSON) output implies production mode.
    isProduction: () => !_internal.pretty,
    info: logger.bind(
      undefined,
      _internal.pretty,
      _internal.stream,
      _internal.depth,
      _internal.ctx,
      "info",
    ),
    error: logger.bind(
      undefined,
      _internal.pretty,
      _internal.stream,
      _internal.depth,
      _internal.ctx,
      "error",
    ),
    // Create a child logger: current settings plus the given overrides.
    derive: (opts) => {
      return newLogger({ ..._internal, ...opts });
    },
  };
}
/**
 * Stateful wrapper around the module-level `logger` write function.
 * Holds the destination stream, production flag, context object and
 * inspection depth that are passed along with every log call.
 */
class Logger {
  /**
   * @param {NodeJS.WritableStream} outStream - destination for log lines
   * @param {boolean} prodFlag - production mode flag
   * @param {object} context - context included with every entry
   * @param {number} inspectDepth - object inspection depth
   */
  constructor(outStream, prodFlag, context, inspectDepth) {
    this.stream = outStream;
    this.isProd = prodFlag;
    this.ctx = context;
    this.depth = inspectDepth;
  }

  /**
   * Write an info-level entry.
   * @public
   * @param args
   */
  info(...args) {
    const { isProd, stream, depth, ctx } = this;
    logger(isProd, stream, depth, ctx, "info", ...args);
  }

  /**
   * Write an error-level entry.
   * @public
   * @param args
   */
  error(...args) {
    const { isProd, stream, depth, ctx } = this;
    logger(isProd, stream, depth, ctx, "error", ...args);
  }

  /**
   * Change the object inspection depth for subsequent calls.
   * @public
   * @param {number} depth
   */
  setDepth(depth) {
    this.depth = depth;
  }

  /**
   * Whether this logger was created in production mode.
   * @public
   * @return {boolean}
   */
  isProduction() {
    return this.isProd;
  }

  /**
   * The current context object.
   * @public
   * @return {object}
   */
  getCtx() {
    return this.ctx;
  }

  /**
   * Replace the context object used for subsequent calls.
   * @public
   * @param {object} ctx
   */
  setCtx(ctx) {
    this.ctx = ctx;
  }
}
function logger(isProduction, stream, depth, ctx, level, ...args) { | ||
function logger(pretty, stream, depth, ctx, level, ...args) { | ||
const metaData = { | ||
@@ -101,4 +70,3 @@ ...ctx, | ||
}; | ||
if (isProduction) { | ||
if (!pretty) { | ||
writeNDJSON(stream, depth, metaData); | ||
@@ -105,0 +73,0 @@ } else { |
@@ -6,7 +6,7 @@ const sizes = ["Bytes", "KiB", "MiB", "GiB", "TiB", "PiB"]; | ||
* Support up to a pebibyte | ||
* @param {number} bytes | ||
* @param {number} [bytes] | ||
* @returns {string} | ||
*/ | ||
export function bytesToHumanReadable(bytes) { | ||
if (bytes === 0) { | ||
if (bytes === 0 || bytes === undefined) { | ||
return "0 Byte"; | ||
@@ -36,3 +36,9 @@ } | ||
export function printProcessMemoryUsage(logger) { | ||
const { external, heapTotal, heapUsed, rss } = process.memoryUsage(); | ||
const { | ||
external, | ||
heapTotal, | ||
heapUsed, | ||
rss, | ||
arrayBuffers, | ||
} = process.memoryUsage(); | ||
if (logger.isProduction()) { | ||
@@ -44,2 +50,3 @@ logger.info({ | ||
external, | ||
arrayBuffers, | ||
}); | ||
@@ -52,4 +59,5 @@ } else { | ||
external: bytesToHumanReadable(external), | ||
arrayBuffers: bytesToHumanReadable(arrayBuffers), | ||
}); | ||
} | ||
} |
@@ -5,76 +5,75 @@ import pump from "pump"; | ||
// Registries for the module-global processor API (addProcessor/parseExec).
// Sets so the same processor function is never registered twice.
const jsonProcessors = new Set();
const textProcessors = new Set();
/**
 * @typedef {object} LogParserContext
 * @property {(function(data: object): undefined)} [jsonProcessor]
 * @property {(function(data: string): undefined)} [textProcessor]
 * @property {ReadStream} stream
 */
/**
 * Create a new parser context for the given input stream.
 * @param {ReadStream} stream - source stream executeLogParser will consume
 * @return {LogParserContext}
 */
export function newLogParserContext(stream) {
  // Both processors start unset; the caller assigns jsonProcessor and/or
  // textProcessor before calling executeLogParser.
  return {
    jsonProcessor: undefined,
    textProcessor: undefined,
    stream,
  };
}
/**
 * Run the parser: splits the input stream on lines and calls either the
 * jsonProcessor or textProcessor with each value. The original value is
 * written to the returned stream.
 * Note that this is mostly useful with production (NDJSON) logs.
 * @param {LogParserContext} lpc
 * @return {ReadStream}
 */
export function executeLogParser(lpc) {
  const transport = new Transform({
    transform(chunk, enc, cb) {
      // Ignore empty chunks; split() can emit trailing blanks.
      if (chunk !== null && chunk !== undefined && chunk.length !== 0) {
        const str = chunk.toString();
        if (str.length > 0) {
          callProcessor(lpc, str);
        }
        // Re-append the newline that split() stripped.
        cb(null, str + "\n");
      } else {
        cb();
      }
    },
  });

  return pump(lpc.stream, split(), transport);
}
/**
 * Internal: try to parse the line as JSON and dispatch to jsonProcessor;
 * anything that is not a JSON *object* (plain text, arrays, primitives,
 * null) goes to textProcessor instead. Unset processors are skipped.
 * @param {LogParserContext} lpc
 * @param {string} line
 */
function callProcessor(lpc, line) {
  let obj = undefined;

  try {
    obj = JSON.parse(line);
  } catch {
    // Not JSON at all -> plain text.
    if (lpc.textProcessor) {
      lpc.textProcessor(line);
    }
    return;
  }

  if (
    obj === undefined ||
    Object.prototype.toString.call(obj) !== "[object Object]"
  ) {
    // Valid JSON but not an object (array, number, string, null, ...).
    if (lpc.textProcessor) {
      lpc.textProcessor(line);
    }
  } else {
    if (lpc.jsonProcessor) {
      lpc.jsonProcessor(obj);
    }
  }
}
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
11162
332