va-log-parser
Advanced tools
Comparing version 0.0.3 to 0.0.4
@@ -1,118 +0,35 @@ | ||
import moment from "moment"; | ||
import fs from "fs"; | ||
import linebyline from "linebyline"; | ||
import ora from "ora"; | ||
import chalk from "chalk"; | ||
import cliProgress from "cli-progress"; | ||
import sortLog from "./sortLog.js"; | ||
// Shared spinner used for step-by-step status messages throughout parsing.
const spinner = ora();
// create a new progress bar instance and use shades_classic theme
// (tracks bytes processed during the second, streaming pass over the file).
const bar1 = new cliProgress.SingleBar({
  format: "{duration}sec | {bar} {percentage}% | {value}/{total} Bytes",
  barCompleteChar: "\u2588",
  barIncompleteChar: "\u2591",
  hideCursor: true,
  stopOnComplete: true,
});
/**
 * Read the entire log file into a single UTF-8 string.
 *
 * @param {string} LogFile - path to the log file
 * @returns {string} full file contents
 */
function importLogFile(LogFile) {
  return fs.readFileSync(LogFile, "utf-8");
}
/**
 * Split raw log text into individual lines, handling both LF and CRLF endings.
 *
 * @param {string} LogFile - full log file contents
 * @returns {string[]} one entry per line
 */
function getLogLines(LogFile) {
  return LogFile.split(/\r?\n/);
}
/**
 * Serialize `data` as JSON and write it to `filePath`, overwriting any
 * existing file.
 *
 * @param {string} filePath - destination path
 * @param {*} data - JSON-serializable value
 */
function saveParsedFile(filePath, data) {
  fs.writeFileSync(filePath, JSON.stringify(data));
}
/**
 * Build a "<newest>_<oldest>" date-range string (local time) from an array of
 * Date objects, e.g. "2022-06-07T080910_2022-01-02T030405". Used to name the
 * per-logger output folders and files.
 *
 * Fixes vs. the previous version:
 *  - indexed the descending-sorted array at [1], silently skipping the newest
 *    timestamp; now uses [0],
 *  - sorted the caller's array in place; now sorts a copy,
 *  - formats with native Date methods (same output as the old moment
 *    "YYYY-MM-DD[T]HHmmss" pattern, which renders local time),
 *  - returns a placeholder instead of a garbled string for empty input.
 *
 * @param {Date[]} timeStamps - timestamps collected from the log lines
 * @returns {string} "<newest>_<oldest>" range, or "unknown-dates" when empty
 */
function getDateRange(timeStamps) {
  if (!Array.isArray(timeStamps) || timeStamps.length === 0) {
    return "unknown-dates";
  }
  const pad = (n) => String(n).padStart(2, "0");
  // Local-time YYYY-MM-DDTHHmmss, matching the previous moment format.
  const formatStamp = (d) =>
    `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}T` +
    `${pad(d.getHours())}${pad(d.getMinutes())}${pad(d.getSeconds())}`;
  // Sort a copy, newest first, so the caller's array is left untouched.
  const sorted = [...timeStamps].sort((a, b) => b - a);
  return `${formatStamp(sorted[0])}_${formatStamp(sorted[sorted.length - 1])}`;
}
/**
 * For each unique logger name, write its matching log entries (and, when any
 * exist, its matching error entries) as JSON files under
 * ./<org>/<dateRange>/<logger-name-with-dots-replaced-by-dashes>/.
 *
 * @param {string[]} uniqueLoggers - de-duplicated logger names
 * @param {object[]} jsonArray - every parsed log line
 * @param {object[]} errors - parsed log lines whose raw text contained "error"
 * @param {string} dateRange - range string used in folder and file names
 * @param {string|null} org - org identifier used as the top-level folder
 */
function parseLoggers(uniqueLoggers, jsonArray, errors, dateRange, org) {
  for (const logger_name of uniqueLoggers) {
    // Dots in logger names would read like extensions; swap them for dashes.
    const safeName = logger_name.replaceAll(".", "-");
    const loggerFolder = `./${org}/${dateRange}/${safeName}`;
    if (!fs.existsSync(loggerFolder)) {
      fs.mkdirSync(loggerFolder, { recursive: true });
    }
    const entriesForLogger = jsonArray.filter(
      (entry) => entry.logger_name === logger_name
    );
    const errorsForLogger = errors.filter(
      (entry) => entry.logger_name === logger_name
    );
    saveParsedFile(`${loggerFolder}/${safeName}-${dateRange}.json`, entriesForLogger);
    if (errorsForLogger.length > 0) {
      saveParsedFile(
        `${loggerFolder}/${safeName}-${dateRange}-Errors.json`,
        errorsForLogger
      );
      console.log(chalk.yellow(`Errors found for Logger: ${logger_name}`));
    }
  }
}
/**
 * Parse a SailPoint VA log file end-to-end:
 *  1. read the whole file and split it into lines,
 *  2. JSON-parse each line (falling back to { text: line } for non-JSON
 *     lines), collecting error lines, logger names, timestamps, and the
 *     first "org" value seen,
 *  3. write per-logger JSON output via parseLoggers(),
 *  4. stream the same file a second time through sortLog() with a
 *     byte-based progress bar.
 *
 * NOTE(review): the file is read twice — once synchronously into memory and
 * once streamed via linebyline; confirm both passes are intentional.
 *
 * @param {string} LogFile - path to the log file to parse
 */
export default function parseLogFile(LogFile) {
  spinner.start(`Importing Log File: ${LogFile}`);
  const wholeFile = importLogFile(LogFile);
  spinner.succeed();
  spinner.start(`Slicing Log File: ${LogFile}`);
  const fileLines = getLogLines(wholeFile);
  spinner.succeed();
  let jsonArray = []; // every line, parsed (or wrapped as { text })
  let errors = []; // lines whose raw text contains "error"
  let loggers = []; // logger_name of every line that has one (duplicates kept)
  let org = null; // first "org" value encountered
  let timeStamps = []; // every "@timestamp" value, as a Date
  spinner.start(`Processing Log Lines: ${LogFile}`);
  for (const line of fileLines) {
    let jsonObjFromLine;
    try {
      jsonObjFromLine = JSON.parse(line);
    } catch {
      // Not valid JSON — keep the raw text so no line is dropped.
      jsonObjFromLine = { text: line };
    }
    jsonArray.push(jsonObjFromLine);
    // NOTE(review): case-sensitive substring match — lines containing
    // "Error"/"ERROR" are not collected; confirm whether that is intended.
    if (line.includes("error")) {
      errors.push(jsonObjFromLine);
    }
    if (
      jsonObjFromLine["logger_name"] !== null &&
      jsonObjFromLine["logger_name"] !== undefined
    ) {
      loggers.push(jsonObjFromLine.logger_name);
    }
    if (
      jsonObjFromLine["@timestamp"] !== null &&
      jsonObjFromLine["@timestamp"] !== undefined
    ) {
      timeStamps.push(new Date(jsonObjFromLine["@timestamp"]));
    }
    // Only the first org value wins; later lines cannot overwrite it.
    if (
      org == null &&
      jsonObjFromLine["org"] !== null &&
      jsonObjFromLine["org"] !== undefined
    ) {
      org = jsonObjFromLine.org;
    }
  }
  spinner.succeed();
  spinner.start(`Getting Unique Loggers: ${LogFile}`);
  const uniqueLoggers = Array.from(new Set(loggers));
  spinner.succeed();
  spinner.start(`Getting Date Range: ${LogFile}`);
  const dateRange = getDateRange(timeStamps);
  spinner.succeed();
  parseLoggers(uniqueLoggers, jsonArray, errors, dateRange, org);
  spinner.succeed(`Parsing Log File: ${LogFile}`);
  // Second pass: stream the file line-by-line through sortLog() while
  // driving the byte-progress bar from the stream's byte counter.
  let fileStats = fs.statSync(LogFile);
  bar1.start(fileStats.size, 0, {
    speed: "N/A",
  });
  let lines;
  const rl = linebyline(LogFile);
  rl.on("line", function (line, lineCount, byteCount) {
    bar1.update(byteCount);
    sortLog(line);
    lines = lineCount;
  });
  // NOTE(review): assumes linebyline emits an "end" event with these
  // arguments — verify against the linebyline API.
  rl.on("end", function (line, lineCount, byteCount) {
    // Force the bar to 100% in case the last update fell short of the total.
    bar1.update(fileStats.size);
    spinner.succeed(`Completed ${LogFile} / ${lines} Lines`);
  });
}
@@ -7,2 +7,3 @@ import inquirer from "inquirer"; | ||
import chalk from "chalk"; | ||
import parseMultipleLogFiles from "./parseMultipleLogFiles.js"; | ||
@@ -36,4 +37,2 @@ function checkDirectory(path) { | ||
choices: [ | ||
new inquirer.Separator(chalk.yellow("------Folders------")), | ||
...directoryContents.filter((Obj) => checkDirectory(Obj) === true), | ||
new inquirer.Separator(chalk.green("------Files------")), | ||
@@ -52,9 +51,8 @@ ...directoryContents.filter((Obj) => checkDirectory(Obj) === false), | ||
.then((answers) => { | ||
for (const logFile of answers.Files) { | ||
try { | ||
parseLogFile(logFile); | ||
} catch {} | ||
if (answers.Files.length > 1) { | ||
parseMultipleLogFiles(answers.Files); | ||
} else { | ||
parseLogFile(answers.Files[0]); | ||
} | ||
spinner.succeed("Finished Processing Log Files"); | ||
}); | ||
} |
{ | ||
"name": "va-log-parser", | ||
"version": "0.0.3", | ||
"version": "0.0.4", | ||
"description": "A Log Parser for SailPoint VA logs", | ||
@@ -24,9 +24,9 @@ "main": "index.js", | ||
"boxen": "^7.0.0", | ||
"chalk": "^5.0.1", | ||
"cli-progress": "^3.11.2", | ||
"commander": "^9.4.0", | ||
"fs": "^0.0.1-security", | ||
"inquirer": "^9.1.0", | ||
"moment": "^2.29.4", | ||
"linebyline": "^1.3.0", | ||
"ora": "^6.1.2" | ||
} | ||
} |
# VA-Log-Parser | ||
Node Module to Parse SailPoint VA logs | ||
> This tool is not developed, maintained or supported by SailPoint. | ||
> It is a community effort to help manage and support IdentityNow. | ||
[GitHub](https://github.com/LukeHagar/VA-Log-Parser "GitHub Repo") | ||
[NPM](https://www.npmjs.com/package/va-log-parser "npmjs Page") | ||
Node Module to Parse SailPoint VA logs |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
7132
10
10
171
6
1
+ Added cli-progress@^3.11.2
+ Added linebyline@^1.3.0
+ Added cli-progress@3.12.0 (transitive)
+ Added linebyline@1.3.0 (transitive)
- Removed chalk@^5.0.1
- Removed moment@^2.29.4
- Removed moment@2.30.1 (transitive)