Fully Functional CCG Log Parser :)

This commit is contained in:
luke-hagar-sp
2022-08-10 11:47:23 -05:00
parent 3319beb050
commit 050cef9a06
4 changed files with 138 additions and 36 deletions

View File

@@ -1,21 +1,18 @@
import chalk from "chalk";
import moment from "moment";
import fs from "fs";
import boxen from "boxen";
import ora from "ora";
import chalk from "chalk";
const spinner = ora();
/**
 * Read a log file from disk and return its raw text content.
 *
 * The caller (parseLogFile) passes the returned string to getLogLines,
 * so this function must return the whole file as one string — no
 * per-line parsing happens here.
 *
 * @param {string} LogFile - Path to the log file to read.
 * @returns {string} The full file contents, decoded as UTF-8.
 * @throws If the file does not exist or cannot be read (fs.readFileSync).
 */
function importLogFile(LogFile) {
  const allFileContents = fs.readFileSync(LogFile, "utf-8");
  return allFileContents;
}
/**
 * Split raw log-file text into individual lines.
 *
 * Accepts both Unix (LF) and Windows (CRLF) line endings.
 *
 * @param {string} LogFile - Full text of a log file.
 * @returns {string[]} One entry per line, separators removed.
 */
function getLogLines(LogFile) {
  return LogFile.split(/\r?\n/);
}
function checkDirectory(path) {
@@ -24,23 +21,108 @@ function checkDirectory(path) {
}
/**
 * Serialize a value as JSON and write it to disk, logging the target path.
 *
 * @param {string} filePath - Destination path for the JSON file.
 * @param {*} data - Any JSON-serializable value.
 * @throws If serialization fails (cycles) or the path is not writable.
 */
function saveParsedFile(filePath, data) {
  console.log(`Saving file ${filePath}`);
  fs.writeFileSync(filePath, JSON.stringify(data));
}
// NOTE(review): this span appears to be removed-diff residue from the commit
// view — an older parseLogFile implementation. It is unclosed (there is no
// closing brace before the next declaration) and its `export default`
// duplicates the later parseLogFile definition, which is a syntax error in a
// merged file. Verify against the repository; this block likely should not
// exist in the post-commit source.
export default function parseLogFile(LogFile) {
// Logged but otherwise unused: checkDirectory's return value is only printed.
console.log(checkDirectory(LogFile));
// presumably importLogFile here returned parsed JSON objects (the pre-commit
// contract), not the raw string the newer version returns — TODO confirm.
const jsonArray = importLogFile(LogFile);
// Keep entries whose message or exception stacktrace mentions "error".
const errors = jsonArray.filter(
(Obj) =>
Obj.message?.includes("error") ||
Obj.exception?.stacktrace?.includes("error")
);
// console.log(jsonArray);
// console.log(errors);
// Output paths are derived from the input path, swapping ".log" for "-log".
const parsedDataPath = `${LogFile.replace(".log", "-log")}-Parsed.JSON`;
const errorPath = `${LogFile.replace(".log", "-log")}-Parsed-Errors.JSON`;
saveParsedFile(parsedDataPath, jsonArray);
saveParsedFile(errorPath, errors);
/**
 * Build a "earliest_latest" range label from an array of Date timestamps,
 * e.g. "2022-08-10T114723_2022-08-10T120101".
 *
 * Fixes vs. the previous version:
 *  - sorted descending (`b - a`) but then read index [1], skipping the
 *    newest timestamp entirely (off-by-one) and ending on the oldest —
 *    the produced range was reversed and missed the true extremum;
 *  - sorted the caller's array in place (Array.prototype.sort mutates);
 *  - leftover debug console.log calls removed.
 *
 * @param {Date[]} timeStamps - Timestamps extracted from the log entries.
 * @returns {string} `${earliest}_${latest}` formatted as YYYY-MM-DDTHHmmss.
 */
function getDateRange(timeStamps) {
  // Copy before sorting so the caller's array is left untouched.
  const sorted = [...timeStamps].sort((a, b) => a - b);
  const FORMAT = "YYYY-MM-DD[T]HHmmss";
  const dateRange = `${moment(sorted[0]).format(FORMAT)}_${moment(
    sorted[sorted.length - 1]
  ).format(FORMAT)}`;
  return dateRange;
}
/**
 * Split parsed log entries per logger and write each logger's entries (and
 * its errors, when any exist) into its own folder under ./{org}/{dateRange}/.
 *
 * Improvement: the original recomputed `logger_name.replaceAll(".", "-")`
 * four times per iteration; the sanitized name is now computed once.
 *
 * @param {string[]} uniqueLoggers - De-duplicated logger names.
 * @param {Object[]} jsonArray - All parsed log entries.
 * @param {Object[]} errors - Entries whose raw line contained "error".
 * @param {string} dateRange - Range label used in folder and file names.
 * @param {?string} org - Organization name from the log, used as root folder.
 */
function parseLoggers(uniqueLoggers, jsonArray, errors, dateRange, org) {
  for (const logger_name of uniqueLoggers) {
    const loggerArray = jsonArray.filter(
      (entry) => entry.logger_name === logger_name
    );
    const loggerErrorArray = errors.filter(
      (entry) => entry.logger_name === logger_name
    );
    // Dots in logger names would read as extensions/paths; sanitize once.
    const safeName = logger_name.replaceAll(".", "-");
    const loggerFolder = `./${org}/${dateRange}/${safeName}`;
    if (!fs.existsSync(loggerFolder)) {
      fs.mkdirSync(loggerFolder, { recursive: true });
    }
    saveParsedFile(`${loggerFolder}/${safeName}-${dateRange}.json`, loggerArray);
    if (loggerErrorArray.length > 0) {
      saveParsedFile(
        `${loggerFolder}/${safeName}-${dateRange}-Errors.json`,
        loggerErrorArray
      );
      console.log(
        chalk.bgGreenBright(`Errors found for Logger: ${logger_name}`)
      );
    }
  }
}
/**
 * Parse one CCG log file end-to-end:
 *  1. read the file and split it into lines;
 *  2. JSON-parse each line (non-JSON lines are kept as `{ text: line }`);
 *  3. collect error lines, logger names, timestamps, and the first `org`;
 *  4. write per-logger output files via parseLoggers.
 *
 * Progress is reported on the module-level ora spinner.
 *
 * @param {string} LogFile - Path to the .log file to process.
 */
export default function parseLogFile(LogFile) {
  spinner.start(`Importing Log File: ${LogFile}`);
  const wholeFile = importLogFile(LogFile);
  spinner.succeed();

  spinner.start(`Slicing Log File: ${LogFile}`);
  const fileLines = getLogLines(wholeFile);
  spinner.succeed();

  const jsonArray = [];
  const errors = [];
  const loggers = [];
  const timeStamps = [];
  let org = null;

  spinner.start(`Processing Log Lines: ${LogFile}`);
  for (const line of fileLines) {
    let entry;
    try {
      entry = JSON.parse(line);
    } catch {
      // Keep non-JSON lines too, wrapped so every element is an object.
      entry = { text: line };
    }
    jsonArray.push(entry);

    // The error check is on the raw line, not the parsed object.
    if (line.includes("error")) {
      errors.push(entry);
    }
    if (entry["logger_name"] != null) {
      loggers.push(entry.logger_name);
    }
    if (entry["@timestamp"] != null) {
      timeStamps.push(new Date(entry["@timestamp"]));
    }
    // Only the first entry carrying an org wins.
    if (org == null && entry["org"] != null) {
      org = entry.org;
    }
  }
  spinner.succeed();

  spinner.start(`Getting Unique Loggers: ${LogFile}`);
  const uniqueLoggers = [...new Set(loggers)];
  spinner.succeed();

  spinner.start(`Getting Date Range: ${LogFile}`);
  const dateRange = getDateRange(timeStamps);
  spinner.succeed();

  parseLoggers(uniqueLoggers, jsonArray, errors, dateRange, org);
}

View File

@@ -2,8 +2,10 @@ import inquirer from "inquirer";
import boxen from "boxen";
import ora from "ora";
import fs from "fs";
import parseLogFile from "./parseLogFile.js";
export default function startCLI() {
const spinner = ora();
console.log(
boxen(`VA-Log-Parser`, {
title: `Written by: Luke Hagar`,
@@ -11,8 +13,10 @@ export default function startCLI() {
padding: 5,
})
);
spinner.start(`Getting Current Directory`);
const currentDirectory = process.cwd();
const spinner = ora(`Checking Path: ${currentDirectory}`).start();
spinner.succeed();
spinner.start(`Reading Directory: ${currentDirectory}`);
let directoryContents = fs.readdirSync(currentDirectory);
spinner.succeed();
inquirer
@@ -32,10 +36,11 @@ export default function startCLI() {
},
])
.then((answers) => {
console.log(JSON.stringify(answers, null, " "));
});
// spinner.start("Processing File 1");
// spinner.succeed();
// spinner.start("Processing File 2");
// spinner.fail();
for (const logFile of answers.Files) {
try {
parseLogFile(logFile);
} catch {}
}
spinner.succeed("Finished Processing Log Files");
});
}

14
package-lock.json generated
View File

@@ -14,6 +14,7 @@
"commander": "^9.4.0",
"fs": "^0.0.1-security",
"inquirer": "^9.1.0",
"moment": "^2.29.4",
"ora": "^6.1.2"
},
"bin": {
@@ -452,6 +453,14 @@
"node": ">=6"
}
},
"node_modules/moment": {
"version": "2.29.4",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz",
"integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==",
"engines": {
"node": "*"
}
},
"node_modules/mute-stream": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz",
@@ -960,6 +969,11 @@
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
"integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="
},
"moment": {
"version": "2.29.4",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz",
"integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w=="
},
"mute-stream": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "va-log-parser",
"version": "0.0.2",
"version": "0.0.1",
"description": "A Log Parser for SailPoint VA logs",
"main": "index.js",
"type": "module",
@@ -18,6 +18,7 @@
"commander": "^9.4.0",
"fs": "^0.0.1-security",
"inquirer": "^9.1.0",
"moment": "^2.29.4",
"ora": "^6.1.2"
}
}