diff --git a/.eslintrc.json b/.eslintrc.json index d2bb05f..df2e200 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,10 +1,6 @@ { - "plugins": [ - "@typescript-eslint" - ], - "extends": [ - "plugin:github/recommended" - ], + "plugins": ["@typescript-eslint"], + "extends": ["plugin:github/recommended"], "parser": "@typescript-eslint/parser", "parserOptions": { "ecmaVersion": 9, @@ -36,10 +32,7 @@ "allowExpressions": true } ], - "@typescript-eslint/func-call-spacing": [ - "error", - "never" - ], + "@typescript-eslint/func-call-spacing": ["error", "never"], "@typescript-eslint/no-array-constructor": "error", "@typescript-eslint/no-explicit-any": "error", "@typescript-eslint/no-extraneous-class": "error", @@ -59,11 +52,7 @@ "@typescript-eslint/promise-function-async": "error", "@typescript-eslint/require-array-sort-compare": "error", "@typescript-eslint/restrict-plus-operands": "error", - "semi": "off", - "@typescript-eslint/semi": ["error", "always"], + "@typescript-eslint/type-annotation-spacing": "error", "@typescript-eslint/unbound-method": "error" }, diff --git a/.github/workflows/self-test.yml b/.github/workflows/self-test.yml new file mode 100644 index 0000000..b435258 --- /dev/null +++ b/.github/workflows/self-test.yml @@ -0,0 +1,48 @@ +name: self-test + +on: + pull_request: + paths: + - 'src/**' + - 'action.yml' + - '.github/workflows/self-test.yml' + workflow_dispatch: + +permissions: + pull-requests: write + contents: read + +jobs: + self-test: + name: Test the action against itself + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Build & package + run: | + npm ci + npm run build + npm run package + + # Test the action itself (using the local changes) + - name: Run workflow telemetry + uses: ./ + with: + proc_trace_sys_enable: true + + # This is a simple test that 
gives the action something to measure + - name: Run some commands to generate metrics + run: | + echo "Creating some disk activity..." + dd if=/dev/zero of=testfile bs=1M count=100 + rm testfile + + echo "Creating some CPU activity..." + for i in {1..10000000}; do :; done diff --git a/.prettierrc.json b/.prettierrc.json index 05d3af9..517471f 100644 --- a/.prettierrc.json +++ b/.prettierrc.json @@ -2,9 +2,8 @@ "printWidth": 80, "tabWidth": 2, "useTabs": false, - "semi": false, - "singleQuote": true, - "trailingComma": "none", + "semi": true, + "trailingComma": "es5", "bracketSpacing": true, "arrowParens": "avoid" } diff --git a/action.yml b/action.yml index cb3c025..98c51a3 100644 --- a/action.yml +++ b/action.yml @@ -28,7 +28,7 @@ inputs: proc_trace_table_show: - description: "Enables showing traced processes in trace table. Defaults to 'false'." - default: "false" + description: "Enables showing traced processes in trace table. Defaults to 'true'." + default: "true" required: false comment_on_pr: description: "Set to `true` to publish the results as comment to the PR (applicable if workflow run is triggered from PR). Defaults to 'true'." 
diff --git a/src/config.ts b/src/config.ts new file mode 100644 index 0000000..83618ed --- /dev/null +++ b/src/config.ts @@ -0,0 +1,32 @@ +// Configuration constants +export const UI_CONFIG = { + FONT_FAMILY: "Arial, Helvetica, sans-serif", +}; + +// Chart configuration defaults +export const CHART_DEFAULTS = { + options: { + width: 1000, + height: 500, + xAxis: { + label: "Time", + fontFamily: UI_CONFIG.FONT_FAMILY, + }, + yAxis: { + fontFamily: UI_CONFIG.FONT_FAMILY, + }, + timeTicks: { + unit: "auto", + }, + fontFamily: UI_CONFIG.FONT_FAMILY, + }, +}; + +// Mermaid chart defaults +export const MERMAID_DEFAULTS = { + gantt: { + dateFormat: "x", + axisFormat: "%H:%M:%S", + fontFamily: UI_CONFIG.FONT_FAMILY, + }, +}; diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts index af633e6..9a14883 100644 --- a/src/interfaces/index.ts +++ b/src/interfaces/index.ts @@ -1,113 +1,113 @@ // eslint-disable-next-line import/no-unresolved -import { components } from '@octokit/openapi-types' +import { components } from "@octokit/openapi-types"; -export type WorkflowJobType = components['schemas']['job'] +export type WorkflowJobType = components["schemas"]["job"]; export interface CPUStats { - readonly time: number - readonly totalLoad: number - readonly userLoad: number - readonly systemLoad: number + readonly time: number; + readonly totalLoad: number; + readonly userLoad: number; + readonly systemLoad: number; } export interface MemoryStats { - readonly time: number - readonly totalMemoryMb: number - readonly activeMemoryMb: number - readonly availableMemoryMb: number + readonly time: number; + readonly totalMemoryMb: number; + readonly activeMemoryMb: number; + readonly availableMemoryMb: number; } export interface NetworkStats { - readonly time: number - readonly rxMb: number - readonly txMb: number + readonly time: number; + readonly rxMb: number; + readonly txMb: number; } export interface DiskStats { - readonly time: number - readonly rxMb: number - readonly 
wxMb: number + readonly time: number; + readonly rxMb: number; + readonly wxMb: number; } export interface DiskSizeStats { - readonly time: number - readonly availableSizeMb: number - readonly usedSizeMb: number + readonly time: number; + readonly availableSizeMb: number; + readonly usedSizeMb: number; } export interface ProcessedStats { - readonly x: number - readonly y: number + readonly x: number; + readonly y: number; } export interface ProcessedCPUStats { - readonly userLoadX: ProcessedStats[] - readonly systemLoadX: ProcessedStats[] + readonly userLoadX: ProcessedStats[]; + readonly systemLoadX: ProcessedStats[]; } export interface ProcessedMemoryStats { - readonly activeMemoryX: ProcessedStats[] - readonly availableMemoryX: ProcessedStats[] + readonly activeMemoryX: ProcessedStats[]; + readonly availableMemoryX: ProcessedStats[]; } export interface ProcessedNetworkStats { - readonly networkReadX: ProcessedStats[] - readonly networkWriteX: ProcessedStats[] + readonly networkReadX: ProcessedStats[]; + readonly networkWriteX: ProcessedStats[]; } export interface ProcessedDiskStats { - readonly diskReadX: ProcessedStats[] - readonly diskWriteX: ProcessedStats[] + readonly diskReadX: ProcessedStats[]; + readonly diskWriteX: ProcessedStats[]; } export interface ProcessedDiskSizeStats { - readonly diskAvailableX: ProcessedStats[] - readonly diskUsedX: ProcessedStats[] + readonly diskAvailableX: ProcessedStats[]; + readonly diskUsedX: ProcessedStats[]; } export interface LineGraphOptions { - readonly label: string - readonly axisColor: string + readonly label: string; + readonly axisColor: string; readonly line: { - readonly label: string - readonly color: string - readonly points: ProcessedStats[] - } + readonly label: string; + readonly color: string; + readonly points: ProcessedStats[]; + }; } export interface StackedArea { - readonly label: string - readonly color: string - readonly points: ProcessedStats[] + readonly label: string; + readonly color: string; + 
readonly points: ProcessedStats[]; } export interface StackedAreaGraphOptions { - readonly label: string - readonly axisColor: string - readonly areas: StackedArea[] + readonly label: string; + readonly axisColor: string; + readonly areas: StackedArea[]; } export interface GraphResponse { - readonly id: string - readonly url: string + readonly id: string; + readonly url: string; } export interface CompletedCommand { - readonly ts: string - readonly event: string - readonly name: string - readonly uid: number - readonly pid: number - readonly ppid: string - readonly startTime: number - readonly fileName: string - readonly args: string[] - readonly duration: number - readonly exitCode: number - readonly order: number + readonly ts: string; + readonly event: string; + readonly name: string; + readonly uid: number; + readonly pid: number; + readonly ppid: string; + readonly startTime: number; + readonly fileName: string; + readonly args: string[]; + readonly duration: number; + readonly exitCode: number; + readonly order: number; } export interface ProcEventParseOptions { - readonly minDuration: number - readonly traceSystemProcesses: boolean + readonly minDuration: number; + readonly traceSystemProcesses: boolean; } diff --git a/src/logger.ts b/src/logger.ts index bf283a0..0f511e6 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -1,24 +1,24 @@ -import * as core from '@actions/core' +import * as core from "@actions/core"; -const LOG_HEADER: string = '[Workflow Telemetry]' +const LOG_HEADER: string = "[Workflow Telemetry]"; export function isDebugEnabled(): boolean { - return core.isDebug() + return core.isDebug(); } export function debug(msg: string) { - core.debug(LOG_HEADER + ' ' + msg) + core.debug(LOG_HEADER + " " + msg); } export function info(msg: string) { - core.info(LOG_HEADER + ' ' + msg) + core.info(LOG_HEADER + " " + msg); } export function error(msg: string | Error) { - if (msg instanceof String || typeof msg === 'string') { - core.error(LOG_HEADER + ' ' 
+ msg) + if (msg instanceof String || typeof msg === "string") { + core.error(LOG_HEADER + " " + msg); } else { - core.error(LOG_HEADER + ' ' + (msg as Error).name) - core.error(msg as Error) + core.error(LOG_HEADER + " " + (msg as Error).name); + core.error(msg as Error); } } diff --git a/src/main.ts b/src/main.ts index 5946acb..82e5547 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,24 +1,24 @@ -import * as core from '@actions/core' -import * as stepTracer from './stepTracer' -import * as statCollector from './statCollector' -import * as processTracer from './processTracer' -import * as logger from './logger' +import * as core from "@actions/core"; +import * as stepTracer from "./stepTracer"; +import * as statCollector from "./statCollector"; +import * as processTracer from "./processTracer"; +import * as logger from "./logger"; async function run(): Promise { try { - logger.info(`Initializing ...`) + logger.info(`Initializing ...`); // Start step tracer - await stepTracer.start() + await stepTracer.start(); // Start stat collector - await statCollector.start() + await statCollector.start(); // Start process tracer - await processTracer.start() + await processTracer.start(); - logger.info(`Initialization completed`) + logger.info(`Initialization completed`); } catch (error: any) { - logger.error(error.message) + logger.error(error.message); } } -run() +run(); diff --git a/src/post.ts b/src/post.ts index 04bab7c..76879d9 100644 --- a/src/post.ts +++ b/src/post.ts @@ -1,16 +1,16 @@ -import * as core from '@actions/core' -import * as github from '@actions/github' -import { Octokit } from '@octokit/action' -import * as stepTracer from './stepTracer' -import * as statCollector from './statCollector' -import * as processTracer from './processTracer' -import * as logger from './logger' -import { WorkflowJobType } from './interfaces' - -const { pull_request } = github.context.payload -const { workflow, job, repo, runId, sha } = github.context -const PAGE_SIZE = 100 
-const octokit: Octokit = new Octokit() +import * as core from "@actions/core"; +import * as github from "@actions/github"; +import { Octokit } from "@octokit/action"; +import * as stepTracer from "./stepTracer"; +import * as statCollector from "./statCollector"; +import * as processTracer from "./processTracer"; +import * as logger from "./logger"; +import { WorkflowJobType } from "./interfaces"; + +const { pull_request } = github.context.payload; +const { workflow, job, repo, runId, sha } = github.context; +const PAGE_SIZE = 100; +const octokit: Octokit = new Octokit(); async function getCurrentJob(): Promise { const _getCurrentJob = async (): Promise => { @@ -20,146 +20,150 @@ async function getCurrentJob(): Promise { repo: repo.repo, run_id: runId, per_page: PAGE_SIZE, - page - }) - const jobs: WorkflowJobType[] = result.data.jobs + page, + }); + const jobs: WorkflowJobType[] = result.data.jobs; // If there are no jobs, stop here if (!jobs || !jobs.length) { - break + break; } const currentJobs = jobs.filter( it => - it.status === 'in_progress' && + it.status === "in_progress" && it.runner_name === process.env.RUNNER_NAME - ) + ); if (currentJobs && currentJobs.length) { - return currentJobs[0] + return currentJobs[0]; } // Since returning job count is less than page size, this means that there are no other jobs. // So no need to make another request for the next page. if (jobs.length < PAGE_SIZE) { - break + break; } } - return null - } + return null; + }; try { for (let i = 0; i < 10; i++) { - const currentJob: WorkflowJobType | null = await _getCurrentJob() + const currentJob: WorkflowJobType | null = await _getCurrentJob(); if (currentJob && currentJob.id) { - return currentJob + return currentJob; } - await new Promise(r => setTimeout(r, 1000)) + await new Promise(r => setTimeout(r, 1000)); } } catch (error: any) { logger.error( `Unable to get current workflow job info. 
` + `Please sure that your workflow have "actions:read" permission!` - ) + ); } - return null + return null; } async function reportAll( currentJob: WorkflowJobType, content: string ): Promise { - logger.info(`Reporting all content ...`) + logger.info(`Reporting all content ...`); - logger.debug(`Workflow - Job: ${workflow} - ${job}`) + logger.debug(`Workflow - Job: ${workflow} - ${job}`); - const jobUrl = `https://github.com/${repo.owner}/${repo.repo}/runs/${currentJob.id}?check_suite_focus=true` - logger.debug(`Job url: ${jobUrl}`) + const jobUrl = `https://github.com/${repo.owner}/${repo.repo}/runs/${currentJob.id}?check_suite_focus=true`; + logger.debug(`Job url: ${jobUrl}`); - const title = `## Workflow Telemetry - ${workflow} / ${currentJob.name}` - logger.debug(`Title: ${title}`) + const title = `## Telemetry: ${workflow} / ${currentJob.name}`; + logger.debug(`Title: ${title}`); const commit: string = - (pull_request && pull_request.head && pull_request.head.sha) || sha - logger.debug(`Commit: ${commit}`) + (pull_request && pull_request.head && pull_request.head.sha) || sha; + logger.debug(`Commit: ${commit}`); - const commitUrl = `https://github.com/${repo.owner}/${repo.repo}/commit/${commit}` - logger.debug(`Commit url: ${commitUrl}`) + const commitUrl = `https://github.com/${repo.owner}/${repo.repo}/commit/${commit}`; + logger.debug(`Commit url: ${commitUrl}`); const info = `Workflow telemetry for commit [${commit}](${commitUrl})\n` + - `You can access workflow job details [here](${jobUrl})` + `You can access workflow job details [here](${jobUrl})`; - const postContent: string = [title, info, content].join('\n') + const postContent: string = [title, info, content].join("\n"); - const jobSummary: string = core.getInput('job_summary') - if ('true' === jobSummary) { - core.summary.addRaw(postContent) - await core.summary.write() + const jobSummary: string = core.getInput("job_summary"); + if ("true" === jobSummary) { + core.summary.addRaw(postContent); + 
await core.summary.write(); } - const commentOnPR: string = core.getInput('comment_on_pr') - if (pull_request && 'true' === commentOnPR) { + const commentOnPR: string = core.getInput("comment_on_pr"); + if (pull_request && "true" === commentOnPR) { if (logger.isDebugEnabled()) { - logger.debug(`Found Pull Request: ${JSON.stringify(pull_request)}`) + logger.debug(`Found Pull Request: ${JSON.stringify(pull_request)}`); } await octokit.rest.issues.createComment({ ...github.context.repo, issue_number: Number(github.context.payload.pull_request?.number), - body: postContent - }) + body: postContent, + }); } else { - logger.debug(`Couldn't find Pull Request`) + logger.debug(`Couldn't find Pull Request`); } - logger.info(`Reporting all content completed`) + logger.info(`Reporting all content completed`); } async function run(): Promise { try { - logger.info(`Finishing ...`) + logger.info(`Finishing ...`); - const currentJob: WorkflowJobType | null = await getCurrentJob() + const currentJob: WorkflowJobType | null = await getCurrentJob(); if (!currentJob) { logger.error( `Couldn't find current job. 
So action will not report any data.` - ) - return + ); + return; } - logger.debug(`Current job: ${JSON.stringify(currentJob)}`) + logger.debug(`Current job: ${JSON.stringify(currentJob)}`); // Finish step tracer - await stepTracer.finish(currentJob) + await stepTracer.finish(currentJob); // Finish stat collector - await statCollector.finish(currentJob) + await statCollector.finish(currentJob); // Finish process tracer - await processTracer.finish(currentJob) + await processTracer.finish(currentJob); // Report step tracer - const stepTracerContent: string | null = await stepTracer.report(currentJob) + const stepTracerContent: string | null = await stepTracer.report( + currentJob + ); // Report stat collector - const stepCollectorContent: string | null = - await statCollector.report(currentJob) + const stepCollectorContent: string | null = await statCollector.report( + currentJob + ); // Report process tracer - const procTracerContent: string | null = - await processTracer.report(currentJob) + const procTracerContent: string | null = await processTracer.report( + currentJob + ); - let allContent = '' + let allContent = ""; if (stepTracerContent) { - allContent = allContent.concat(stepTracerContent, '\n') + allContent = allContent.concat(stepTracerContent, "\n"); } if (stepCollectorContent) { - allContent = allContent.concat(stepCollectorContent, '\n') + allContent = allContent.concat(stepCollectorContent, "\n"); } if (procTracerContent) { - allContent = allContent.concat(procTracerContent, '\n') + allContent = allContent.concat(procTracerContent, "\n"); } - await reportAll(currentJob, allContent) + await reportAll(currentJob, allContent); - logger.info(`Finish completed`) + logger.info(`Finish completed`); } catch (error: any) { - logger.error(error.message) + logger.error(error.message); } } -run() +run(); diff --git a/src/procTraceParser.ts b/src/procTraceParser.ts index ebc2ae0..afc8895 100644 --- a/src/procTraceParser.ts +++ b/src/procTraceParser.ts @@ -1,110 
+1,110 @@ -import * as fs from 'fs' -import * as readline from 'readline' -import * as logger from './logger' -import { CompletedCommand, ProcEventParseOptions } from './interfaces' +import * as fs from "fs"; +import * as readline from "readline"; +import * as logger from "./logger"; +import { CompletedCommand, ProcEventParseOptions } from "./interfaces"; const SYS_PROCS_TO_BE_IGNORED: Set = new Set([ - 'awk', - 'basename', - 'cat', - 'cut', - 'date', - 'echo', - 'envsubst', - 'expr', - 'dirname', - 'grep', - 'head', - 'id', - 'ip', - 'ln', - 'ls', - 'lsblk', - 'mkdir', - 'mktemp', - 'mv', - 'ps', - 'readlink', - 'rm', - 'sed', - 'seq', - 'sh', - 'uname', - 'whoami' -]) + "awk", + "basename", + "cat", + "cut", + "date", + "echo", + "envsubst", + "expr", + "dirname", + "grep", + "head", + "id", + "ip", + "ln", + "ls", + "lsblk", + "mkdir", + "mktemp", + "mv", + "ps", + "readlink", + "rm", + "sed", + "seq", + "sh", + "uname", + "whoami", +]); export async function parse( filePath: string, procEventParseOptions: ProcEventParseOptions ): Promise { const minDuration: number = - (procEventParseOptions && procEventParseOptions.minDuration) || -1 + (procEventParseOptions && procEventParseOptions.minDuration) || -1; const traceSystemProcesses: boolean = (procEventParseOptions && procEventParseOptions.traceSystemProcesses) || - false + false; - const fileStream: fs.ReadStream = fs.createReadStream(filePath) + const fileStream: fs.ReadStream = fs.createReadStream(filePath); const rl: readline.Interface = readline.createInterface({ input: fileStream, - crlfDelay: Infinity - }) + crlfDelay: Infinity, + }); // Note: we use the crlfDelay option to recognize all instances of CR LF // ('\r\n') in input file as a single line break. 
- const activeCommands: Map = new Map() - const replacedCommands: Map = new Map() - const completedCommands: CompletedCommand[] = [] - let commandOrder: number = 0 + const activeCommands: Map = new Map(); + const replacedCommands: Map = new Map(); + const completedCommands: CompletedCommand[] = []; + let commandOrder: number = 0; for await (let line of rl) { - line = line.trim() + line = line.trim(); if (!line || !line.length) { - continue + continue; } try { if (logger.isDebugEnabled()) { - logger.debug(`Parsing trace process event: ${line}`) + logger.debug(`Parsing trace process event: ${line}`); } - const event = JSON.parse(line) - event.order = ++commandOrder + const event = JSON.parse(line); + event.order = ++commandOrder; if (!traceSystemProcesses && SYS_PROCS_TO_BE_IGNORED.has(event.name)) { - continue + continue; } - if ('EXEC' === event.event) { - const existingCommand: any = activeCommands.get(event.pid) - activeCommands.set(event.pid, event) + if ("EXEC" === event.event) { + const existingCommand: any = activeCommands.get(event.pid); + activeCommands.set(event.pid, event); if (existingCommand) { - replacedCommands.set(event.pid, existingCommand) + replacedCommands.set(event.pid, existingCommand); } - } else if ('EXIT' === event.event) { - let activeCommandCompleted: boolean = false - let replacedCommandCompleted: boolean = false + } else if ("EXIT" === event.event) { + let activeCommandCompleted: boolean = false; + let replacedCommandCompleted: boolean = false; // Process active command - const activeCommand: any = activeCommands.get(event.pid) - activeCommands.delete(event.pid) + const activeCommand: any = activeCommands.get(event.pid); + activeCommands.delete(event.pid); if (activeCommand) { for (let key of Object.keys(event)) { if (!activeCommand.hasOwnProperty(key)) { - activeCommand[key] = event[key] + activeCommand[key] = event[key]; } } - activeCommandCompleted = true + activeCommandCompleted = true; } // Process replaced command if there is - 
const replacedCommand: any = replacedCommands.get(event.pid) - replacedCommands.delete(event.pid) + const replacedCommand: any = replacedCommands.get(event.pid); + replacedCommands.delete(event.pid); if (replacedCommand && activeCommandCompleted) { for (let key of Object.keys(event)) { if (!replacedCommand.hasOwnProperty(key)) { - replacedCommand[key] = event[key] + replacedCommand[key] = event[key]; } } const finishTime: number = - activeCommand.startTime + activeCommand.duration - replacedCommand.duration = finishTime - replacedCommand.startTime - replacedCommandCompleted = true + activeCommand.startTime + activeCommand.duration; + replacedCommand.duration = finishTime - replacedCommand.startTime; + replacedCommandCompleted = true; } // Complete the replaced command first if there is @@ -112,30 +112,30 @@ export async function parse( replacedCommandCompleted && replacedCommand.duration > minDuration ) { - completedCommands.push(replacedCommand) + completedCommands.push(replacedCommand); } // Then complete the actual command if (activeCommandCompleted && activeCommand.duration > minDuration) { - completedCommands.push(activeCommand) + completedCommands.push(activeCommand); } } else { if (logger.isDebugEnabled()) { - logger.debug(`Unknown trace process event: ${line}`) + logger.debug(`Unknown trace process event: ${line}`); } } } catch (error: any) { - logger.debug(`Unable to parse process trace event (${error}): ${line}`) + logger.debug(`Unable to parse process trace event (${error}): ${line}`); } } completedCommands.sort((a: CompletedCommand, b: CompletedCommand) => { - return a.startTime - b.startTime - }) + return a.startTime - b.startTime; + }); if (logger.isDebugEnabled()) { - logger.debug(`Completed commands: ${JSON.stringify(completedCommands)}`) + logger.debug(`Completed commands: ${JSON.stringify(completedCommands)}`); } - return completedCommands + return completedCommands; } diff --git a/src/processTracer.ts b/src/processTracer.ts index a379d91..4f6f7a1 
100644 --- a/src/processTracer.ts +++ b/src/processTracer.ts @@ -1,35 +1,36 @@ -import { ChildProcess, spawn, exec } from 'child_process' -import path from 'path' -import * as core from '@actions/core' -import si from 'systeminformation' -import { sprintf } from 'sprintf-js' -import { parse } from './procTraceParser' -import { CompletedCommand, WorkflowJobType } from './interfaces' -import * as logger from './logger' - -const PROC_TRACER_PID_KEY = 'PROC_TRACER_PID' -const PROC_TRACER_OUTPUT_FILE_NAME = 'proc-trace.out' -const PROC_TRACER_BINARY_NAME_UBUNTU_20: string = 'proc_tracer_ubuntu-20' -const PROC_TRACER_BINARY_NAME_UBUNTU_22: string = 'proc_tracer_ubuntu-22' -const DEFAULT_PROC_TRACE_CHART_MAX_COUNT = 100 -const GHA_FILE_NAME_PREFIX = '/home/runner/work/_actions/' - -let finished = false +import { ChildProcess, spawn, exec } from "child_process"; +import path from "path"; +import * as core from "@actions/core"; +import si from "systeminformation"; +import { sprintf } from "sprintf-js"; +import { parse } from "./procTraceParser"; +import { CompletedCommand, WorkflowJobType } from "./interfaces"; +import { MERMAID_DEFAULTS } from "./config"; +import * as logger from "./logger"; + +const PROC_TRACER_PID_KEY = "PROC_TRACER_PID"; +const PROC_TRACER_OUTPUT_FILE_NAME = "proc-trace.out"; +const PROC_TRACER_BINARY_NAME_UBUNTU_20: string = "proc_tracer_ubuntu-20"; +const PROC_TRACER_BINARY_NAME_UBUNTU_22: string = "proc_tracer_ubuntu-22"; +const DEFAULT_PROC_TRACE_CHART_MAX_COUNT = 100; +const GHA_FILE_NAME_PREFIX = "/home/runner/work/_actions/"; + +let finished = false; async function getProcessTracerBinaryName(): Promise { - const osInfo: si.Systeminformation.OsData = await si.osInfo() + const osInfo: si.Systeminformation.OsData = await si.osInfo(); if (osInfo) { // Check whether we are running on Ubuntu - if (osInfo.distro === 'Ubuntu') { - const majorVersion: number = parseInt(osInfo.release.split('.')[0]) + if (osInfo.distro === "Ubuntu") { + const majorVersion: 
number = parseInt(osInfo.release.split(".")[0]); if (majorVersion === 20) { - logger.info(`Using ${PROC_TRACER_BINARY_NAME_UBUNTU_20}`) - return PROC_TRACER_BINARY_NAME_UBUNTU_20 + logger.info(`Using ${PROC_TRACER_BINARY_NAME_UBUNTU_20}`); + return PROC_TRACER_BINARY_NAME_UBUNTU_20; } if (majorVersion === 22) { - logger.info(`Using ${PROC_TRACER_BINARY_NAME_UBUNTU_22}`) - return PROC_TRACER_BINARY_NAME_UBUNTU_22 + logger.info(`Using ${PROC_TRACER_BINARY_NAME_UBUNTU_22}`); + return PROC_TRACER_BINARY_NAME_UBUNTU_22; } } } @@ -38,233 +39,243 @@ async function getProcessTracerBinaryName(): Promise { `Process tracing disabled because of unsupported OS: ${JSON.stringify( osInfo )}` - ) + ); - return null + return null; } function getExtraProcessInfo(command: CompletedCommand): string | null { // Check whether this is node process with args - if (command.name === 'node' && command.args.length > 1) { - const arg1: string = command.args[1] + if (command.name === "node" && command.args.length > 1) { + const arg1: string = command.args[1]; // Check whether this is Node.js GHA process if (arg1.startsWith(GHA_FILE_NAME_PREFIX)) { - const actionFile: string = arg1.substring(GHA_FILE_NAME_PREFIX.length) - const idx1: number = actionFile.indexOf('/') - const idx2: number = actionFile.indexOf('/', idx1 + 1) + const actionFile: string = arg1.substring(GHA_FILE_NAME_PREFIX.length); + const idx1: number = actionFile.indexOf("/"); + const idx2: number = actionFile.indexOf("/", idx1 + 1); if (idx1 >= 0 && idx2 > idx1) { // If we could find a valid GHA name, use it as extra info - return actionFile.substring(idx1 + 1, idx2) + return actionFile.substring(idx1 + 1, idx2); } } } - return null + return null; } /////////////////////////// export async function start(): Promise { - logger.info(`Starting process tracer ...`) + logger.info(`Starting process tracer ...`); try { - const procTracerBinaryName: string | null = - await getProcessTracerBinaryName() + const procTracerBinaryName: + | 
string + | null = await getProcessTracerBinaryName(); if (procTracerBinaryName) { const procTraceOutFilePath = path.join( __dirname, - '../proc-tracer', + "../proc-tracer", PROC_TRACER_OUTPUT_FILE_NAME - ) + ); const child: ChildProcess = spawn( - 'sudo', + "sudo", [ path.join(__dirname, `../proc-tracer/${procTracerBinaryName}`), - '-f', - 'json', - '-o', - procTraceOutFilePath + "-f", + "json", + "-o", + procTraceOutFilePath, ], { detached: true, - stdio: 'ignore', + stdio: "ignore", env: { - ...process.env - } + ...process.env, + }, } - ) - child.unref() + ); + child.unref(); - core.saveState(PROC_TRACER_PID_KEY, child.pid?.toString()) + core.saveState(PROC_TRACER_PID_KEY, child.pid?.toString()); - logger.info(`Started process tracer`) + logger.info(`Started process tracer`); - return true - } else { - return false + return true; } } catch (error: any) { - logger.error('Unable to start process tracer') - logger.error(error) - - return false + logger.error("Unable to start process tracer"); + logger.error(error); } + return false; } export async function finish(currentJob: WorkflowJobType): Promise { - logger.info(`Finishing process tracer ...`) + logger.info(`Finishing process tracer ...`); - const procTracePID: string = core.getState(PROC_TRACER_PID_KEY) + const procTracePID: string = core.getState(PROC_TRACER_PID_KEY); if (!procTracePID) { logger.info( `Skipped finishing process tracer since process tracer didn't started` - ) - return false + ); + return false; } try { logger.debug( `Interrupting process tracer with pid ${procTracePID} to stop gracefully ...` - ) + ); - exec(`sudo kill -s INT ${procTracePID}`) - finished = true + exec(`sudo kill -s INT ${procTracePID}`); + finished = true; - logger.info(`Finished process tracer`) + logger.info(`Finished process tracer`); - return true + return true; } catch (error: any) { - logger.error('Unable to finish process tracer') - logger.error(error) + logger.error("Unable to finish process tracer"); + 
logger.error(error); - return false + return false; } } export async function report( currentJob: WorkflowJobType ): Promise { - logger.info(`Reporting process tracer result ...`) + logger.info(`Reporting process tracer result ...`); if (!finished) { logger.info( `Skipped reporting process tracer since process tracer didn't finished` - ) - return null + ); + return null; } try { const procTraceOutFilePath = path.join( __dirname, - '../proc-tracer', + "../proc-tracer", PROC_TRACER_OUTPUT_FILE_NAME - ) + ); logger.info( `Getting process tracer result from file ${procTraceOutFilePath} ...` - ) + ); - let procTraceMinDuration = -1 + let procTraceMinDuration = -1; const procTraceMinDurationInput: string = core.getInput( - 'proc_trace_min_duration' - ) + "proc_trace_min_duration" + ); if (procTraceMinDurationInput) { - const minProcDurationVal: number = parseInt(procTraceMinDurationInput) + const minProcDurationVal: number = parseInt(procTraceMinDurationInput); if (Number.isInteger(minProcDurationVal)) { - procTraceMinDuration = minProcDurationVal + procTraceMinDuration = minProcDurationVal; } } const procTraceSysEnable: boolean = - core.getInput('proc_trace_sys_enable') === 'true' + core.getInput("proc_trace_sys_enable") === "true"; const procTraceChartShow: boolean = - core.getInput('proc_trace_chart_show') === 'true' + core.getInput("proc_trace_chart_show") === "true"; const procTraceChartMaxCountInput: number = parseInt( - core.getInput('proc_trace_chart_max_count') - ) + core.getInput("proc_trace_chart_max_count") + ); const procTraceChartMaxCount = Number.isInteger(procTraceChartMaxCountInput) ? 
procTraceChartMaxCountInput - : DEFAULT_PROC_TRACE_CHART_MAX_COUNT + : DEFAULT_PROC_TRACE_CHART_MAX_COUNT; const procTraceTableShow: boolean = - core.getInput('proc_trace_table_show') === 'true' + core.getInput("proc_trace_table_show") === "true"; const completedCommands: CompletedCommand[] = await parse( procTraceOutFilePath, { minDuration: procTraceMinDuration, - traceSystemProcesses: procTraceSysEnable + traceSystemProcesses: procTraceSysEnable, } - ) + ); /////////////////////////////////////////////////////////////////////////// - let chartContent = '' + let chartContent = ""; if (procTraceChartShow) { - chartContent = chartContent.concat('gantt', '\n') - chartContent = chartContent.concat('\t', `title ${currentJob.name}`, '\n') - chartContent = chartContent.concat('\t', `dateFormat x`, '\n') - chartContent = chartContent.concat('\t', `axisFormat %H:%M:%S`, '\n') + chartContent = chartContent.concat("gantt", "\n"); + chartContent = chartContent.concat( + "\t", + `title ${currentJob.name}`, + "\n" + ); + chartContent = chartContent.concat( + "\t", + `dateFormat ${MERMAID_DEFAULTS.gantt.dateFormat}`, + "\n" + ); + chartContent = chartContent.concat( + "\t", + `axisFormat ${MERMAID_DEFAULTS.gantt.axisFormat}`, + "\n" + ); const filteredCommands: CompletedCommand[] = [...completedCommands] .sort((a: CompletedCommand, b: CompletedCommand) => { - return -(a.duration - b.duration) + return -(a.duration - b.duration); }) .slice(0, procTraceChartMaxCount) .sort((a: CompletedCommand, b: CompletedCommand) => { - let result = a.startTime - b.startTime + let result = a.startTime - b.startTime; if (result === 0 && a.order && b.order) { - result = a.order - b.order + result = a.order - b.order; } - return result - }) + return result; + }); for (const command of filteredCommands) { - const extraProcessInfo: string | null = getExtraProcessInfo(command) - const escapedName = command.name.replace(/:/g, '#colon;') + const extraProcessInfo: string | null = 
getExtraProcessInfo(command); + const escapedName = command.name.replace(/:/g, "#colon;"); if (extraProcessInfo) { chartContent = chartContent.concat( - '\t', + "\t", `${escapedName} (${extraProcessInfo}) : ` - ) + ); } else { - chartContent = chartContent.concat('\t', `${escapedName} : `) + chartContent = chartContent.concat("\t", `${escapedName} : `); } if (command.exitCode !== 0) { // to show red - chartContent = chartContent.concat('crit, ') + chartContent = chartContent.concat("crit, "); } - const startTime: number = command.startTime - const finishTime: number = command.startTime + command.duration + const startTime: number = command.startTime; + const finishTime: number = command.startTime + command.duration; chartContent = chartContent.concat( `${Math.min(startTime, finishTime)}, ${finishTime}`, - '\n' - ) + "\n" + ); } } /////////////////////////////////////////////////////////////////////////// - let tableContent = '' + let tableContent = ""; if (procTraceTableShow) { - const commandInfos: string[] = [] + const commandInfos: string[] = []; commandInfos.push( sprintf( - '%-12s %-16s %7s %7s %7s %15s %15s %10s %-20s', - 'TIME', - 'NAME', - 'UID', - 'PID', - 'PPID', - 'START TIME', - 'DURATION (ms)', - 'EXIT CODE', - 'FILE NAME + ARGS' + "%-12s %-16s %7s %7s %7s %15s %15s %10s %-20s", + "TIME", + "NAME", + "UID", + "PID", + "PPID", + "START TIME", + "DURATION (ms)", + "EXIT CODE", + "FILE NAME + ARGS" ) - ) + ); for (const command of completedCommands) { commandInfos.push( sprintf( - '%-12s %-16s %7d %7d %7d %15d %15d %10d %s %s', + "%-12s %-16s %7d %7d %7d %15d %15d %10d %s %s", command.ts, command.name, command.uid, @@ -274,43 +285,43 @@ export async function report( command.duration, command.exitCode, command.fileName, - command.args.join(' ') + command.args.join(" ") ) - ) + ); } - tableContent = commandInfos.join('\n') + tableContent = commandInfos.join("\n"); } /////////////////////////////////////////////////////////////////////////// - const 
postContentItems: string[] = ['', '### Process Trace'] + const postContentItems: string[] = ["", "### Process trace"]; if (procTraceChartShow) { postContentItems.push( - '', + "", `#### Top ${procTraceChartMaxCount} processes with highest duration`, - '', - '```mermaid' + '\n' + chartContent + '\n' + '```' - ) + "", + "```mermaid" + "\n" + chartContent + "\n" + "```" + ); } if (procTraceTableShow) { postContentItems.push( - '', - `#### All processes with detail`, - '', - '```' + '\n' + tableContent + '\n' + '```' - ) + "", + `#### All processes with details`, + "", + "```" + "\n" + tableContent + "\n" + "```" + ); } - const postContent: string = postContentItems.join('\n') + const postContent: string = postContentItems.join("\n"); - logger.info(`Reported process tracer result`) + logger.info(`Reported process tracer result`); - return postContent + return postContent; } catch (error: any) { - logger.error('Unable to report process tracer result') - logger.error(error) + logger.error("Unable to report process tracer result"); + logger.error(error); - return null + return null; } } diff --git a/src/statCollector.ts b/src/statCollector.ts index c87d2ab..0f14072 100644 --- a/src/statCollector.ts +++ b/src/statCollector.ts @@ -1,7 +1,7 @@ -import { ChildProcess, spawn } from 'child_process' -import path from 'path' -import axios from 'axios' -import * as core from '@actions/core' +import { ChildProcess, spawn } from "child_process"; +import path from "path"; +import axios from "axios"; +import * as core from "@actions/core"; import { CPUStats, DiskSizeStats, @@ -17,65 +17,66 @@ import { ProcessedNetworkStats, ProcessedStats, StackedAreaGraphOptions, - WorkflowJobType -} from './interfaces' -import * as logger from './logger' -import { log } from 'console' + WorkflowJobType, +} from "./interfaces"; +import * as logger from "./logger"; +import { log } from "console"; +import { CHART_DEFAULTS } from "./config"; -const STAT_SERVER_PORT = 7777 +const STAT_SERVER_PORT = 7777; 
-const BLACK = '#000000' -const WHITE = '#FFFFFF' +const BLACK = "#000000"; +const WHITE = "#FFFFFF"; async function triggerStatCollect(): Promise { - logger.debug('Triggering stat collect ...') + logger.debug("Triggering stat collect ..."); const response = await axios.post( `http://localhost:${STAT_SERVER_PORT}/collect` - ) + ); if (logger.isDebugEnabled()) { - logger.debug(`Triggered stat collect: ${JSON.stringify(response.data)}`) + logger.debug(`Triggered stat collect: ${JSON.stringify(response.data)}`); } } async function reportWorkflowMetrics(): Promise { - const theme: string = core.getInput('theme', { required: false }) - let axisColor = BLACK + const theme: string = core.getInput("theme", { required: false }); + let axisColor = BLACK; switch (theme) { - case 'light': - axisColor = BLACK - break - case 'dark': - axisColor = WHITE - break + case "light": + axisColor = BLACK; + break; + case "dark": + axisColor = WHITE; + break; default: - core.warning(`Invalid theme: ${theme}`) + core.warning(`Invalid theme: ${theme}`); } - const { userLoadX, systemLoadX } = await getCPUStats() - const { activeMemoryX, availableMemoryX } = await getMemoryStats() - const { networkReadX, networkWriteX } = await getNetworkStats() - const { diskReadX, diskWriteX } = await getDiskStats() - const { diskAvailableX, diskUsedX } = await getDiskSizeStats() + const { userLoadX, systemLoadX } = await getCPUStats(); + const { activeMemoryX, availableMemoryX } = await getMemoryStats(); + const { networkReadX, networkWriteX } = await getNetworkStats(); + const { diskReadX, diskWriteX } = await getDiskStats(); + const { diskAvailableX, diskUsedX } = await getDiskSizeStats(); const cpuLoad = userLoadX && userLoadX.length && systemLoadX && systemLoadX.length ? 
await getStackedAreaGraph({ - label: 'CPU Load (%)', + label: "CPU load (%)", axisColor, areas: [ { - label: 'User Load', - color: '#e41a1c99', - points: userLoadX + label: "User load", + color: "#e41a1c99", + points: userLoadX, }, { - label: 'System Load', - color: '#ff7f0099', - points: systemLoadX - } - ] + label: "System load", + color: "#ff7f0099", + points: systemLoadX, + }, + ], }) - : null + : null; const memoryUsage = activeMemoryX && @@ -83,173 +84,173 @@ async function reportWorkflowMetrics(): Promise { availableMemoryX && availableMemoryX.length ? await getStackedAreaGraph({ - label: 'Memory Usage (MB)', + label: "Memory usage (MB)", axisColor, areas: [ { - label: 'Used', - color: '#377eb899', - points: activeMemoryX + label: "Used", + color: "#377eb899", + points: activeMemoryX, }, { - label: 'Free', - color: '#4daf4a99', - points: availableMemoryX - } - ] + label: "Free", + color: "#4daf4a99", + points: availableMemoryX, + }, + ], }) - : null + : null; const networkIORead = networkReadX && networkReadX.length ? await getLineGraph({ - label: 'Network I/O Read (MB)', + label: "Network I/O read (MB)", axisColor, line: { - label: 'Read', - color: '#be4d25', - points: networkReadX - } + label: "Read", + color: "#be4d25", + points: networkReadX, + }, }) - : null + : null; const networkIOWrite = networkWriteX && networkWriteX.length ? await getLineGraph({ - label: 'Network I/O Write (MB)', + label: "Network I/O write (MB)", axisColor, line: { - label: 'Write', - color: '#6c25be', - points: networkWriteX - } + label: "Write", + color: "#6c25be", + points: networkWriteX, + }, }) - : null + : null; const diskIORead = diskReadX && diskReadX.length ? await getLineGraph({ - label: 'Disk I/O Read (MB)', + label: "Disk I/O read (MB)", axisColor, line: { - label: 'Read', - color: '#be4d25', - points: diskReadX - } + label: "Read", + color: "#be4d25", + points: diskReadX, + }, }) - : null + : null; const diskIOWrite = diskWriteX && diskWriteX.length ? 
await getLineGraph({ - label: 'Disk I/O Write (MB)', + label: "Disk I/O write (MB)", axisColor, line: { - label: 'Write', - color: '#6c25be', - points: diskWriteX - } + label: "Write", + color: "#6c25be", + points: diskWriteX, + }, }) - : null + : null; const diskSizeUsage = diskUsedX && diskUsedX.length && diskAvailableX && diskAvailableX.length ? await getStackedAreaGraph({ - label: 'Disk Usage (MB)', + label: "Disk usage (MB)", axisColor, areas: [ { - label: 'Used', - color: '#377eb899', - points: diskUsedX + label: "Used", + color: "#377eb899", + points: diskUsedX, }, { - label: 'Free', - color: '#4daf4a99', - points: diskAvailableX - } - ] + label: "Free", + color: "#4daf4a99", + points: diskAvailableX, + }, + ], }) - : null + : null; - const postContentItems: string[] = [] + const postContentItems: string[] = []; if (cpuLoad) { postContentItems.push( - '### CPU Metrics', + "### CPU metrics", `![${cpuLoad.id}](${cpuLoad.url})`, - '' - ) + "" + ); } if (memoryUsage) { postContentItems.push( - '### Memory Metrics', + "### Memory metrics", `![${memoryUsage.id}](${memoryUsage.url})`, - '' - ) + "" + ); } if ((networkIORead && networkIOWrite) || (diskIORead && diskIOWrite)) { postContentItems.push( - '### IO Metrics', - '| | Read | Write |', - '|--- |--- |--- |' - ) + "### IO metrics", + "| | Read | Write |", + "|--- |--- |--- |" + ); } if (networkIORead && networkIOWrite) { postContentItems.push( `| Network I/O | ![${networkIORead.id}](${networkIORead.url}) | ![${networkIOWrite.id}](${networkIOWrite.url}) |` - ) + ); } if (diskIORead && diskIOWrite) { postContentItems.push( `| Disk I/O | ![${diskIORead.id}](${diskIORead.url}) | ![${diskIOWrite.id}](${diskIOWrite.url}) |` - ) + ); } if (diskSizeUsage) { postContentItems.push( - '### Disk Size Metrics', + "### Disk size metrics", `![${diskSizeUsage.id}](${diskSizeUsage.url})`, - '' - ) + "" + ); } - return postContentItems.join('\n') + return postContentItems.join("\n"); } async function getCPUStats(): Promise { - 
const userLoadX: ProcessedStats[] = [] - const systemLoadX: ProcessedStats[] = [] + const userLoadX: ProcessedStats[] = []; + const systemLoadX: ProcessedStats[] = []; - logger.debug('Getting CPU stats ...') - const response = await axios.get(`http://localhost:${STAT_SERVER_PORT}/cpu`) + logger.debug("Getting CPU stats ..."); + const response = await axios.get(`http://localhost:${STAT_SERVER_PORT}/cpu`); if (logger.isDebugEnabled()) { - logger.debug(`Got CPU stats: ${JSON.stringify(response.data)}`) + logger.debug(`Got CPU stats: ${JSON.stringify(response.data)}`); } response.data.forEach((element: CPUStats) => { userLoadX.push({ x: element.time, - y: element.userLoad && element.userLoad > 0 ? element.userLoad : 0 - }) + y: element.userLoad && element.userLoad > 0 ? element.userLoad : 0, + }); systemLoadX.push({ x: element.time, - y: element.systemLoad && element.systemLoad > 0 ? element.systemLoad : 0 - }) - }) + y: element.systemLoad && element.systemLoad > 0 ? element.systemLoad : 0, + }); + }); - return { userLoadX, systemLoadX } + return { userLoadX, systemLoadX }; } async function getMemoryStats(): Promise { - const activeMemoryX: ProcessedStats[] = [] - const availableMemoryX: ProcessedStats[] = [] + const activeMemoryX: ProcessedStats[] = []; + const availableMemoryX: ProcessedStats[] = []; - logger.debug('Getting memory stats ...') + logger.debug("Getting memory stats ..."); const response = await axios.get( `http://localhost:${STAT_SERVER_PORT}/memory` - ) + ); if (logger.isDebugEnabled()) { - logger.debug(`Got memory stats: ${JSON.stringify(response.data)}`) + logger.debug(`Got memory stats: ${JSON.stringify(response.data)}`); } response.data.forEach((element: MemoryStats) => { @@ -258,83 +259,83 @@ async function getMemoryStats(): Promise { y: element.activeMemoryMb && element.activeMemoryMb > 0 ? element.activeMemoryMb - : 0 - }) + : 0, + }); availableMemoryX.push({ x: element.time, y: element.availableMemoryMb && element.availableMemoryMb > 0 ? 
element.availableMemoryMb - : 0 - }) - }) + : 0, + }); + }); - return { activeMemoryX, availableMemoryX } + return { activeMemoryX, availableMemoryX }; } async function getNetworkStats(): Promise { - const networkReadX: ProcessedStats[] = [] - const networkWriteX: ProcessedStats[] = [] + const networkReadX: ProcessedStats[] = []; + const networkWriteX: ProcessedStats[] = []; - logger.debug('Getting network stats ...') + logger.debug("Getting network stats ..."); const response = await axios.get( `http://localhost:${STAT_SERVER_PORT}/network` - ) + ); if (logger.isDebugEnabled()) { - logger.debug(`Got network stats: ${JSON.stringify(response.data)}`) + logger.debug(`Got network stats: ${JSON.stringify(response.data)}`); } response.data.forEach((element: NetworkStats) => { networkReadX.push({ x: element.time, - y: element.rxMb && element.rxMb > 0 ? element.rxMb : 0 - }) + y: element.rxMb && element.rxMb > 0 ? element.rxMb : 0, + }); networkWriteX.push({ x: element.time, - y: element.txMb && element.txMb > 0 ? element.txMb : 0 - }) - }) + y: element.txMb && element.txMb > 0 ? element.txMb : 0, + }); + }); - return { networkReadX, networkWriteX } + return { networkReadX, networkWriteX }; } async function getDiskStats(): Promise { - const diskReadX: ProcessedStats[] = [] - const diskWriteX: ProcessedStats[] = [] + const diskReadX: ProcessedStats[] = []; + const diskWriteX: ProcessedStats[] = []; - logger.debug('Getting disk stats ...') - const response = await axios.get(`http://localhost:${STAT_SERVER_PORT}/disk`) + logger.debug("Getting disk stats ..."); + const response = await axios.get(`http://localhost:${STAT_SERVER_PORT}/disk`); if (logger.isDebugEnabled()) { - logger.debug(`Got disk stats: ${JSON.stringify(response.data)}`) + logger.debug(`Got disk stats: ${JSON.stringify(response.data)}`); } response.data.forEach((element: DiskStats) => { diskReadX.push({ x: element.time, - y: element.rxMb && element.rxMb > 0 ? 
element.rxMb : 0 - }) + y: element.rxMb && element.rxMb > 0 ? element.rxMb : 0, + }); diskWriteX.push({ x: element.time, - y: element.wxMb && element.wxMb > 0 ? element.wxMb : 0 - }) - }) + y: element.wxMb && element.wxMb > 0 ? element.wxMb : 0, + }); + }); - return { diskReadX, diskWriteX } + return { diskReadX, diskWriteX }; } async function getDiskSizeStats(): Promise { - const diskAvailableX: ProcessedStats[] = [] - const diskUsedX: ProcessedStats[] = [] + const diskAvailableX: ProcessedStats[] = []; + const diskUsedX: ProcessedStats[] = []; - logger.debug('Getting disk size stats ...') + logger.debug("Getting disk size stats ..."); const response = await axios.get( `http://localhost:${STAT_SERVER_PORT}/disk_size` - ) + ); if (logger.isDebugEnabled()) { - logger.debug(`Got disk size stats: ${JSON.stringify(response.data)}`) + logger.debug(`Got disk size stats: ${JSON.stringify(response.data)}`); } response.data.forEach((element: DiskSizeStats) => { @@ -343,48 +344,42 @@ async function getDiskSizeStats(): Promise { y: element.availableSizeMb && element.availableSizeMb > 0 ? element.availableSizeMb - : 0 - }) + : 0, + }); diskUsedX.push({ x: element.time, - y: element.usedSizeMb && element.usedSizeMb > 0 ? element.usedSizeMb : 0 - }) - }) + y: element.usedSizeMb && element.usedSizeMb > 0 ? 
element.usedSizeMb : 0, + }); + }); - return { diskAvailableX, diskUsedX } + return { diskAvailableX, diskUsedX }; } async function getLineGraph(options: LineGraphOptions): Promise { const payload = { options: { - width: 1000, - height: 500, - xAxis: { - label: 'Time' - }, + ...CHART_DEFAULTS.options, yAxis: { - label: options.label + ...CHART_DEFAULTS.options.yAxis, + label: options.label, }, - timeTicks: { - unit: 'auto' - } }, - lines: [options.line] - } + lines: [options.line], + }; - let response = null + let response = null; try { response = await axios.put( - 'https://api.globadge.com/v1/chartgen/line/time', + "https://api.globadge.com/v1/chartgen/line/time", payload - ) + ); } catch (error: any) { - logger.error(error) - logger.error(`getLineGraph ${JSON.stringify(payload)}`) + logger.error(error); + logger.error(`getLineGraph ${JSON.stringify(payload)}`); } - return response?.data + return response?.data; } async function getStackedAreaGraph( @@ -392,109 +387,103 @@ async function getStackedAreaGraph( ): Promise { const payload = { options: { - width: 1000, - height: 500, - xAxis: { - label: 'Time' - }, + ...CHART_DEFAULTS.options, yAxis: { - label: options.label + ...CHART_DEFAULTS.options.yAxis, + label: options.label, }, - timeTicks: { - unit: 'auto' - } }, - areas: options.areas - } + areas: options.areas, + }; - let response = null + let response = null; try { response = await axios.put( - 'https://api.globadge.com/v1/chartgen/stacked-area/time', + "https://api.globadge.com/v1/chartgen/stacked-area/time", payload - ) + ); } catch (error: any) { - logger.error(error) - logger.error(`getStackedAreaGraph ${JSON.stringify(payload)}`) + logger.error(error); + logger.error(`getStackedAreaGraph ${JSON.stringify(payload)}`); } - return response?.data + return response?.data; } /////////////////////////// export async function start(): Promise { - logger.info(`Starting stat collector ...`) + logger.info(`Starting stat collector ...`); try { - let 
metricFrequency = 0 - const metricFrequencyInput: string = core.getInput('metric_frequency') + let metricFrequency = 0; + const metricFrequencyInput: string = core.getInput("metric_frequency"); if (metricFrequencyInput) { - const metricFrequencyVal: number = parseInt(metricFrequencyInput) + const metricFrequencyVal: number = parseInt(metricFrequencyInput); if (Number.isInteger(metricFrequencyVal)) { - metricFrequency = metricFrequencyVal * 1000 + metricFrequency = metricFrequencyVal * 1000; } } const child: ChildProcess = spawn( process.argv[0], - [path.join(__dirname, '../scw/index.js')], + [path.join(__dirname, "../scw/index.js")], { detached: true, - stdio: 'ignore', + stdio: "ignore", env: { ...process.env, WORKFLOW_TELEMETRY_STAT_FREQ: metricFrequency ? `${metricFrequency}` - : undefined - } + : undefined, + }, } - ) - child.unref() + ); + child.unref(); - logger.info(`Started stat collector`) + logger.info(`Started stat collector`); - return true + return true; } catch (error: any) { - logger.error('Unable to start stat collector') - logger.error(error) + logger.error("Unable to start stat collector"); + logger.error(error); - return false + return false; } } export async function finish(currentJob: WorkflowJobType): Promise { - logger.info(`Finishing stat collector ...`) + logger.info(`Finishing stat collector ...`); try { // Trigger stat collect, so we will have remaining stats since the latest schedule - await triggerStatCollect() + await triggerStatCollect(); - logger.info(`Finished stat collector`) + logger.info(`Finished stat collector`); - return true + return true; } catch (error: any) { - logger.error('Unable to finish stat collector') - logger.error(error) + logger.error("Unable to finish stat collector"); + logger.error(error); - return false + return false; } } export async function report( currentJob: WorkflowJobType ): Promise { - logger.info(`Reporting stat collector result ...`) + logger.info(`Reporting stat collector result ...`); try { - 
const postContent: string = await reportWorkflowMetrics() + const postContent: string = await reportWorkflowMetrics(); - logger.info(`Reported stat collector result`) + logger.info(`Reported stat collector result`); - return postContent + return postContent; } catch (error: any) { - logger.error('Unable to report stat collector result') - logger.error(error) + logger.error("Unable to report stat collector result"); + logger.error(error); - return null + return null; } } diff --git a/src/statCollectorWorker.ts b/src/statCollectorWorker.ts index 2412b94..1efbf1d 100644 --- a/src/statCollectorWorker.ts +++ b/src/statCollectorWorker.ts @@ -1,32 +1,32 @@ -import { createServer, IncomingMessage, Server, ServerResponse } from 'http' -import si from 'systeminformation' -import * as logger from './logger' +import { createServer, IncomingMessage, Server, ServerResponse } from "http"; +import si from "systeminformation"; +import * as logger from "./logger"; import { CPUStats, MemoryStats, DiskStats, NetworkStats, - DiskSizeStats -} from './interfaces' + DiskSizeStats, +} from "./interfaces"; const STATS_FREQ: number = - parseInt(process.env.WORKFLOW_TELEMETRY_STAT_FREQ || '') || 5000 -const SERVER_HOST: string = 'localhost' + parseInt(process.env.WORKFLOW_TELEMETRY_STAT_FREQ || "") || 5000; +const SERVER_HOST: string = "localhost"; // TODO // It is better to find an available/free port automatically and use it. // Then the post script (`post.ts`) needs to know the selected port. 
const SERVER_PORT: number = - parseInt(process.env.WORKFLOW_TELEMETRY_SERVER_PORT || '') || 7777 + parseInt(process.env.WORKFLOW_TELEMETRY_SERVER_PORT || "") || 7777; -let expectedScheduleTime: number = 0 -let statCollectTime: number = 0 +let expectedScheduleTime: number = 0; +let statCollectTime: number = 0; /////////////////////////// // CPU Stats // /////////////////////////// -const cpuStatsHistogram: CPUStats[] = [] +const cpuStatsHistogram: CPUStats[] = []; function collectCPUStats(statTime: number, timeInterval: number): Promise { return si @@ -36,13 +36,13 @@ function collectCPUStats(statTime: number, timeInterval: number): Promise { time: statTime, totalLoad: data.currentLoad, userLoad: data.currentLoadUser, - systemLoad: data.currentLoadSystem - } - cpuStatsHistogram.push(cpuStats) + systemLoad: data.currentLoadSystem, + }; + cpuStatsHistogram.push(cpuStats); }) .catch((error: any) => { - logger.error(error) - }) + logger.error(error); + }); } /////////////////////////// @@ -50,7 +50,7 @@ function collectCPUStats(statTime: number, timeInterval: number): Promise { // Memory Stats // /////////////////////////// -const memoryStatsHistogram: MemoryStats[] = [] +const memoryStatsHistogram: MemoryStats[] = []; function collectMemoryStats( statTime: number, @@ -63,13 +63,13 @@ function collectMemoryStats( time: statTime, totalMemoryMb: data.total / 1024 / 1024, activeMemoryMb: data.active / 1024 / 1024, - availableMemoryMb: data.available / 1024 / 1024 - } - memoryStatsHistogram.push(memoryStats) + availableMemoryMb: data.available / 1024 / 1024, + }; + memoryStatsHistogram.push(memoryStats); }) .catch((error: any) => { - logger.error(error) - }) + logger.error(error); + }); } /////////////////////////// @@ -77,7 +77,7 @@ function collectMemoryStats( // Network Stats // /////////////////////////// -const networkStatsHistogram: NetworkStats[] = [] +const networkStatsHistogram: NetworkStats[] = []; function collectNetworkStats( statTime: number, @@ -87,21 +87,21 
@@ function collectNetworkStats( .networkStats() .then((data: si.Systeminformation.NetworkStatsData[]) => { let totalRxSec = 0, - totalTxSec = 0 + totalTxSec = 0; for (let nsd of data) { - totalRxSec += nsd.rx_sec - totalTxSec += nsd.tx_sec + totalRxSec += nsd.rx_sec; + totalTxSec += nsd.tx_sec; } const networkStats: NetworkStats = { time: statTime, rxMb: Math.floor((totalRxSec * (timeInterval / 1000)) / 1024 / 1024), - txMb: Math.floor((totalTxSec * (timeInterval / 1000)) / 1024 / 1024) - } - networkStatsHistogram.push(networkStats) + txMb: Math.floor((totalTxSec * (timeInterval / 1000)) / 1024 / 1024), + }; + networkStatsHistogram.push(networkStats); }) .catch((error: any) => { - logger.error(error) - }) + logger.error(error); + }); } /////////////////////////// @@ -109,7 +109,7 @@ function collectNetworkStats( // Disk Stats // /////////////////////////// -const diskStatsHistogram: DiskStats[] = [] +const diskStatsHistogram: DiskStats[] = []; function collectDiskStats( statTime: number, @@ -118,21 +118,21 @@ function collectDiskStats( return si .fsStats() .then((data: si.Systeminformation.FsStatsData) => { - let rxSec = data.rx_sec ? data.rx_sec : 0 - let wxSec = data.wx_sec ? data.wx_sec : 0 + let rxSec = data.rx_sec ? data.rx_sec : 0; + let wxSec = data.wx_sec ? 
data.wx_sec : 0; const diskStats: DiskStats = { time: statTime, rxMb: Math.floor((rxSec * (timeInterval / 1000)) / 1024 / 1024), - wxMb: Math.floor((wxSec * (timeInterval / 1000)) / 1024 / 1024) - } - diskStatsHistogram.push(diskStats) + wxMb: Math.floor((wxSec * (timeInterval / 1000)) / 1024 / 1024), + }; + diskStatsHistogram.push(diskStats); }) .catch((error: any) => { - logger.error(error) - }) + logger.error(error); + }); } -const diskSizeStatsHistogram: DiskSizeStats[] = [] +const diskSizeStatsHistogram: DiskSizeStats[] = []; function collectDiskSizeStats( statTime: number, @@ -142,47 +142,47 @@ function collectDiskSizeStats( .fsSize() .then((data: si.Systeminformation.FsSizeData[]) => { let totalSize = 0, - usedSize = 0 + usedSize = 0; for (let fsd of data) { - totalSize += fsd.size - usedSize += fsd.used + totalSize += fsd.size; + usedSize += fsd.used; } const diskSizeStats: DiskSizeStats = { time: statTime, availableSizeMb: Math.floor((totalSize - usedSize) / 1024 / 1024), - usedSizeMb: Math.floor(usedSize / 1024 / 1024) - } - diskSizeStatsHistogram.push(diskSizeStats) + usedSizeMb: Math.floor(usedSize / 1024 / 1024), + }; + diskSizeStatsHistogram.push(diskSizeStats); }) .catch((error: any) => { - logger.error(error) - }) + logger.error(error); + }); } /////////////////////////// async function collectStats(triggeredFromScheduler: boolean = true) { try { - const currentTime: number = Date.now() + const currentTime: number = Date.now(); const timeInterval: number = statCollectTime ? 
currentTime - statCollectTime - : 0 + : 0; - statCollectTime = currentTime + statCollectTime = currentTime; - const promises: Promise[] = [] + const promises: Promise[] = []; - promises.push(collectCPUStats(statCollectTime, timeInterval)) - promises.push(collectMemoryStats(statCollectTime, timeInterval)) - promises.push(collectNetworkStats(statCollectTime, timeInterval)) - promises.push(collectDiskStats(statCollectTime, timeInterval)) - promises.push(collectDiskSizeStats(statCollectTime, timeInterval)) + promises.push(collectCPUStats(statCollectTime, timeInterval)); + promises.push(collectMemoryStats(statCollectTime, timeInterval)); + promises.push(collectNetworkStats(statCollectTime, timeInterval)); + promises.push(collectDiskStats(statCollectTime, timeInterval)); + promises.push(collectDiskSizeStats(statCollectTime, timeInterval)); - return promises + return promises; } finally { if (triggeredFromScheduler) { - expectedScheduleTime += STATS_FREQ - setTimeout(collectStats, expectedScheduleTime - Date.now()) + expectedScheduleTime += STATS_FREQ; + setTimeout(collectStats, expectedScheduleTime - Date.now()); } } } @@ -192,96 +192,96 @@ function startHttpServer() { async (request: IncomingMessage, response: ServerResponse) => { try { switch (request.url) { - case '/cpu': { - if (request.method === 'GET') { - response.end(JSON.stringify(cpuStatsHistogram)) + case "/cpu": { + if (request.method === "GET") { + response.end(JSON.stringify(cpuStatsHistogram)); } else { - response.statusCode = 405 - response.end() + response.statusCode = 405; + response.end(); } - break + break; } - case '/memory': { - if (request.method === 'GET') { - response.end(JSON.stringify(memoryStatsHistogram)) + case "/memory": { + if (request.method === "GET") { + response.end(JSON.stringify(memoryStatsHistogram)); } else { - response.statusCode = 405 - response.end() + response.statusCode = 405; + response.end(); } - break + break; } - case '/network': { - if (request.method === 'GET') { - 
response.end(JSON.stringify(networkStatsHistogram)) + case "/network": { + if (request.method === "GET") { + response.end(JSON.stringify(networkStatsHistogram)); } else { - response.statusCode = 405 - response.end() + response.statusCode = 405; + response.end(); } - break + break; } - case '/disk': { - if (request.method === 'GET') { - response.end(JSON.stringify(diskStatsHistogram)) + case "/disk": { + if (request.method === "GET") { + response.end(JSON.stringify(diskStatsHistogram)); } else { - response.statusCode = 405 - response.end() + response.statusCode = 405; + response.end(); } - break + break; } - case '/disk_size': { - if (request.method === 'GET') { - response.end(JSON.stringify(diskSizeStatsHistogram)) + case "/disk_size": { + if (request.method === "GET") { + response.end(JSON.stringify(diskSizeStatsHistogram)); } else { - response.statusCode = 405 - response.end() + response.statusCode = 405; + response.end(); } } - case '/collect': { - if (request.method === 'POST') { - await collectStats(false) - response.end() + case "/collect": { + if (request.method === "POST") { + await collectStats(false); + response.end(); } else { - response.statusCode = 405 - response.end() + response.statusCode = 405; + response.end(); } - break + break; } default: { - response.statusCode = 404 - response.end() + response.statusCode = 404; + response.end(); } } } catch (error: any) { - logger.error(error) - response.statusCode = 500 + logger.error(error); + response.statusCode = 500; response.end( JSON.stringify({ type: error.type, - message: error.message + message: error.message, }) - ) + ); } } - ) + ); server.listen(SERVER_PORT, SERVER_HOST, () => { - logger.info(`Stat server listening on port ${SERVER_PORT}`) - }) + logger.info(`Stat server listening on port ${SERVER_PORT}`); + }); } // Init // /////////////////////////// function init() { - expectedScheduleTime = Date.now() + expectedScheduleTime = Date.now(); - logger.info('Starting stat collector ...') - 
process.nextTick(collectStats) + logger.info("Starting stat collector ..."); + process.nextTick(collectStats); - logger.info('Starting HTTP server ...') - startHttpServer() + logger.info("Starting HTTP server ..."); + startHttpServer(); } -init() +init(); /////////////////////////// diff --git a/src/stepTracer.ts b/src/stepTracer.ts index 8e1fd45..89368fa 100644 --- a/src/stepTracer.ts +++ b/src/stepTracer.ts @@ -1,8 +1,9 @@ -import { WorkflowJobType } from './interfaces' -import * as logger from './logger' +import { WorkflowJobType } from "./interfaces"; +import { MERMAID_DEFAULTS } from "./config"; +import * as logger from "./logger"; function generateTraceChartForSteps(job: WorkflowJobType): string { - let chartContent = '' + let chartContent = ""; /** gantt @@ -20,100 +21,108 @@ function generateTraceChartForSteps(job: WorkflowJobType): string { Post Run actions/checkout@v2 : 1658073655000, 1658073655000 */ - chartContent = chartContent.concat('gantt', '\n') - chartContent = chartContent.concat('\t', `title ${job.name}`, '\n') - chartContent = chartContent.concat('\t', `dateFormat x`, '\n') - chartContent = chartContent.concat('\t', `axisFormat %H:%M:%S`, '\n') + chartContent = chartContent.concat("gantt", "\n"); + chartContent = chartContent.concat("\t", `title ${job.name}`, "\n"); + chartContent = chartContent.concat( + "\t", + `dateFormat ${MERMAID_DEFAULTS.gantt.dateFormat}`, + "\n" + ); + chartContent = chartContent.concat( + "\t", + `axisFormat ${MERMAID_DEFAULTS.gantt.axisFormat}`, + "\n" + ); for (const step of job.steps || []) { if (!step.started_at || !step.completed_at) { - continue + continue; } chartContent = chartContent.concat( - '\t', - `${step.name.replace(/:/g, '-')} : ` - ) + "\t", + `${step.name.replace(/:/g, "-")} : ` + ); - if (step.name === 'Set up job' && step.number === 1) { - chartContent = chartContent.concat('milestone, ') + if (step.name === "Set up job" && step.number === 1) { + chartContent = chartContent.concat("milestone, "); } 
- if (step.conclusion === 'failure') { + if (step.conclusion === "failure") { // to show red - chartContent = chartContent.concat('crit, ') - } else if (step.conclusion === 'skipped') { + chartContent = chartContent.concat("crit, "); + } else if (step.conclusion === "skipped") { // to show grey - chartContent = chartContent.concat('done, ') + chartContent = chartContent.concat("done, "); } - const startTime: number = new Date(step.started_at).getTime() - const finishTime: number = new Date(step.completed_at).getTime() + const startTime: number = new Date(step.started_at).getTime(); + const finishTime: number = new Date(step.completed_at).getTime(); chartContent = chartContent.concat( `${Math.min(startTime, finishTime)}, ${finishTime}`, - '\n' - ) + "\n" + ); } const postContentItems: string[] = [ - '', - '### Step Trace', - '', - '```mermaid' + '\n' + chartContent + '\n' + '```' - ] - return postContentItems.join('\n') + "", + "### Step trace", + "", + "```mermaid" + "\n" + chartContent + "\n" + "```", + ]; + return postContentItems.join("\n"); } /////////////////////////// export async function start(): Promise { - logger.info(`Starting step tracer ...`) + logger.info(`Starting step tracer ...`); try { - logger.info(`Started step tracer`) + logger.info(`Started step tracer`); - return true + return true; } catch (error: any) { - logger.error('Unable to start step tracer') - logger.error(error) + logger.error("Unable to start step tracer"); + logger.error(error); - return false + return false; } } export async function finish(currentJob: WorkflowJobType): Promise { - logger.info(`Finishing step tracer ...`) + logger.info(`Finishing step tracer ...`); try { - logger.info(`Finished step tracer`) + logger.info(`Finished step tracer`); - return true + return true; } catch (error: any) { - logger.error('Unable to finish step tracer') - logger.error(error) + logger.error("Unable to finish step tracer"); + logger.error(error); - return false + return false; } } export 
async function report( currentJob: WorkflowJobType ): Promise { - logger.info(`Reporting step tracer result ...`) + logger.info(`Reporting step tracer result ...`); if (!currentJob) { - return null + return null; } try { - const postContent: string = generateTraceChartForSteps(currentJob) + const postContent: string = generateTraceChartForSteps(currentJob); - logger.info(`Reported step tracer result`) + logger.info(`Reported step tracer result`); - return postContent + return postContent; } catch (error: any) { - logger.error('Unable to report step tracer result') - logger.error(error) + logger.error("Unable to report step tracer result"); + logger.error(error); - return null + return null; } }