Fixed the issue with dashboard graphs not showing information correctly: statistics are now collected into a new table dedicated to this purpose, and the collection job was added to the automation queue.

This commit is contained in:
Muhammad Ibrahim
2025-11-07 10:00:19 +00:00
parent 63831caba3
commit e73ebc383c
6 changed files with 523 additions and 207 deletions

View File

@@ -0,0 +1,16 @@
-- CreateTable
-- Stores periodic snapshots of aggregated, system-wide package statistics.
-- Rows are written by the "system-statistics" automation job and read by
-- the dashboard trends endpoint, so charts use pre-aggregated data instead
-- of re-aggregating per-host update_history rows on every request.
CREATE TABLE "system_statistics" (
"id" TEXT NOT NULL,
"unique_packages_count" INTEGER NOT NULL,
"unique_security_count" INTEGER NOT NULL,
"total_packages" INTEGER NOT NULL,
"total_hosts" INTEGER NOT NULL,
"hosts_needing_updates" INTEGER NOT NULL,
"timestamp" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "system_statistics_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
-- Trend queries filter on a timestamp range, so index the timestamp column.
CREATE INDEX "system_statistics_timestamp_idx" ON "system_statistics"("timestamp");

View File

@@ -202,7 +202,7 @@ model update_history {
id String @id id String @id
host_id String host_id String
packages_count Int packages_count Int
security_count Int security_count Int
total_packages Int? total_packages Int?
payload_size_kb Float? payload_size_kb Float?
execution_time Float? execution_time Float?
@@ -212,6 +212,18 @@ model update_history {
hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade) hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade)
} }
// Periodic snapshot of aggregated system-wide package statistics.
// One row is written per run of the system-statistics automation job;
// rows are read by the dashboard trends endpoint for "All Hosts" mode.
model system_statistics {
id String @id
// Distinct packages needing an update on at least one host
unique_packages_count Int
// Distinct packages with a pending security update on at least one host
unique_security_count Int
// Distinct packages installed on at least one host
total_packages Int
// Active hosts at collection time
total_hosts Int
// Active hosts with at least one package needing an update
hosts_needing_updates Int
// Collection time; indexed to support time-range trend queries
timestamp DateTime @default(now())
@@index([timestamp])
}
model users { model users {
id String @id id String @id
username String @unique username String @unique

View File

@@ -242,6 +242,30 @@ router.post(
}, },
); );
// Trigger manual system statistics collection.
// Enqueues a one-off collection job via the queue manager and reports the
// job id back to the caller; failures are logged and surfaced as a 500.
router.post(
  "/trigger/system-statistics",
  authenticateToken,
  async (_req, res) => {
    try {
      const job = await queueManager.triggerSystemStatistics();
      const payload = {
        success: true,
        data: {
          jobId: job.id,
          message: "System statistics collection triggered successfully",
        },
      };
      res.json(payload);
    } catch (err) {
      console.error("Error triggering system statistics collection:", err);
      res.status(500).json({
        success: false,
        error: "Failed to trigger system statistics collection",
      });
    }
  },
);
// Get queue health status // Get queue health status
router.get("/health", authenticateToken, async (_req, res) => { router.get("/health", authenticateToken, async (_req, res) => {
try { try {
@@ -300,6 +324,7 @@ router.get("/overview", authenticateToken, async (_req, res) => {
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1), queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, 1), queueManager.getRecentJobs(QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1), queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
queueManager.getRecentJobs(QUEUE_NAMES.SYSTEM_STATISTICS, 1),
]); ]);
// Calculate overview metrics // Calculate overview metrics
@@ -309,21 +334,24 @@ router.get("/overview", authenticateToken, async (_req, res) => {
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed + stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed + stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed + stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed +
stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].delayed, stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].delayed +
stats[QUEUE_NAMES.SYSTEM_STATISTICS].delayed,
runningTasks: runningTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active + stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
stats[QUEUE_NAMES.SESSION_CLEANUP].active + stats[QUEUE_NAMES.SESSION_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active + stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active + stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active +
stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].active, stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].active +
stats[QUEUE_NAMES.SYSTEM_STATISTICS].active,
failedTasks: failedTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed + stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
stats[QUEUE_NAMES.SESSION_CLEANUP].failed + stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed + stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed + stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed +
stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].failed, stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].failed +
stats[QUEUE_NAMES.SYSTEM_STATISTICS].failed,
totalAutomations: Object.values(stats).reduce((sum, queueStats) => { totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
return ( return (
@@ -435,6 +463,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
: "Never run", : "Never run",
stats: stats[QUEUE_NAMES.AGENT_COMMANDS], stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
}, },
{
name: "System Statistics Collection",
queue: QUEUE_NAMES.SYSTEM_STATISTICS,
description: "Collects aggregated system-wide package statistics",
schedule: "Every 30 minutes",
lastRun: recentJobs[6][0]?.finishedOn
? new Date(recentJobs[6][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[6][0]?.finishedOn || 0,
status: recentJobs[6][0]?.failedReason
? "Failed"
: recentJobs[6][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.SYSTEM_STATISTICS],
},
].sort((a, b) => { ].sort((a, b) => {
// Sort by last run timestamp (most recent first) // Sort by last run timestamp (most recent first)
// If both have never run (timestamp 0), maintain original order // If both have never run (timestamp 0), maintain original order

View File

@@ -564,174 +564,216 @@ router.get(
const startDate = new Date(); const startDate = new Date();
startDate.setDate(endDate.getDate() - daysInt); startDate.setDate(endDate.getDate() - daysInt);
// Build where clause
const whereClause = {
timestamp: {
gte: startDate,
lte: endDate,
},
};
// Add host filter if specified
if (hostId && hostId !== "all" && hostId !== "undefined") {
whereClause.host_id = hostId;
}
// Get all update history records in the date range
const trendsData = await prisma.update_history.findMany({
where: whereClause,
select: {
timestamp: true,
packages_count: true,
security_count: true,
total_packages: true,
host_id: true,
status: true,
},
orderBy: {
timestamp: "asc",
},
});
// Enhanced data validation and processing
const processedData = trendsData
.filter((record) => {
// Enhanced validation
return (
record.total_packages !== null &&
record.total_packages >= 0 &&
record.packages_count >= 0 &&
record.security_count >= 0 &&
record.security_count <= record.packages_count && // Security can't exceed outdated
record.status === "success"
); // Only include successful reports
})
.map((record) => {
const date = new Date(record.timestamp);
let timeKey;
if (daysInt <= 1) {
// For hourly view, group by hour only (not minutes)
timeKey = date.toISOString().substring(0, 13); // YYYY-MM-DDTHH
} else {
// For daily view, group by day
timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
}
return {
timeKey,
total_packages: record.total_packages,
packages_count: record.packages_count || 0,
security_count: record.security_count || 0,
host_id: record.host_id,
timestamp: record.timestamp,
};
});
// Determine if we need aggregation based on host filter // Determine if we need aggregation based on host filter
const needsAggregation = const needsAggregation =
!hostId || hostId === "all" || hostId === "undefined"; !hostId || hostId === "all" || hostId === "undefined";
let trendsData;
if (needsAggregation) {
// For "All Hosts" mode, use system_statistics table
trendsData = await prisma.system_statistics.findMany({
where: {
timestamp: {
gte: startDate,
lte: endDate,
},
},
select: {
timestamp: true,
unique_packages_count: true,
unique_security_count: true,
total_packages: true,
total_hosts: true,
hosts_needing_updates: true,
},
orderBy: {
timestamp: "asc",
},
});
} else {
// For individual host, use update_history table
trendsData = await prisma.update_history.findMany({
where: {
host_id: hostId,
timestamp: {
gte: startDate,
lte: endDate,
},
},
select: {
timestamp: true,
packages_count: true,
security_count: true,
total_packages: true,
host_id: true,
status: true,
},
orderBy: {
timestamp: "asc",
},
});
}
// Process data based on source
let processedData;
let aggregatedArray; let aggregatedArray;
if (needsAggregation) { if (needsAggregation) {
// For "All Hosts" mode, we need to calculate the actual total packages differently // For "All Hosts" mode, data comes from system_statistics table
// Instead of aggregating historical data (which is per-host), we'll use the current total // Already aggregated, just need to format it
// and show that as a flat line, since total packages don't change much over time processedData = trendsData
.filter((record) => {
// Enhanced validation
return (
record.total_packages !== null &&
record.total_packages >= 0 &&
record.unique_packages_count >= 0 &&
record.unique_security_count >= 0 &&
record.unique_security_count <= record.unique_packages_count
);
})
.map((record) => {
const date = new Date(record.timestamp);
let timeKey;
// Get the current total packages count (unique packages across all hosts) if (daysInt <= 1) {
const currentTotalPackages = await prisma.packages.count({ // For "Last 24 hours", use full timestamp for each data point
where: { // This allows plotting all individual data points
host_packages: { timeKey = date.toISOString(); // Full ISO timestamp
some: {}, // At least one host has this package } else {
}, // For daily view, group by day
}, timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
}); }
// Aggregate data by timeKey when looking at "All Hosts" or no specific host return {
const aggregatedData = processedData.reduce((acc, item) => { timeKey,
if (!acc[item.timeKey]) { total_packages: record.total_packages,
acc[item.timeKey] = { packages_count: record.unique_packages_count,
timeKey: item.timeKey, security_count: record.unique_security_count,
total_packages: currentTotalPackages, // Use current total packages timestamp: record.timestamp,
packages_count: 0,
security_count: 0,
record_count: 0,
host_ids: new Set(),
min_timestamp: item.timestamp,
max_timestamp: item.timestamp,
}; };
} });
// For outdated and security packages: SUM (these represent counts across hosts) if (daysInt <= 1) {
acc[item.timeKey].packages_count += item.packages_count; // For "Last 24 hours", use all individual data points without grouping
acc[item.timeKey].security_count += item.security_count; // Sort by timestamp
aggregatedArray = processedData.sort(
(a, b) => a.timestamp.getTime() - b.timestamp.getTime(),
);
} else {
// For longer periods, group by timeKey and take the latest value for each period
const aggregatedData = processedData.reduce((acc, item) => {
if (
!acc[item.timeKey] ||
item.timestamp > acc[item.timeKey].timestamp
) {
acc[item.timeKey] = item;
}
return acc;
}, {});
acc[item.timeKey].record_count += 1; // Convert to array and sort
acc[item.timeKey].host_ids.add(item.host_id); aggregatedArray = Object.values(aggregatedData).sort((a, b) =>
a.timeKey.localeCompare(b.timeKey),
// Track timestamp range );
if (item.timestamp < acc[item.timeKey].min_timestamp) { }
acc[item.timeKey].min_timestamp = item.timestamp;
}
if (item.timestamp > acc[item.timeKey].max_timestamp) {
acc[item.timeKey].max_timestamp = item.timestamp;
}
return acc;
}, {});
// Convert to array and add metadata
aggregatedArray = Object.values(aggregatedData)
.map((item) => ({
...item,
host_count: item.host_ids.size,
host_ids: Array.from(item.host_ids),
}))
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
} else { } else {
// For specific host, show individual data points without aggregation // For individual host, data comes from update_history table
// But still group by timeKey to handle multiple reports from same host in same time period processedData = trendsData
const hostAggregatedData = processedData.reduce((acc, item) => { .filter((record) => {
if (!acc[item.timeKey]) { // Enhanced validation
acc[item.timeKey] = { return (
timeKey: item.timeKey, record.total_packages !== null &&
total_packages: 0, record.total_packages >= 0 &&
packages_count: 0, record.packages_count >= 0 &&
security_count: 0, record.security_count >= 0 &&
record_count: 0, record.security_count <= record.packages_count &&
host_ids: new Set([item.host_id]), record.status === "success"
min_timestamp: item.timestamp, );
max_timestamp: item.timestamp, })
.map((record) => {
const date = new Date(record.timestamp);
let timeKey;
if (daysInt <= 1) {
// For "Last 24 hours", use full timestamp for each data point
// This allows plotting all individual data points
timeKey = date.toISOString(); // Full ISO timestamp
} else {
// For daily view, group by day
timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
}
return {
timeKey,
total_packages: record.total_packages,
packages_count: record.packages_count || 0,
security_count: record.security_count || 0,
host_id: record.host_id,
timestamp: record.timestamp,
}; };
} });
// For same host, take the latest values (not sum) if (daysInt <= 1) {
// This handles cases where a host reports multiple times in the same time period // For "Last 24 hours", use all individual data points without grouping
if (item.timestamp > acc[item.timeKey].max_timestamp) { // Sort by timestamp
acc[item.timeKey].total_packages = item.total_packages; aggregatedArray = processedData.sort(
acc[item.timeKey].packages_count = item.packages_count; (a, b) => a.timestamp.getTime() - b.timestamp.getTime(),
acc[item.timeKey].security_count = item.security_count; );
acc[item.timeKey].max_timestamp = item.timestamp; } else {
} // For longer periods, group by timeKey to handle multiple reports from same host in same time period
const hostAggregatedData = processedData.reduce((acc, item) => {
if (!acc[item.timeKey]) {
acc[item.timeKey] = {
timeKey: item.timeKey,
total_packages: 0,
packages_count: 0,
security_count: 0,
record_count: 0,
host_ids: new Set([item.host_id]),
min_timestamp: item.timestamp,
max_timestamp: item.timestamp,
};
}
acc[item.timeKey].record_count += 1; // For same host, take the latest values (not sum)
// This handles cases where a host reports multiple times in the same time period
if (item.timestamp > acc[item.timeKey].max_timestamp) {
acc[item.timeKey].total_packages = item.total_packages;
acc[item.timeKey].packages_count = item.packages_count;
acc[item.timeKey].security_count = item.security_count;
acc[item.timeKey].max_timestamp = item.timestamp;
}
return acc; acc[item.timeKey].record_count += 1;
}, {});
// Convert to array return acc;
aggregatedArray = Object.values(hostAggregatedData) }, {});
.map((item) => ({
...item, // Convert to array
host_count: item.host_ids.size, aggregatedArray = Object.values(hostAggregatedData)
host_ids: Array.from(item.host_ids), .map((item) => ({
})) ...item,
.sort((a, b) => a.timeKey.localeCompare(b.timeKey)); host_count: item.host_ids.size,
host_ids: Array.from(item.host_ids),
}))
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
}
} }
// Handle sparse data by filling missing time periods // Handle sparse data by filling missing time periods
const fillMissingPeriods = (data, daysInt) => { const fillMissingPeriods = (data, daysInt) => {
if (data.length === 0) {
return [];
}
// For "Last 24 hours", return data as-is without filling gaps
// This allows plotting all individual data points
if (daysInt <= 1) {
return data;
}
const filledData = []; const filledData = [];
const startDate = new Date(); const startDate = new Date();
startDate.setDate(startDate.getDate() - daysInt); startDate.setDate(startDate.getDate() - daysInt);
@@ -741,50 +783,58 @@ router.get(
const endDate = new Date(); const endDate = new Date();
const currentDate = new Date(startDate); const currentDate = new Date(startDate);
// Find the last known values for interpolation // Sort data by timeKey to get chronological order
const sortedData = [...data].sort((a, b) =>
a.timeKey.localeCompare(b.timeKey),
);
// Find the first actual data point (don't fill before this)
const firstDataPoint = sortedData[0];
const firstDataTimeKey = firstDataPoint?.timeKey;
// Track last known values as we iterate forward
let lastKnownValues = null; let lastKnownValues = null;
if (data.length > 0) { let hasSeenFirstDataPoint = false;
lastKnownValues = {
total_packages: data[0].total_packages,
packages_count: data[0].packages_count,
security_count: data[0].security_count,
};
}
while (currentDate <= endDate) { while (currentDate <= endDate) {
let timeKey; let timeKey;
if (daysInt <= 1) { // For daily view, group by day
timeKey = currentDate.toISOString().substring(0, 13); // Hourly timeKey = currentDate.toISOString().split("T")[0]; // YYYY-MM-DD
currentDate.setHours(currentDate.getHours() + 1); currentDate.setDate(currentDate.getDate() + 1);
} else {
timeKey = currentDate.toISOString().split("T")[0]; // Daily // Skip periods before the first actual data point
currentDate.setDate(currentDate.getDate() + 1); if (firstDataTimeKey && timeKey < firstDataTimeKey) {
continue;
} }
if (dataMap.has(timeKey)) { if (dataMap.has(timeKey)) {
const item = dataMap.get(timeKey); const item = dataMap.get(timeKey);
filledData.push(item); filledData.push(item);
// Update last known values // Update last known values with actual data
lastKnownValues = { lastKnownValues = {
total_packages: item.total_packages, total_packages: item.total_packages || 0,
packages_count: item.packages_count, packages_count: item.packages_count || 0,
security_count: item.security_count, security_count: item.security_count || 0,
}; };
hasSeenFirstDataPoint = true;
} else { } else {
// For missing periods, use the last known values (interpolation) // For missing periods AFTER the first data point, use forward-fill
// This creates a continuous line instead of gaps // Only fill if we have a last known value and we've seen the first data point
filledData.push({ if (lastKnownValues !== null && hasSeenFirstDataPoint) {
timeKey, filledData.push({
total_packages: lastKnownValues?.total_packages || 0, timeKey,
packages_count: lastKnownValues?.packages_count || 0, total_packages: lastKnownValues.total_packages,
security_count: lastKnownValues?.security_count || 0, packages_count: lastKnownValues.packages_count,
record_count: 0, security_count: lastKnownValues.security_count,
host_count: 0, record_count: 0,
host_ids: [], host_count: 0,
min_timestamp: null, host_ids: [],
max_timestamp: null, min_timestamp: null,
isInterpolated: true, // Mark as interpolated for debugging max_timestamp: null,
}); isInterpolated: true, // Mark as interpolated for debugging
});
}
// If we haven't seen the first data point yet, skip this period
} }
} }
@@ -810,7 +860,7 @@ router.get(
// Get current package state for offline fallback // Get current package state for offline fallback
let currentPackageState = null; let currentPackageState = null;
if (hostId && hostId !== "all" && hostId !== "undefined") { if (hostId && hostId !== "all" && hostId !== "undefined") {
// Get current package counts for specific host // For individual host, get current package counts from host_packages
const currentState = await prisma.host_packages.aggregate({ const currentState = await prisma.host_packages.aggregate({
where: { where: {
host_id: hostId, host_id: hostId,
@@ -841,34 +891,64 @@ router.get(
security_count: securityCount, security_count: securityCount,
}; };
} else { } else {
// Get current package counts for all hosts // For "All Hosts" mode, use the latest system_statistics record if available
// Total packages = count of unique packages installed on at least one host // Otherwise calculate from database
const totalPackagesCount = await prisma.packages.count({ const latestStats = await prisma.system_statistics.findFirst({
where: { orderBy: {
host_packages: { timestamp: "desc",
some: {}, // At least one host has this package },
select: {
total_packages: true,
unique_packages_count: true,
unique_security_count: true,
timestamp: true,
},
});
if (latestStats) {
// Use latest system statistics (collected by scheduled job)
currentPackageState = {
total_packages: latestStats.total_packages,
packages_count: latestStats.unique_packages_count,
security_count: latestStats.unique_security_count,
};
} else {
// Fallback: calculate from database if no statistics collected yet
const totalPackagesCount = await prisma.packages.count({
where: {
host_packages: {
some: {}, // At least one host has this package
},
}, },
}, });
});
// Get counts for boolean fields separately const uniqueOutdatedCount = await prisma.packages.count({
const outdatedCount = await prisma.host_packages.count({ where: {
where: { host_packages: {
needs_update: true, some: {
}, needs_update: true,
}); },
},
},
});
const securityCount = await prisma.host_packages.count({ const uniqueSecurityCount = await prisma.packages.count({
where: { where: {
is_security_update: true, host_packages: {
}, some: {
}); needs_update: true,
is_security_update: true,
},
},
},
});
currentPackageState = { currentPackageState = {
total_packages: totalPackagesCount, total_packages: totalPackagesCount,
packages_count: outdatedCount, packages_count: uniqueOutdatedCount,
security_count: securityCount, security_count: uniqueSecurityCount,
}; };
}
} }
// Format data for chart // Format data for chart
@@ -923,6 +1003,11 @@ router.get(
chartData.datasets[2].data.push(item.security_count); chartData.datasets[2].data.push(item.security_count);
}); });
// Replace the last label with "Now" to indicate current state
if (chartData.labels.length > 0) {
chartData.labels[chartData.labels.length - 1] = "Now";
}
// Calculate data quality metrics // Calculate data quality metrics
const dataQuality = { const dataQuality = {
totalRecords: trendsData.length, totalRecords: trendsData.length,

View File

@@ -13,6 +13,7 @@ const OrphanedPackageCleanup = require("./orphanedPackageCleanup");
const DockerInventoryCleanup = require("./dockerInventoryCleanup"); const DockerInventoryCleanup = require("./dockerInventoryCleanup");
const DockerImageUpdateCheck = require("./dockerImageUpdateCheck"); const DockerImageUpdateCheck = require("./dockerImageUpdateCheck");
const MetricsReporting = require("./metricsReporting"); const MetricsReporting = require("./metricsReporting");
const SystemStatistics = require("./systemStatistics");
// Queue names // Queue names
const QUEUE_NAMES = { const QUEUE_NAMES = {
@@ -23,6 +24,7 @@ const QUEUE_NAMES = {
DOCKER_INVENTORY_CLEANUP: "docker-inventory-cleanup", DOCKER_INVENTORY_CLEANUP: "docker-inventory-cleanup",
DOCKER_IMAGE_UPDATE_CHECK: "docker-image-update-check", DOCKER_IMAGE_UPDATE_CHECK: "docker-image-update-check",
METRICS_REPORTING: "metrics-reporting", METRICS_REPORTING: "metrics-reporting",
SYSTEM_STATISTICS: "system-statistics",
AGENT_COMMANDS: "agent-commands", AGENT_COMMANDS: "agent-commands",
}; };
@@ -106,6 +108,9 @@ class QueueManager {
this.automations[QUEUE_NAMES.METRICS_REPORTING] = new MetricsReporting( this.automations[QUEUE_NAMES.METRICS_REPORTING] = new MetricsReporting(
this, this,
); );
this.automations[QUEUE_NAMES.SYSTEM_STATISTICS] = new SystemStatistics(
this,
);
console.log("✅ All automation classes initialized"); console.log("✅ All automation classes initialized");
} }
@@ -191,6 +196,15 @@ class QueueManager {
workerOptions, workerOptions,
); );
// System Statistics Worker
this.workers[QUEUE_NAMES.SYSTEM_STATISTICS] = new Worker(
QUEUE_NAMES.SYSTEM_STATISTICS,
this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].process.bind(
this.automations[QUEUE_NAMES.SYSTEM_STATISTICS],
),
workerOptions,
);
// Agent Commands Worker // Agent Commands Worker
this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker( this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
QUEUE_NAMES.AGENT_COMMANDS, QUEUE_NAMES.AGENT_COMMANDS,
@@ -323,6 +337,7 @@ class QueueManager {
await this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].schedule(); await this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK].schedule(); await this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK].schedule();
await this.automations[QUEUE_NAMES.METRICS_REPORTING].schedule(); await this.automations[QUEUE_NAMES.METRICS_REPORTING].schedule();
await this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].schedule();
} }
/** /**
@@ -358,6 +373,10 @@ class QueueManager {
].triggerManual(); ].triggerManual();
} }
// Enqueue an immediate, manually-triggered system statistics collection job.
async triggerSystemStatistics() {
return this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].triggerManual();
}
async triggerMetricsReporting() { async triggerMetricsReporting() {
return this.automations[QUEUE_NAMES.METRICS_REPORTING].triggerManual(); return this.automations[QUEUE_NAMES.METRICS_REPORTING].triggerManual();
} }

View File

@@ -0,0 +1,140 @@
const { prisma } = require("./shared/prisma");
const { v4: uuidv4 } = require("uuid");
/**
* System Statistics Collection Automation
* Collects aggregated system-wide statistics every 30 minutes
* for use in package trends charts
*/
class SystemStatistics {
  /**
   * System Statistics Collection Automation.
   * Collects aggregated system-wide package statistics on a schedule
   * (every 30 minutes) and persists them to the system_statistics table
   * for use by the dashboard package-trends charts.
   *
   * @param {object} queueManager - QueueManager instance exposing `queues`,
   *   keyed by queue name, used to enqueue scheduled and manual jobs.
   */
  constructor(queueManager) {
    this.queueManager = queueManager;
    // Must match QUEUE_NAMES.SYSTEM_STATISTICS in the queue manager.
    this.queueName = "system-statistics";
  }

  /**
   * Process one system statistics collection job: compute five aggregate
   * counts from the database and store them as a single snapshot row.
   *
   * @param {object} _job - BullMQ job (unused; the job carries no payload).
   * @returns {Promise<object>} Summary of the collected counts plus
   *   execution time in milliseconds.
   * @throws Rethrows any database error after logging it.
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("📊 Starting system statistics collection...");

    try {
      // The five aggregate counts are independent of one another, so run
      // them concurrently instead of awaiting each query sequentially.
      const [
        uniquePackagesCount,
        uniqueSecurityCount,
        totalPackages,
        totalHosts,
        hostsNeedingUpdates,
      ] = await Promise.all([
        // Distinct packages needing an update on at least one host
        prisma.packages.count({
          where: {
            host_packages: {
              some: {
                needs_update: true,
              },
            },
          },
        }),
        // Distinct packages with a pending security update on at least one host
        prisma.packages.count({
          where: {
            host_packages: {
              some: {
                needs_update: true,
                is_security_update: true,
              },
            },
          },
        }),
        // Distinct packages installed on at least one host
        prisma.packages.count({
          where: {
            host_packages: {
              some: {}, // At least one host has this package
            },
          },
        }),
        // Active hosts
        prisma.hosts.count({
          where: {
            status: "active",
          },
        }),
        // Active hosts with at least one package needing an update
        prisma.hosts.count({
          where: {
            status: "active",
            host_packages: {
              some: {
                needs_update: true,
              },
            },
          },
        }),
      ]);

      // Persist the snapshot; timestamp is set explicitly so the row
      // reflects collection time rather than insert time.
      await prisma.system_statistics.create({
        data: {
          id: uuidv4(),
          unique_packages_count: uniquePackagesCount,
          unique_security_count: uniqueSecurityCount,
          total_packages: totalPackages,
          total_hosts: totalHosts,
          hosts_needing_updates: hostsNeedingUpdates,
          timestamp: new Date(),
        },
      });

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ System statistics collection completed in ${executionTime}ms - Unique packages: ${uniquePackagesCount}, Security: ${uniqueSecurityCount}, Total hosts: ${totalHosts}`,
      );

      return {
        success: true,
        uniquePackagesCount,
        uniqueSecurityCount,
        totalPackages,
        totalHosts,
        hostsNeedingUpdates,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ System statistics collection failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring system statistics collection (every 30 minutes).
   * Uses a fixed jobId so repeated calls do not create duplicate repeaters.
   *
   * @returns {Promise<object>} The scheduled BullMQ job.
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "system-statistics",
      {},
      {
        repeat: { pattern: "*/30 * * * *" }, // Every 30 minutes
        jobId: "system-statistics-recurring",
      },
    );
    console.log("✅ System statistics collection scheduled (every 30 minutes)");
    return job;
  }

  /**
   * Trigger a one-off, manual system statistics collection with elevated
   * priority so it runs ahead of the scheduled job.
   *
   * @returns {Promise<object>} The enqueued BullMQ job.
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "system-statistics-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual system statistics collection triggered");
    return job;
  }
}
module.exports = SystemStatistics;