Mirror of https://github.com/9technologygroup/patchmon.net.git (synced 2025-11-12 18:06:39 +00:00)
I should really commit more often instead of sending over one massive commit.
Blame my ADHD brain - sorry. Now the server runs its automation properly using BullMQ and Redis. It also exposes an API endpoint that accepts WebSocket connections (WS or WSS) from agents. Updated docker-compose.yml and its documentation.
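For context, here is a minimal, self-contained sketch of the BullMQ-over-Redis pattern this commit adopts: a queue and a worker sharing one Redis connection, with a recurring cron job and a manual trigger. The queue name, Redis address, and handler body are illustrative placeholders, not the project's actual wiring; the real setup lives in backend/src/services/automation in the diff below.

const { Queue, Worker } = require("bullmq");

// Shared Redis connection settings (placeholder address)
const connection = { host: "127.0.0.1", port: 6379 };

// A queue holds jobs; a worker registered under the same name consumes them
const queue = new Queue("example-cleanup", { connection });

const worker = new Worker(
  "example-cleanup",
  async (job) => {
    // The automation's actual work goes here (see orphanedPackageCleanup.js below)
    console.log(`Processing job ${job.id} (${job.name})`);
    return { success: true };
  },
  { connection, concurrency: 1 },
);

async function scheduleAndTrigger() {
  // Recurring job, daily at 3 AM (same repeat style the diff uses)
  await queue.add("recurring", {}, { repeat: { cron: "0 3 * * *" } });
  // Manual, high-priority trigger
  await queue.add("manual", {}, { priority: 1 });
}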
@@ -194,6 +194,30 @@ router.post(
},
);

// Trigger manual orphaned package cleanup
router.post(
"/trigger/orphaned-package-cleanup",
authenticateToken,
async (_req, res) => {
try {
const job = await queueManager.triggerOrphanedPackageCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Orphaned package cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering orphaned package cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger orphaned package cleanup",
});
}
},
);

// Get queue health status
router.get("/health", authenticateToken, async (_req, res) => {
try {
@@ -249,6 +273,7 @@ router.get("/overview", authenticateToken, async (_req, res) => {
queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
]);

@@ -257,17 +282,20 @@ router.get("/overview", authenticateToken, async (_req, res) => {
scheduledTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed,
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed,

runningTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
stats[QUEUE_NAMES.SESSION_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active,
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active,

failedTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed,
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed,

totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
return (
@@ -331,10 +359,10 @@ router.get("/overview", authenticateToken, async (_req, res) => {
stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
},
{
name: "Collect Host Statistics",
queue: QUEUE_NAMES.AGENT_COMMANDS,
description: "Collects package statistics from connected agents only",
schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
name: "Orphaned Package Cleanup",
queue: QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
description: "Removes packages with no associated hosts",
schedule: "Daily at 3 AM",
lastRun: recentJobs[3][0]?.finishedOn
? new Date(recentJobs[3][0].finishedOn).toLocaleString()
: "Never",
@@ -344,6 +372,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
: recentJobs[3][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
},
{
name: "Collect Host Statistics",
queue: QUEUE_NAMES.AGENT_COMMANDS,
description: "Collects package statistics from connected agents only",
schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
lastRun: recentJobs[4][0]?.finishedOn
? new Date(recentJobs[4][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[4][0]?.finishedOn || 0,
status: recentJobs[4][0]?.failedReason
? "Failed"
: recentJobs[4][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
},
].sort((a, b) => {
@@ -202,11 +202,15 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
auto_update: true,
notes: true,
api_id: true,
host_groups: {
select: {
id: true,
name: true,
color: true,
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
_count: {
@@ -356,11 +360,15 @@ router.get(
prisma.hosts.findUnique({
where: { id: hostId },
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
host_packages: {
@@ -558,22 +566,34 @@ router.get(
packages_count: true,
security_count: true,
total_packages: true,
host_id: true,
status: true,
},
orderBy: {
timestamp: "asc",
},
});

// Process data to show actual values (no averaging)
// Enhanced data validation and processing
const processedData = trendsData
.filter((record) => record.total_packages !== null) // Only include records with valid data
.filter((record) => {
// Enhanced validation
return (
record.total_packages !== null &&
record.total_packages >= 0 &&
record.packages_count >= 0 &&
record.security_count >= 0 &&
record.security_count <= record.packages_count && // Security can't exceed outdated
record.status === "success"
); // Only include successful reports
})
.map((record) => {
const date = new Date(record.timestamp);
let timeKey;

if (daysInt <= 1) {
// For hourly view, use exact timestamp
timeKey = date.toISOString().substring(0, 16); // YYYY-MM-DDTHH:MM
// For hourly view, group by hour only (not minutes)
timeKey = date.toISOString().substring(0, 13); // YYYY-MM-DDTHH
} else {
// For daily view, group by day
timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
@@ -584,64 +604,342 @@ router.get(
total_packages: record.total_packages,
packages_count: record.packages_count || 0,
security_count: record.security_count || 0,
host_id: record.host_id,
timestamp: record.timestamp,
};
})
.sort((a, b) => a.timeKey.localeCompare(b.timeKey)); // Sort by time
});

// Get hosts list for dropdown (always fetch for dropdown functionality)
// Determine if we need aggregation based on host filter
const needsAggregation =
!hostId || hostId === "all" || hostId === "undefined";

let aggregatedArray;

if (needsAggregation) {
// For "All Hosts" mode, we need to calculate the actual total packages differently
// Instead of aggregating historical data (which is per-host), we'll use the current total
// and show that as a flat line, since total packages don't change much over time

// Get the current total packages count (unique packages across all hosts)
const currentTotalPackages = await prisma.packages.count({
where: {
host_packages: {
some: {}, // At least one host has this package
},
},
});

// Aggregate data by timeKey when looking at "All Hosts" or no specific host
const aggregatedData = processedData.reduce((acc, item) => {
if (!acc[item.timeKey]) {
acc[item.timeKey] = {
timeKey: item.timeKey,
total_packages: currentTotalPackages, // Use current total packages
packages_count: 0,
security_count: 0,
record_count: 0,
host_ids: new Set(),
min_timestamp: item.timestamp,
max_timestamp: item.timestamp,
};
}

// For outdated and security packages: SUM (these represent counts across hosts)
acc[item.timeKey].packages_count += item.packages_count;
acc[item.timeKey].security_count += item.security_count;

acc[item.timeKey].record_count += 1;
acc[item.timeKey].host_ids.add(item.host_id);

// Track timestamp range
if (item.timestamp < acc[item.timeKey].min_timestamp) {
acc[item.timeKey].min_timestamp = item.timestamp;
}
if (item.timestamp > acc[item.timeKey].max_timestamp) {
acc[item.timeKey].max_timestamp = item.timestamp;
}

return acc;
}, {});

// Convert to array and add metadata
aggregatedArray = Object.values(aggregatedData)
.map((item) => ({
...item,
host_count: item.host_ids.size,
host_ids: Array.from(item.host_ids),
}))
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
} else {
// For specific host, show individual data points without aggregation
// But still group by timeKey to handle multiple reports from same host in same time period
const hostAggregatedData = processedData.reduce((acc, item) => {
if (!acc[item.timeKey]) {
acc[item.timeKey] = {
timeKey: item.timeKey,
total_packages: 0,
packages_count: 0,
security_count: 0,
record_count: 0,
host_ids: new Set([item.host_id]),
min_timestamp: item.timestamp,
max_timestamp: item.timestamp,
};
}

// For same host, take the latest values (not sum)
// This handles cases where a host reports multiple times in the same time period
if (item.timestamp > acc[item.timeKey].max_timestamp) {
acc[item.timeKey].total_packages = item.total_packages;
acc[item.timeKey].packages_count = item.packages_count;
acc[item.timeKey].security_count = item.security_count;
acc[item.timeKey].max_timestamp = item.timestamp;
}

acc[item.timeKey].record_count += 1;

return acc;
}, {});

// Convert to array
aggregatedArray = Object.values(hostAggregatedData)
.map((item) => ({
...item,
host_count: item.host_ids.size,
host_ids: Array.from(item.host_ids),
}))
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
}

// Handle sparse data by filling missing time periods
const fillMissingPeriods = (data, daysInt) => {
const filledData = [];
const startDate = new Date();
startDate.setDate(startDate.getDate() - daysInt);

const dataMap = new Map(data.map((item) => [item.timeKey, item]));

const endDate = new Date();
const currentDate = new Date(startDate);

// Find the last known values for interpolation
let lastKnownValues = null;
if (data.length > 0) {
lastKnownValues = {
total_packages: data[0].total_packages,
packages_count: data[0].packages_count,
security_count: data[0].security_count,
};
}

while (currentDate <= endDate) {
let timeKey;
if (daysInt <= 1) {
timeKey = currentDate.toISOString().substring(0, 13); // Hourly
currentDate.setHours(currentDate.getHours() + 1);
} else {
timeKey = currentDate.toISOString().split("T")[0]; // Daily
currentDate.setDate(currentDate.getDate() + 1);
}

if (dataMap.has(timeKey)) {
const item = dataMap.get(timeKey);
filledData.push(item);
// Update last known values
lastKnownValues = {
total_packages: item.total_packages,
packages_count: item.packages_count,
security_count: item.security_count,
};
} else {
// For missing periods, use the last known values (interpolation)
// This creates a continuous line instead of gaps
filledData.push({
timeKey,
total_packages: lastKnownValues?.total_packages || 0,
packages_count: lastKnownValues?.packages_count || 0,
security_count: lastKnownValues?.security_count || 0,
record_count: 0,
host_count: 0,
host_ids: [],
min_timestamp: null,
max_timestamp: null,
isInterpolated: true, // Mark as interpolated for debugging
});
}
}

return filledData;
};

const finalProcessedData = fillMissingPeriods(aggregatedArray, daysInt);

// Get hosts list for dropdown
const hostsList = await prisma.hosts.findMany({
select: {
id: true,
friendly_name: true,
hostname: true,
last_update: true,
status: true,
},
orderBy: {
friendly_name: "asc",
},
});

// Get current package state for offline fallback
let currentPackageState = null;
if (hostId && hostId !== "all" && hostId !== "undefined") {
// Get current package counts for specific host
const currentState = await prisma.host_packages.aggregate({
where: {
host_id: hostId,
},
_count: {
id: true,
},
});

// Get counts for boolean fields separately
const outdatedCount = await prisma.host_packages.count({
where: {
host_id: hostId,
needs_update: true,
},
});

const securityCount = await prisma.host_packages.count({
where: {
host_id: hostId,
is_security_update: true,
},
});

currentPackageState = {
total_packages: currentState._count.id,
packages_count: outdatedCount,
security_count: securityCount,
};
} else {
// Get current package counts for all hosts
// Total packages = count of unique packages installed on at least one host
const totalPackagesCount = await prisma.packages.count({
where: {
host_packages: {
some: {}, // At least one host has this package
},
},
});

// Get counts for boolean fields separately
const outdatedCount = await prisma.host_packages.count({
where: {
needs_update: true,
},
});

const securityCount = await prisma.host_packages.count({
where: {
is_security_update: true,
},
});

currentPackageState = {
total_packages: totalPackagesCount,
packages_count: outdatedCount,
security_count: securityCount,
};
}

// Format data for chart
const chartData = {
labels: [],
datasets: [
{
label: "Total Packages",
label: needsAggregation
? "Total Packages (All Hosts)"
: "Total Packages",
data: [],
borderColor: "#3B82F6", // Blue
backgroundColor: "rgba(59, 130, 246, 0.1)",
tension: 0.4,
hidden: true, // Hidden by default
spanGaps: true, // Connect lines across missing data
pointRadius: 3,
pointHoverRadius: 5,
},
{
label: "Outdated Packages",
label: needsAggregation
? "Total Outdated Packages"
: "Outdated Packages",
data: [],
borderColor: "#F59E0B", // Orange
backgroundColor: "rgba(245, 158, 11, 0.1)",
tension: 0.4,
spanGaps: true, // Connect lines across missing data
pointRadius: 3,
pointHoverRadius: 5,
},
{
label: "Security Packages",
label: needsAggregation
? "Total Security Packages"
: "Security Packages",
data: [],
borderColor: "#EF4444", // Red
backgroundColor: "rgba(239, 68, 68, 0.1)",
tension: 0.4,
spanGaps: true, // Connect lines across missing data
pointRadius: 3,
pointHoverRadius: 5,
},
],
};

// Process aggregated data
processedData.forEach((item) => {
finalProcessedData.forEach((item) => {
chartData.labels.push(item.timeKey);
chartData.datasets[0].data.push(item.total_packages);
chartData.datasets[1].data.push(item.packages_count);
chartData.datasets[2].data.push(item.security_count);
});

// Calculate data quality metrics
const dataQuality = {
totalRecords: trendsData.length,
validRecords: processedData.length,
aggregatedPoints: aggregatedArray.length,
filledPoints: finalProcessedData.length,
recordsWithNullTotal: trendsData.filter(
(r) => r.total_packages === null,
).length,
recordsWithInvalidData: trendsData.length - processedData.length,
successfulReports: trendsData.filter((r) => r.status === "success")
.length,
failedReports: trendsData.filter((r) => r.status === "error").length,
};

res.json({
chartData,
hosts: hostsList,
period: daysInt,
hostId: hostId || "all",
currentPackageState,
dataQuality,
aggregationInfo: {
hasData: aggregatedArray.length > 0,
hasGaps: finalProcessedData.some((item) => item.record_count === 0),
lastDataPoint:
aggregatedArray.length > 0
? aggregatedArray[aggregatedArray.length - 1]
: null,
aggregationMode: needsAggregation
? "sum_across_hosts"
: "individual_host_data",
explanation: needsAggregation
? "Data is summed across all hosts for each time period"
: "Data shows individual host values without cross-host aggregation",
},
});
} catch (error) {
console.error("Error fetching package trends:", error);
@@ -650,4 +948,348 @@ router.get(
},
);

// Diagnostic endpoint to investigate package spikes
router.get(
"/package-spike-analysis",
authenticateToken,
requireViewHosts,
async (req, res) => {
try {
const { date, time, hours = 2 } = req.query;

if (!date || !time) {
return res.status(400).json({
error:
"Date and time parameters are required. Format: date=2025-10-17&time=18:00",
});
}

// Parse the specific date and time
const targetDateTime = new Date(`${date}T${time}:00`);
const startTime = new Date(targetDateTime);
startTime.setHours(startTime.getHours() - parseInt(hours, 10));
const endTime = new Date(targetDateTime);
endTime.setHours(endTime.getHours() + parseInt(hours, 10));

console.log(
`Analyzing package spike around ${targetDateTime.toISOString()}`,
);
console.log(
`Time range: ${startTime.toISOString()} to ${endTime.toISOString()}`,
);

// Get all update history records in the time window
const spikeData = await prisma.update_history.findMany({
where: {
timestamp: {
gte: startTime,
lte: endTime,
},
},
select: {
id: true,
host_id: true,
timestamp: true,
packages_count: true,
security_count: true,
total_packages: true,
status: true,
error_message: true,
execution_time: true,
payload_size_kb: true,
hosts: {
select: {
friendly_name: true,
hostname: true,
os_type: true,
os_version: true,
},
},
},
orderBy: {
timestamp: "asc",
},
});

// Analyze the data
const analysis = {
timeWindow: {
start: startTime.toISOString(),
end: endTime.toISOString(),
target: targetDateTime.toISOString(),
},
totalRecords: spikeData.length,
successfulReports: spikeData.filter((r) => r.status === "success")
.length,
failedReports: spikeData.filter((r) => r.status === "error").length,
uniqueHosts: [...new Set(spikeData.map((r) => r.host_id))].length,
hosts: {},
timeline: [],
summary: {
maxPackagesCount: 0,
maxSecurityCount: 0,
maxTotalPackages: 0,
avgPackagesCount: 0,
avgSecurityCount: 0,
avgTotalPackages: 0,
},
};

// Group by host and analyze each host's behavior
spikeData.forEach((record) => {
const hostId = record.host_id;
if (!analysis.hosts[hostId]) {
analysis.hosts[hostId] = {
hostInfo: record.hosts,
records: [],
summary: {
totalReports: 0,
successfulReports: 0,
failedReports: 0,
maxPackagesCount: 0,
maxSecurityCount: 0,
maxTotalPackages: 0,
avgPackagesCount: 0,
avgSecurityCount: 0,
avgTotalPackages: 0,
},
};
}

analysis.hosts[hostId].records.push({
timestamp: record.timestamp,
packages_count: record.packages_count,
security_count: record.security_count,
total_packages: record.total_packages,
status: record.status,
error_message: record.error_message,
execution_time: record.execution_time,
payload_size_kb: record.payload_size_kb,
});

analysis.hosts[hostId].summary.totalReports++;
if (record.status === "success") {
analysis.hosts[hostId].summary.successfulReports++;
analysis.hosts[hostId].summary.maxPackagesCount = Math.max(
analysis.hosts[hostId].summary.maxPackagesCount,
record.packages_count,
);
analysis.hosts[hostId].summary.maxSecurityCount = Math.max(
analysis.hosts[hostId].summary.maxSecurityCount,
record.security_count,
);
analysis.hosts[hostId].summary.maxTotalPackages = Math.max(
analysis.hosts[hostId].summary.maxTotalPackages,
record.total_packages || 0,
);
} else {
analysis.hosts[hostId].summary.failedReports++;
}
});

// Calculate averages for each host
Object.keys(analysis.hosts).forEach((hostId) => {
const host = analysis.hosts[hostId];
const successfulRecords = host.records.filter(
(r) => r.status === "success",
);

if (successfulRecords.length > 0) {
host.summary.avgPackagesCount = Math.round(
successfulRecords.reduce((sum, r) => sum + r.packages_count, 0) /
successfulRecords.length,
);
host.summary.avgSecurityCount = Math.round(
successfulRecords.reduce((sum, r) => sum + r.security_count, 0) /
successfulRecords.length,
);
host.summary.avgTotalPackages = Math.round(
successfulRecords.reduce(
(sum, r) => sum + (r.total_packages || 0),
0,
) / successfulRecords.length,
);
}
});

// Create timeline with hourly/daily aggregation
const timelineMap = new Map();
spikeData.forEach((record) => {
const timeKey = record.timestamp.toISOString().substring(0, 13); // Hourly
if (!timelineMap.has(timeKey)) {
timelineMap.set(timeKey, {
timestamp: timeKey,
totalReports: 0,
successfulReports: 0,
failedReports: 0,
totalPackagesCount: 0,
totalSecurityCount: 0,
totalTotalPackages: 0,
uniqueHosts: new Set(),
});
}

const timelineEntry = timelineMap.get(timeKey);
timelineEntry.totalReports++;
timelineEntry.uniqueHosts.add(record.host_id);

if (record.status === "success") {
timelineEntry.successfulReports++;
timelineEntry.totalPackagesCount += record.packages_count;
timelineEntry.totalSecurityCount += record.security_count;
timelineEntry.totalTotalPackages += record.total_packages || 0;
} else {
timelineEntry.failedReports++;
}
});

// Convert timeline map to array
analysis.timeline = Array.from(timelineMap.values())
.map((entry) => ({
...entry,
uniqueHosts: entry.uniqueHosts.size,
}))
.sort((a, b) => a.timestamp.localeCompare(b.timestamp));

// Calculate overall summary
const successfulRecords = spikeData.filter((r) => r.status === "success");
if (successfulRecords.length > 0) {
analysis.summary.maxPackagesCount = Math.max(
...successfulRecords.map((r) => r.packages_count),
);
analysis.summary.maxSecurityCount = Math.max(
...successfulRecords.map((r) => r.security_count),
);
analysis.summary.maxTotalPackages = Math.max(
...successfulRecords.map((r) => r.total_packages || 0),
);
analysis.summary.avgPackagesCount = Math.round(
successfulRecords.reduce((sum, r) => sum + r.packages_count, 0) /
successfulRecords.length,
);
analysis.summary.avgSecurityCount = Math.round(
successfulRecords.reduce((sum, r) => sum + r.security_count, 0) /
successfulRecords.length,
);
analysis.summary.avgTotalPackages = Math.round(
successfulRecords.reduce(
(sum, r) => sum + (r.total_packages || 0),
0,
) / successfulRecords.length,
);
}

// Identify potential causes of the spike
const potentialCauses = [];

// Check for hosts with unusually high package counts
Object.keys(analysis.hosts).forEach((hostId) => {
const host = analysis.hosts[hostId];
if (
host.summary.maxPackagesCount >
analysis.summary.avgPackagesCount * 2
) {
potentialCauses.push({
type: "high_package_count",
hostId,
hostName: host.hostInfo.friendly_name || host.hostInfo.hostname,
value: host.summary.maxPackagesCount,
avg: analysis.summary.avgPackagesCount,
});
}
});

// Check for multiple hosts reporting at the same time (this explains the 500 vs 59 discrepancy)
const concurrentReports = analysis.timeline.filter(
(entry) => entry.uniqueHosts > 1,
);
if (concurrentReports.length > 0) {
potentialCauses.push({
type: "concurrent_reports",
description:
"Multiple hosts reported simultaneously - this explains why chart shows higher numbers than individual host reports",
count: concurrentReports.length,
details: concurrentReports.map((entry) => ({
timestamp: entry.timestamp,
totalPackagesCount: entry.totalPackagesCount,
uniqueHosts: entry.uniqueHosts,
avgPerHost: Math.round(
entry.totalPackagesCount / entry.uniqueHosts,
),
})),
explanation:
"The chart sums package counts across all hosts. If multiple hosts report at the same time, the chart shows the total sum, not individual host counts.",
});
}

// Check for failed reports that might indicate system issues
if (analysis.failedReports > 0) {
potentialCauses.push({
type: "failed_reports",
count: analysis.failedReports,
percentage: Math.round(
(analysis.failedReports / analysis.totalRecords) * 100,
),
});
}

// Add aggregation explanation
const aggregationExplanation = {
type: "aggregation_explanation",
description: "Chart Aggregation Logic",
details: {
howItWorks:
"The package trends chart sums package counts across all hosts for each time period",
individualHosts:
"Each host reports its own package count (e.g., 59 packages)",
chartDisplay:
"Chart shows the sum of all hosts' package counts (e.g., 59 + other hosts = 500)",
timeGrouping:
"Multiple hosts reporting in the same hour/day are aggregated together",
},
example: {
host1: "Host A reports 59 outdated packages",
host2: "Host B reports 120 outdated packages",
host3: "Host C reports 321 outdated packages",
chartShows: "Chart displays 500 total packages (59+120+321)",
},
};
potentialCauses.push(aggregationExplanation);

// Add specific host breakdown if a host ID is provided
let specificHostAnalysis = null;
if (req.query.hostId) {
const hostId = req.query.hostId;
const hostData = analysis.hosts[hostId];
if (hostData) {
specificHostAnalysis = {
hostId,
hostInfo: hostData.hostInfo,
summary: hostData.summary,
records: hostData.records,
explanation: `This host reported ${hostData.summary.maxPackagesCount} outdated packages, but the chart shows ${analysis.summary.maxPackagesCount} because it sums across all hosts that reported at the same time.`,
};
}
}

res.json({
analysis,
potentialCauses,
specificHostAnalysis,
recommendations: [
"Check if any hosts had major package updates around this time",
"Verify if any new hosts were added to the system",
"Check for system maintenance or updates that might have triggered package checks",
"Review any automation or scheduled tasks that run around 6pm",
"Check if any repositories were updated or new packages were released",
"Remember: Chart shows SUM of all hosts' package counts, not individual host counts",
],
});
} catch (error) {
console.error("Error analyzing package spike:", error);
res.status(500).json({ error: "Failed to analyze package spike" });
}
},
);

module.exports = router;
@@ -15,7 +15,7 @@ router.get("/", authenticateToken, async (_req, res) => {
include: {
_count: {
select: {
hosts: true,
host_group_memberships: true,
},
},
},
@@ -39,16 +39,20 @@ router.get("/:id", authenticateToken, async (req, res) => {
const hostGroup = await prisma.host_groups.findUnique({
where: { id },
include: {
hosts: {
select: {
id: true,
friendly_name: true,
hostname: true,
ip: true,
os_type: true,
os_version: true,
status: true,
last_update: true,
host_group_memberships: {
include: {
hosts: {
select: {
id: true,
friendly_name: true,
hostname: true,
ip: true,
os_type: true,
os_version: true,
status: true,
last_update: true,
},
},
},
},
},
@@ -195,7 +199,7 @@ router.delete(
include: {
_count: {
select: {
hosts: true,
host_group_memberships: true,
},
},
},
@@ -205,11 +209,10 @@ router.delete(
return res.status(404).json({ error: "Host group not found" });
}

// If host group has hosts, ungroup them first
if (existingGroup._count.hosts > 0) {
await prisma.hosts.updateMany({
// If host group has memberships, remove them first
if (existingGroup._count.host_group_memberships > 0) {
await prisma.host_group_memberships.deleteMany({
where: { host_group_id: id },
data: { host_group_id: null },
});
}

@@ -231,7 +234,13 @@ router.get("/:id/hosts", authenticateToken, async (req, res) => {
const { id } = req.params;

const hosts = await prisma.hosts.findMany({
where: { host_group_id: id },
where: {
host_group_memberships: {
some: {
host_group_id: id,
},
},
},
select: {
id: true,
friendly_name: true,
@@ -14,7 +14,7 @@ const {
const router = express.Router();
const prisma = new PrismaClient();

// Secure endpoint to download the agent script (requires API authentication)
// Secure endpoint to download the agent binary (requires API authentication)
router.get("/agent/download", async (req, res) => {
try {
// Verify API credentials
@@ -34,46 +34,50 @@ router.get("/agent/download", async (req, res) => {
return res.status(401).json({ error: "Invalid API credentials" });
}

// Serve agent script directly from file system
// Get architecture parameter (default to amd64)
const architecture = req.query.arch || "amd64";

// Validate architecture
const validArchitectures = ["amd64", "386", "arm64"];
if (!validArchitectures.includes(architecture)) {
return res.status(400).json({
error: "Invalid architecture. Must be one of: amd64, 386, arm64",
});
}

// Serve agent binary directly from file system
const fs = require("node:fs");
const path = require("node:path");

const agentPath = path.join(__dirname, "../../../agents/patchmon-agent.sh");
const binaryName = `patchmon-agent-linux-${architecture}`;
const binaryPath = path.join(__dirname, "../../../agents", binaryName);

if (!fs.existsSync(agentPath)) {
return res.status(404).json({ error: "Agent script not found" });
if (!fs.existsSync(binaryPath)) {
return res.status(404).json({
error: `Agent binary not found for architecture: ${architecture}`,
});
}

// Read file and convert line endings
let scriptContent = fs
.readFileSync(agentPath, "utf8")
.replace(/\r\n/g, "\n")
.replace(/\r/g, "\n");

// Determine curl flags dynamically from settings for consistency
let curlFlags = "-s";
try {
const settings = await prisma.settings.findFirst();
if (settings && settings.ignore_ssl_self_signed === true) {
curlFlags = "-sk";
}
} catch (_) {}

// Inject the curl flags into the script
scriptContent = scriptContent.replace(
'CURL_FLAGS=""',
`CURL_FLAGS="${curlFlags}"`,
);

res.setHeader("Content-Type", "application/x-shellscript");
// Set appropriate headers for binary download
res.setHeader("Content-Type", "application/octet-stream");
res.setHeader(
"Content-Disposition",
'attachment; filename="patchmon-agent.sh"',
`attachment; filename="${binaryName}"`,
);
res.send(scriptContent);

// Stream the binary file
const fileStream = fs.createReadStream(binaryPath);
fileStream.pipe(res);

fileStream.on("error", (error) => {
console.error("Binary stream error:", error);
if (!res.headersSent) {
res.status(500).json({ error: "Failed to stream agent binary" });
}
});
} catch (error) {
console.error("Agent download error:", error);
res.status(500).json({ error: "Failed to download agent script" });
res.status(500).json({ error: "Failed to serve agent binary" });
}
});

@@ -158,7 +162,14 @@ router.post(
body("friendly_name")
.isLength({ min: 1 })
.withMessage("Friendly name is required"),
body("hostGroupId").optional(),
body("hostGroupIds")
.optional()
.isArray()
.withMessage("Host group IDs must be an array"),
body("hostGroupIds.*")
.optional()
.isUUID()
.withMessage("Each host group ID must be a valid UUID"),
],
async (req, res) => {
try {
@@ -167,19 +178,21 @@ router.post(
return res.status(400).json({ errors: errors.array() });
}

const { friendly_name, hostGroupId } = req.body;
const { friendly_name, hostGroupIds } = req.body;

// Generate unique API credentials for this host
const { apiId, apiKey } = generateApiCredentials();

// If hostGroupId is provided, verify the group exists
if (hostGroupId) {
const hostGroup = await prisma.host_groups.findUnique({
where: { id: hostGroupId },
// If hostGroupIds is provided, verify all groups exist
if (hostGroupIds && hostGroupIds.length > 0) {
const hostGroups = await prisma.host_groups.findMany({
where: { id: { in: hostGroupIds } },
});

if (!hostGroup) {
return res.status(400).json({ error: "Host group not found" });
if (hostGroups.length !== hostGroupIds.length) {
return res
.status(400)
.json({ error: "One or more host groups not found" });
}
}

@@ -195,16 +208,31 @@ router.post(
architecture: null, // Will be updated when agent connects
api_id: apiId,
api_key: apiKey,
host_group_id: hostGroupId || null,
status: "pending", // Will change to 'active' when agent connects
updated_at: new Date(),
// Create host group memberships if hostGroupIds are provided
host_group_memberships:
hostGroupIds && hostGroupIds.length > 0
? {
create: hostGroupIds.map((groupId) => ({
id: uuidv4(),
host_groups: {
connect: { id: groupId },
},
})),
}
: undefined,
},
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
@@ -216,7 +244,10 @@ router.post(
friendlyName: host.friendly_name,
apiId: host.api_id,
apiKey: host.api_key,
hostGroup: host.host_groups,
hostGroups:
host.host_group_memberships?.map(
(membership) => membership.host_groups,
) || [],
instructions:
"Use these credentials in your patchmon agent configuration. System information will be automatically detected when the agent connects.",
});
@@ -732,18 +763,96 @@ router.post(
},
);

// Admin endpoint to bulk update host groups
// TODO: Admin endpoint to bulk update host groups - needs to be rewritten for many-to-many relationship
// router.put(
// "/bulk/group",
// authenticateToken,
// requireManageHosts,
// [
// body("hostIds").isArray().withMessage("Host IDs must be an array"),
// body("hostIds.*")
// .isLength({ min: 1 })
// .withMessage("Each host ID must be provided"),
// body("hostGroupId").optional(),
// ],
// async (req, res) => {
// try {
// const errors = validationResult(req);
// if (!errors.isEmpty()) {
// return res.status(400).json({ errors: errors.array() });
// }

// const { hostIds, hostGroupId } = req.body;

// // If hostGroupId is provided, verify the group exists
// if (hostGroupId) {
// const hostGroup = await prisma.host_groups.findUnique({
// where: { id: hostGroupId },
// });

// if (!hostGroup) {
// return res.status(400).json({ error: "Host group not found" });
// }
// }

// // Check if all hosts exist
// const existingHosts = await prisma.hosts.findMany({
// where: { id: { in: hostIds } },
// select: { id: true, friendly_name: true },
// });

// if (existingHosts.length !== hostIds.length) {
// const foundIds = existingHosts.map((h) => h.id);
// const missingIds = hostIds.filter((id) => !foundIds.includes(id));
// return res.status(400).json({
// error: "Some hosts not found",
// missingHostIds: missingIds,
// });
// }

// // Bulk update host groups
// const updateResult = await prisma.hosts.updateMany({
// where: { id: { in: hostIds } },
// data: {
// host_group_id: hostGroupId || null,
// updated_at: new Date(),
// },
// });

// // Get updated hosts with group information
// const updatedHosts = await prisma.hosts.findMany({
// where: { id: { in: hostIds } },
// select: {
// id: true,
// friendly_name: true,
// host_groups: {
// select: {
// id: true,
// name: true,
// color: true,
// },
// },
// },
// });

// res.json({
// message: `Successfully updated ${updateResult.count} host${updateResult.count !== 1 ? "s" : ""}`,
// updatedCount: updateResult.count,
// hosts: updatedHosts,
// });
// } catch (error) {
// console.error("Bulk host group update error:", error);
// res.status(500).json({ error: "Failed to update host groups" });
// }
// },
// );

// Admin endpoint to update host groups (many-to-many)
router.put(
"/bulk/group",
"/:hostId/groups",
authenticateToken,
requireManageHosts,
[
body("hostIds").isArray().withMessage("Host IDs must be an array"),
body("hostIds.*")
.isLength({ min: 1 })
.withMessage("Each host ID must be provided"),
body("hostGroupId").optional(),
],
[body("groupIds").isArray().optional()],
async (req, res) => {
try {
const errors = validationResult(req);
@@ -751,72 +860,83 @@ router.put(
return res.status(400).json({ errors: errors.array() });
}

const { hostIds, hostGroupId } = req.body;
const { hostId } = req.params;
const { groupIds = [] } = req.body;

// If hostGroupId is provided, verify the group exists
if (hostGroupId) {
const hostGroup = await prisma.host_groups.findUnique({
where: { id: hostGroupId },
// Check if host exists
const host = await prisma.hosts.findUnique({
where: { id: hostId },
});

if (!host) {
return res.status(404).json({ error: "Host not found" });
}

// Verify all groups exist
if (groupIds.length > 0) {
const existingGroups = await prisma.host_groups.findMany({
where: { id: { in: groupIds } },
select: { id: true },
});

if (!hostGroup) {
return res.status(400).json({ error: "Host group not found" });
if (existingGroups.length !== groupIds.length) {
return res.status(400).json({
error: "One or more host groups not found",
provided: groupIds,
found: existingGroups.map((g) => g.id),
});
}
}

// Check if all hosts exist
const existingHosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true },
});

if (existingHosts.length !== hostIds.length) {
const foundIds = existingHosts.map((h) => h.id);
const missingIds = hostIds.filter((id) => !foundIds.includes(id));
return res.status(400).json({
error: "Some hosts not found",
missingHostIds: missingIds,
// Use transaction to update group memberships
const updatedHost = await prisma.$transaction(async (tx) => {
// Remove existing memberships
await tx.host_group_memberships.deleteMany({
where: { host_id: hostId },
});
}

// Bulk update host groups
const updateResult = await prisma.hosts.updateMany({
where: { id: { in: hostIds } },
data: {
host_group_id: hostGroupId || null,
updated_at: new Date(),
},
});
// Add new memberships
if (groupIds.length > 0) {
await tx.host_group_memberships.createMany({
data: groupIds.map((groupId) => ({
id: crypto.randomUUID(),
host_id: hostId,
host_group_id: groupId,
})),
});
}

// Get updated hosts with group information
const updatedHosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: {
id: true,
friendly_name: true,
host_groups: {
select: {
id: true,
name: true,
color: true,
// Return updated host with groups
return await tx.hosts.findUnique({
where: { id: hostId },
include: {
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
});
});

res.json({
message: `Successfully updated ${updateResult.count} host${updateResult.count !== 1 ? "s" : ""}`,
updatedCount: updateResult.count,
hosts: updatedHosts,
message: "Host groups updated successfully",
host: updatedHost,
});
} catch (error) {
console.error("Bulk host group update error:", error);
console.error("Host groups update error:", error);
res.status(500).json({ error: "Failed to update host groups" });
}
},
);

// Admin endpoint to update host group
// Legacy endpoint to update single host group (for backward compatibility)
router.put(
"/:hostId/group",
authenticateToken,
@@ -832,6 +952,9 @@ router.put(
const { hostId } = req.params;
const { hostGroupId } = req.body;

// Convert single group to array and use the new endpoint logic
const _groupIds = hostGroupId ? [hostGroupId] : [];

// Check if host exists
const host = await prisma.hosts.findUnique({
where: { id: hostId },
@@ -841,7 +964,7 @@ router.put(
return res.status(404).json({ error: "Host not found" });
}

// If hostGroupId is provided, verify the group exists
// Verify group exists if provided
if (hostGroupId) {
const hostGroup = await prisma.host_groups.findUnique({
where: { id: hostGroupId },
@@ -852,22 +975,41 @@ router.put(
}
}

// Update host group
const updatedHost = await prisma.hosts.update({
where: { id: hostId },
data: {
host_group_id: hostGroupId || null,
updated_at: new Date(),
},
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
// Use transaction to update group memberships
const updatedHost = await prisma.$transaction(async (tx) => {
// Remove existing memberships
await tx.host_group_memberships.deleteMany({
where: { host_id: hostId },
});

// Add new membership if group provided
if (hostGroupId) {
await tx.host_group_memberships.create({
data: {
id: crypto.randomUUID(),
host_id: hostId,
host_group_id: hostGroupId,
},
});
}

// Return updated host with groups
return await tx.hosts.findUnique({
where: { id: hostId },
include: {
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
},
});
});

res.json({
@@ -903,13 +1045,16 @@ router.get(
agent_version: true,
auto_update: true,
created_at: true,
host_group_id: true,
notes: true,
host_groups: {
select: {
id: true,
name: true,
color: true,
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
@@ -1175,13 +1320,17 @@ router.get("/install", async (req, res) => {
// Check for --force parameter
const forceInstall = req.query.force === "true" || req.query.force === "1";

// Inject the API credentials, server URL, curl flags, and force flag into the script
// Get architecture parameter (default to amd64)
const architecture = req.query.arch || "amd64";

// Inject the API credentials, server URL, curl flags, force flag, and architecture into the script
const envVars = `#!/bin/bash
export PATCHMON_URL="${serverUrl}"
export API_ID="${host.api_id}"
export API_KEY="${host.api_key}"
export CURL_FLAGS="${curlFlags}"
export FORCE_INSTALL="${forceInstall ? "true" : "false"}"
export ARCHITECTURE="${architecture}"

`;

@@ -1558,16 +1707,16 @@ router.patch(
architecture: true,
last_update: true,
status: true,
host_group_id: true,
agent_version: true,
auto_update: true,
created_at: true,
updated_at: true,
host_groups: {
select: {
id: true,
name: true,
color: true,
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
@@ -1631,17 +1780,16 @@ router.patch(
architecture: true,
last_update: true,
status: true,
host_group_id: true,
agent_version: true,
auto_update: true,
created_at: true,
updated_at: true,
notes: true,
host_groups: {
select: {
id: true,
name: true,
color: true,
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
@@ -8,8 +8,8 @@ const { getSettings, updateSettings } = require("../services/settingsService");
const router = express.Router();
const prisma = new PrismaClient();

// WebSocket broadcaster for agent policy updates
const { broadcastSettingsUpdate } = require("../services/agentWs");
// WebSocket broadcaster for agent policy updates (no longer used - queue-based delivery preferred)
// const { broadcastSettingsUpdate } = require("../services/agentWs");
const { queueManager, QUEUE_NAMES } = require("../services/automation");

// Helpers
@@ -225,9 +225,8 @@ router.put(
// Bulk add jobs
await queue.addBulk(jobs);

// Also broadcast immediately to currently connected agents (best-effort)
// This ensures agents receive the change even if their host status isn't active yet
broadcastSettingsUpdate(updateData.update_interval);
// Note: Queue-based delivery handles retries and ensures reliable delivery
// No need for immediate broadcast as it would cause duplicate messages
}

res.json({

@@ -4,6 +4,10 @@ const {
getConnectionInfo,
subscribeToConnectionChanges,
} = require("../services/agentWs");
const {
validate_session,
update_session_activity,
} = require("../utils/session_manager");

const router = express.Router();

@@ -41,12 +45,25 @@ router.get("/status/:apiId/stream", async (req, res) => {
return res.status(401).json({ error: "Authentication required" });
}

// Verify token manually
// Verify token manually with session validation
const jwt = require("jsonwebtoken");
try {
const decoded = jwt.verify(token, process.env.JWT_SECRET);
req.user = decoded;
} catch (_err) {

// Validate session (same as regular auth middleware)
const validation = await validate_session(decoded.sessionId, token);
if (!validation.valid) {
console.error("[SSE] Session validation failed:", validation.reason);
console.error("[SSE] Invalid session for api_id:", apiId);
return res.status(401).json({ error: "Invalid or expired session" });
}

// Update session activity to prevent inactivity timeout
await update_session_activity(decoded.sessionId);

req.user = validation.user;
} catch (err) {
console.error("[SSE] JWT verification failed:", err.message);
console.error("[SSE] Invalid token for api_id:", apiId);
return res.status(401).json({ error: "Invalid or expired token" });
}
@@ -95,9 +112,23 @@ router.get("/status/:apiId/stream", async (req, res) => {
unsubscribe();
});

// Handle errors
// Handle errors - distinguish between different error types
req.on("error", (err) => {
console.error("[SSE] Request error:", err);
// Only log non-connection-reset errors to reduce noise
if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
console.error("[SSE] Request error:", err);
} else {
console.log("[SSE] Client connection reset for api_id:", apiId);
}
clearInterval(heartbeat);
unsubscribe();
});

// Handle response errors
res.on("error", (err) => {
if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
console.error("[SSE] Response error:", err);
}
clearInterval(heartbeat);
unsubscribe();
});

@@ -7,12 +7,14 @@ const agentWs = require("../agentWs");
const GitHubUpdateCheck = require("./githubUpdateCheck");
const SessionCleanup = require("./sessionCleanup");
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
const OrphanedPackageCleanup = require("./orphanedPackageCleanup");

// Queue names
const QUEUE_NAMES = {
GITHUB_UPDATE_CHECK: "github-update-check",
SESSION_CLEANUP: "session-cleanup",
ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup",
AGENT_COMMANDS: "agent-commands",
};

@@ -87,6 +89,8 @@ class QueueManager {
this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
new OrphanedRepoCleanup(this);
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] =
new OrphanedPackageCleanup(this);

console.log("✅ All automation classes initialized");
}
@@ -131,6 +135,18 @@ class QueueManager {
},
);

// Orphaned Package Cleanup Worker
this.workers[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] = new Worker(
QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);

// Agent Commands Worker
this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
QUEUE_NAMES.AGENT_COMMANDS,
@@ -317,6 +333,7 @@ class QueueManager {
await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule();
}

/**
@@ -334,6 +351,12 @@ class QueueManager {
return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
}

async triggerOrphanedPackageCleanup() {
return this.automations[
QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP
].triggerManual();
}

/**
* Get queue statistics
*/
116 backend/src/services/automation/orphanedPackageCleanup.js (Normal file)
@@ -0,0 +1,116 @@
const { prisma } = require("./shared/prisma");

/**
* Orphaned Package Cleanup Automation
* Removes packages with no associated hosts
*/
class OrphanedPackageCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "orphaned-package-cleanup";
}

/**
* Process orphaned package cleanup job
*/
async process(_job) {
const startTime = Date.now();
console.log("🧹 Starting orphaned package cleanup...");

try {
// Find packages with 0 hosts
const orphanedPackages = await prisma.packages.findMany({
where: {
host_packages: {
none: {},
},
},
include: {
_count: {
select: {
host_packages: true,
},
},
},
});

let deletedCount = 0;
const deletedPackages = [];

// Delete orphaned packages
for (const pkg of orphanedPackages) {
try {
await prisma.packages.delete({
where: { id: pkg.id },
});
deletedCount++;
deletedPackages.push({
id: pkg.id,
name: pkg.name,
description: pkg.description,
category: pkg.category,
latest_version: pkg.latest_version,
});
console.log(
`🗑️ Deleted orphaned package: ${pkg.name} (${pkg.latest_version})`,
);
} catch (deleteError) {
console.error(
`❌ Failed to delete package ${pkg.id}:`,
deleteError.message,
);
}
}

const executionTime = Date.now() - startTime;
console.log(
`✅ Orphaned package cleanup completed in ${executionTime}ms - Deleted ${deletedCount} packages`,
);

return {
success: true,
deletedCount,
deletedPackages,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Orphaned package cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}

/**
* Schedule recurring orphaned package cleanup (daily at 3 AM)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-package-cleanup",
{},
{
repeat: { cron: "0 3 * * *" }, // Daily at 3 AM
jobId: "orphaned-package-cleanup-recurring",
},
);
console.log("✅ Orphaned package cleanup scheduled");
return job;
}

/**
* Trigger manual orphaned package cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-package-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual orphaned package cleanup triggered");
return job;
}
}

module.exports = OrphanedPackageCleanup;