Compare commits

...

4 Commits

Author SHA1 Message Date
Muhammad Ibrahim
e73ebc383c fixed dashboard graphs not showing information correctly; statistics are now collected into a dedicated table, and collection was added to the automation queue 2025-11-07 10:00:19 +00:00
Muhammad Ibrahim
63831caba3 fixed TFA route's handling of TFA code entry
Better handling of systems that are already enrolled: check whether the config.yml file exists and ping with its credentials, rather than checking for machine_ID
UI alignment improvements on the repositories page
2025-11-07 08:20:42 +00:00
Muhammad Ibrahim
8e5eb54e02 fixed code quality issues 2025-11-06 22:16:35 +00:00
Muhammad Ibrahim
a8eb3ec21c fix Docker error handling
fix WebSocket routes
add timezone variable to the code
updated env.example to match
2025-11-06 22:08:00 +00:00
16 changed files with 836 additions and 293 deletions

View File

@@ -311,6 +311,37 @@ else
mkdir -p /etc/patchmon
fi
# Check if agent is already configured and working (before we overwrite anything)
info "🔍 Checking if agent is already configured..."
if [[ -f /etc/patchmon/config.yml ]] && [[ -f /etc/patchmon/credentials.yml ]]; then
    if [[ -f /usr/local/bin/patchmon-agent ]]; then
        info "📋 Found existing agent configuration"
        info "🧪 Testing existing configuration with ping..."
        if /usr/local/bin/patchmon-agent ping >/dev/null 2>&1; then
            success "✅ Agent is already configured and ping successful"
            info "📋 Existing configuration is working - skipping installation"
            info ""
            info "If you want to reinstall, remove the configuration files first:"
            info "  sudo rm -f /etc/patchmon/config.yml /etc/patchmon/credentials.yml"
            echo ""
            exit 0
        else
            warning "⚠️ Agent configuration exists but ping failed"
            warning "⚠️ Will move existing configuration and reinstall"
            echo ""
        fi
    else
        warning "⚠️ Configuration files exist but agent binary is missing"
        warning "⚠️ Will move existing configuration and reinstall"
        echo ""
    fi
else
    success "✅ Agent not yet configured - proceeding with installation"
    echo ""
fi
# Step 2: Create configuration files
info "🔐 Creating configuration files..."
@@ -426,33 +457,6 @@ if [[ -f "/etc/patchmon/logs/patchmon-agent.log" ]]; then
fi
# Step 4: Test the configuration
# Check if this machine is already enrolled
info "🔍 Checking if machine is already enrolled..."
existing_check=$(curl $CURL_FLAGS -s -X POST \
    -H "X-API-ID: $API_ID" \
    -H "X-API-KEY: $API_KEY" \
    -H "Content-Type: application/json" \
    -d "{\"machine_id\": \"$MACHINE_ID\"}" \
    "$PATCHMON_URL/api/v1/hosts/check-machine-id" \
    -w "\n%{http_code}" 2>&1)
http_code=$(echo "$existing_check" | tail -n 1)
response_body=$(echo "$existing_check" | sed '$d')
if [[ "$http_code" == "200" ]]; then
    already_enrolled=$(echo "$response_body" | jq -r '.exists' 2>/dev/null || echo "false")
    if [[ "$already_enrolled" == "true" ]]; then
        warning "⚠️ This machine is already enrolled in PatchMon"
        info "Machine ID: $MACHINE_ID"
        info "Existing host: $(echo "$response_body" | jq -r '.host.friendly_name' 2>/dev/null)"
        info ""
        info "The agent will be reinstalled/updated with existing credentials."
        echo ""
    else
        success "✅ Machine not yet enrolled - proceeding with installation"
    fi
fi
info "🧪 Testing API credentials and connectivity..." info "🧪 Testing API credentials and connectivity..."
if /usr/local/bin/patchmon-agent ping; then if /usr/local/bin/patchmon-agent ping; then
success "✅ TEST: API credentials are valid and server is reachable" success "✅ TEST: API credentials are valid and server is reachable"

View File

@@ -230,6 +230,40 @@ while IFS= read -r line; do
info " ✓ Host enrolled successfully: $api_id" info " ✓ Host enrolled successfully: $api_id"
# Check if agent is already installed and working
info " Checking if agent is already configured..."
config_check=$(timeout 10 pct exec "$vmid" -- bash -c "
    if [[ -f /etc/patchmon/config.yml ]] && [[ -f /etc/patchmon/credentials.yml ]]; then
        if [[ -f /usr/local/bin/patchmon-agent ]]; then
            # Try to ping using existing configuration
            if /usr/local/bin/patchmon-agent ping >/dev/null 2>&1; then
                echo 'ping_success'
            else
                echo 'ping_failed'
            fi
        else
            echo 'binary_missing'
        fi
    else
        echo 'not_configured'
    fi
" 2>/dev/null </dev/null || echo "error")
if [[ "$config_check" == "ping_success" ]]; then
info " ✓ Host already enrolled and agent ping successful - skipping"
((skipped_count++)) || true
echo ""
continue
elif [[ "$config_check" == "ping_failed" ]]; then
warn " ⚠ Agent configuration exists but ping failed - will reinstall"
elif [[ "$config_check" == "binary_missing" ]]; then
warn " ⚠ Config exists but agent binary missing - will reinstall"
elif [[ "$config_check" == "not_configured" ]]; then
info " Agent not yet configured - proceeding with installation"
else
warn " ⚠ Could not check agent status - proceeding with installation"
fi
# Ensure curl is installed in the container
info " Checking for curl in container..."
curl_check=$(timeout 10 pct exec "$vmid" -- bash -c "command -v curl >/dev/null 2>&1 && echo 'installed' || echo 'missing'" 2>/dev/null </dev/null || echo "error")
@@ -283,9 +317,10 @@ while IFS= read -r line; do
install_exit_code=0
# Download and execute in separate steps to avoid stdin issues with piping
# Pass CURL_FLAGS as environment variable to container
install_output=$(timeout 180 pct exec "$vmid" --env CURL_FLAGS="$CURL_FLAGS" -- bash -c "
    cd /tmp
    curl \$CURL_FLAGS \
        -H \"X-API-ID: $api_id\" \
        -H \"X-API-KEY: $api_key\" \
        -o patchmon-install.sh \
@@ -422,9 +457,10 @@ if [[ ${#dpkg_error_containers[@]} -gt 0 ]]; then
info " Retrying agent installation..." info " Retrying agent installation..."
install_exit_code=0 install_exit_code=0
install_output=$(timeout 180 pct exec "$vmid" -- bash -c " # Pass CURL_FLAGS as environment variable to container
install_output=$(timeout 180 pct exec "$vmid" --env CURL_FLAGS="$CURL_FLAGS" -- bash -c "
cd /tmp cd /tmp
curl $CURL_FLAGS \ curl \$CURL_FLAGS \
-H \"X-API-ID: $api_id\" \ -H \"X-API-ID: $api_id\" \
-H \"X-API-KEY: $api_key\" \ -H \"X-API-KEY: $api_key\" \
-o patchmon-install.sh \ -o patchmon-install.sh \

View File

@@ -54,3 +54,8 @@ ENABLE_LOGGING=true
TFA_REMEMBER_ME_EXPIRES_IN=30d
TFA_MAX_REMEMBER_SESSIONS=5
TFA_SUSPICIOUS_ACTIVITY_THRESHOLD=3
# Timezone Configuration
# Set the timezone for timestamps and logs (e.g., 'UTC', 'America/New_York', 'Europe/London')
# Defaults to UTC if not set. This ensures consistent timezone handling across the application.
TZ=UTC
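
For reference, a minimal sketch of how the backend reads this variable; it mirrors the get_timezone helper introduced in the timezone utility module further down in this diff (the TIMEZONE fallback name comes from that helper, not from a separate documented setting):

// Resolve the configured timezone, defaulting to UTC
function get_timezone() {
  return process.env.TZ || process.env.TIMEZONE || "UTC";
}
console.log(`Using timezone: ${get_timezone()}`); // e.g. "UTC"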

View File

@@ -0,0 +1,16 @@
-- CreateTable
CREATE TABLE "system_statistics" (
"id" TEXT NOT NULL,
"unique_packages_count" INTEGER NOT NULL,
"unique_security_count" INTEGER NOT NULL,
"total_packages" INTEGER NOT NULL,
"total_hosts" INTEGER NOT NULL,
"hosts_needing_updates" INTEGER NOT NULL,
"timestamp" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "system_statistics_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "system_statistics_timestamp_idx" ON "system_statistics"("timestamp");
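
As a usage sketch (assuming the Prisma client generated from the schema below), the timestamp index supports the range queries the dashboard issues against this table:

// Fetch system statistics collected over the last 7 days, oldest first
const startDate = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
const rows = await prisma.system_statistics.findMany({
  where: { timestamp: { gte: startDate } },
  orderBy: { timestamp: "asc" },
});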

View File

@@ -202,7 +202,7 @@ model update_history {
id              String  @id
host_id         String
packages_count  Int
security_count  Int
total_packages  Int?
payload_size_kb Float?
execution_time  Float?
@@ -212,6 +212,18 @@ model update_history {
hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade)
}
model system_statistics {
  id                    String   @id
  unique_packages_count Int
  unique_security_count Int
  total_packages        Int
  total_hosts           Int
  hosts_needing_updates Int
  timestamp             DateTime @default(now())

  @@index([timestamp])
}
model users {
id String @id
username String @unique

View File

@@ -242,6 +242,30 @@ router.post(
},
);
// Trigger manual system statistics collection
router.post(
  "/trigger/system-statistics",
  authenticateToken,
  async (_req, res) => {
    try {
      const job = await queueManager.triggerSystemStatistics();
      res.json({
        success: true,
        data: {
          jobId: job.id,
          message: "System statistics collection triggered successfully",
        },
      });
    } catch (error) {
      console.error("Error triggering system statistics collection:", error);
      res.status(500).json({
        success: false,
        error: "Failed to trigger system statistics collection",
      });
    }
  },
);
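
A hedged example of calling this endpoint from a client; the /api/v1/automation mount prefix and Bearer-token auth shown here are assumptions, not visible in this diff:

const res = await fetch("/api/v1/automation/trigger/system-statistics", {
  method: "POST",
  headers: { Authorization: `Bearer ${token}` }, // hypothetical session token
});
const { data } = await res.json(); // { jobId, message } on success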
// Get queue health status
router.get("/health", authenticateToken, async (_req, res) => {
try {
@@ -300,6 +324,7 @@ router.get("/overview", authenticateToken, async (_req, res) => {
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
queueManager.getRecentJobs(QUEUE_NAMES.SYSTEM_STATISTICS, 1),
]);
// Calculate overview metrics
@@ -309,21 +334,24 @@ router.get("/overview", authenticateToken, async (_req, res) => {
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed +
stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].delayed +
stats[QUEUE_NAMES.SYSTEM_STATISTICS].delayed,
runningTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
stats[QUEUE_NAMES.SESSION_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active +
stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].active +
stats[QUEUE_NAMES.SYSTEM_STATISTICS].active,
failedTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed +
stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].failed +
stats[QUEUE_NAMES.SYSTEM_STATISTICS].failed,
totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
return (
@@ -435,6 +463,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
: "Never run", : "Never run",
stats: stats[QUEUE_NAMES.AGENT_COMMANDS], stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
}, },
{
  name: "System Statistics Collection",
  queue: QUEUE_NAMES.SYSTEM_STATISTICS,
  description: "Collects aggregated system-wide package statistics",
  schedule: "Every 30 minutes",
  lastRun: recentJobs[6][0]?.finishedOn
    ? new Date(recentJobs[6][0].finishedOn).toLocaleString()
    : "Never",
  lastRunTimestamp: recentJobs[6][0]?.finishedOn || 0,
  status: recentJobs[6][0]?.failedReason
    ? "Failed"
    : recentJobs[6][0]
      ? "Success"
      : "Never run",
  stats: stats[QUEUE_NAMES.SYSTEM_STATISTICS],
},
].sort((a, b) => {
// Sort by last run timestamp (most recent first)
// If both have never run (timestamp 0), maintain original order

View File

@@ -564,174 +564,216 @@ router.get(
const startDate = new Date();
startDate.setDate(endDate.getDate() - daysInt);
// Build where clause
const whereClause = {
  timestamp: {
    gte: startDate,
    lte: endDate,
  },
};

// Add host filter if specified
if (hostId && hostId !== "all" && hostId !== "undefined") {
  whereClause.host_id = hostId;
}

// Get all update history records in the date range
const trendsData = await prisma.update_history.findMany({
  where: whereClause,
  select: {
    timestamp: true,
    packages_count: true,
    security_count: true,
    total_packages: true,
    host_id: true,
    status: true,
  },
  orderBy: {
    timestamp: "asc",
  },
});

// Enhanced data validation and processing
const processedData = trendsData
  .filter((record) => {
    // Enhanced validation
    return (
      record.total_packages !== null &&
      record.total_packages >= 0 &&
      record.packages_count >= 0 &&
      record.security_count >= 0 &&
      record.security_count <= record.packages_count && // Security can't exceed outdated
      record.status === "success"
    ); // Only include successful reports
  })
  .map((record) => {
    const date = new Date(record.timestamp);
    let timeKey;

    if (daysInt <= 1) {
      // For hourly view, group by hour only (not minutes)
      timeKey = date.toISOString().substring(0, 13); // YYYY-MM-DDTHH
    } else {
      // For daily view, group by day
      timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
    }

    return {
      timeKey,
      total_packages: record.total_packages,
      packages_count: record.packages_count || 0,
      security_count: record.security_count || 0,
      host_id: record.host_id,
      timestamp: record.timestamp,
    };
  });
// Determine if we need aggregation based on host filter
const needsAggregation =
  !hostId || hostId === "all" || hostId === "undefined";
let trendsData;

if (needsAggregation) {
  // For "All Hosts" mode, use system_statistics table
  trendsData = await prisma.system_statistics.findMany({
    where: {
      timestamp: {
        gte: startDate,
        lte: endDate,
      },
    },
    select: {
      timestamp: true,
      unique_packages_count: true,
      unique_security_count: true,
      total_packages: true,
      total_hosts: true,
      hosts_needing_updates: true,
    },
    orderBy: {
      timestamp: "asc",
    },
  });
} else {
  // For individual host, use update_history table
  trendsData = await prisma.update_history.findMany({
    where: {
      host_id: hostId,
      timestamp: {
        gte: startDate,
        lte: endDate,
      },
    },
    select: {
      timestamp: true,
      packages_count: true,
      security_count: true,
      total_packages: true,
      host_id: true,
      status: true,
    },
    orderBy: {
      timestamp: "asc",
    },
  });
}
// Process data based on source
let processedData;
let aggregatedArray;

if (needsAggregation) {
  // For "All Hosts" mode, data comes from system_statistics table
  // Already aggregated, just need to format it
  processedData = trendsData
    .filter((record) => {
      // Enhanced validation
      return (
        record.total_packages !== null &&
        record.total_packages >= 0 &&
        record.unique_packages_count >= 0 &&
        record.unique_security_count >= 0 &&
        record.unique_security_count <= record.unique_packages_count
      );
    })
    .map((record) => {
      const date = new Date(record.timestamp);
      let timeKey;

      if (daysInt <= 1) {
        // For "Last 24 hours", use full timestamp for each data point
        // This allows plotting all individual data points
        timeKey = date.toISOString(); // Full ISO timestamp
      } else {
        // For daily view, group by day
        timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
      }

      return {
        timeKey,
        total_packages: record.total_packages,
        packages_count: record.unique_packages_count,
        security_count: record.unique_security_count,
        timestamp: record.timestamp,
      };
    });

  if (daysInt <= 1) {
    // For "Last 24 hours", use all individual data points without grouping
    // Sort by timestamp
    aggregatedArray = processedData.sort(
      (a, b) => a.timestamp.getTime() - b.timestamp.getTime(),
    );
  } else {
    // For longer periods, group by timeKey and take the latest value for each period
    const aggregatedData = processedData.reduce((acc, item) => {
      if (
        !acc[item.timeKey] ||
        item.timestamp > acc[item.timeKey].timestamp
      ) {
        acc[item.timeKey] = item;
      }
      return acc;
    }, {});

    // Convert to array and sort
    aggregatedArray = Object.values(aggregatedData).sort((a, b) =>
      a.timeKey.localeCompare(b.timeKey),
    );
  }
} else {
  // For individual host, data comes from update_history table
  processedData = trendsData
    .filter((record) => {
      // Enhanced validation
      return (
        record.total_packages !== null &&
        record.total_packages >= 0 &&
        record.packages_count >= 0 &&
        record.security_count >= 0 &&
        record.security_count <= record.packages_count &&
        record.status === "success"
      );
    })
    .map((record) => {
      const date = new Date(record.timestamp);
      let timeKey;

      if (daysInt <= 1) {
        // For "Last 24 hours", use full timestamp for each data point
        // This allows plotting all individual data points
        timeKey = date.toISOString(); // Full ISO timestamp
      } else {
        // For daily view, group by day
        timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
      }

      return {
        timeKey,
        total_packages: record.total_packages,
        packages_count: record.packages_count || 0,
        security_count: record.security_count || 0,
        host_id: record.host_id,
        timestamp: record.timestamp,
      };
    });

  if (daysInt <= 1) {
    // For "Last 24 hours", use all individual data points without grouping
    // Sort by timestamp
    aggregatedArray = processedData.sort(
      (a, b) => a.timestamp.getTime() - b.timestamp.getTime(),
    );
  } else {
    // For longer periods, group by timeKey to handle multiple reports from same host in same time period
    const hostAggregatedData = processedData.reduce((acc, item) => {
      if (!acc[item.timeKey]) {
        acc[item.timeKey] = {
          timeKey: item.timeKey,
          total_packages: 0,
          packages_count: 0,
          security_count: 0,
          record_count: 0,
          host_ids: new Set([item.host_id]),
          min_timestamp: item.timestamp,
          max_timestamp: item.timestamp,
        };
      }

      // For same host, take the latest values (not sum)
      // This handles cases where a host reports multiple times in the same time period
      if (item.timestamp > acc[item.timeKey].max_timestamp) {
        acc[item.timeKey].total_packages = item.total_packages;
        acc[item.timeKey].packages_count = item.packages_count;
        acc[item.timeKey].security_count = item.security_count;
        acc[item.timeKey].max_timestamp = item.timestamp;
      }

      acc[item.timeKey].record_count += 1;

      return acc;
    }, {});

    // Convert to array
    aggregatedArray = Object.values(hostAggregatedData)
      .map((item) => ({
        ...item,
        host_count: item.host_ids.size,
        host_ids: Array.from(item.host_ids),
      }))
      .sort((a, b) => a.timeKey.localeCompare(b.timeKey));
  }
}
// Handle sparse data by filling missing time periods
const fillMissingPeriods = (data, daysInt) => {
if (data.length === 0) {
  return [];
}

// For "Last 24 hours", return data as-is without filling gaps
// This allows plotting all individual data points
if (daysInt <= 1) {
  return data;
}
const filledData = [];
const startDate = new Date();
startDate.setDate(startDate.getDate() - daysInt);
@@ -741,50 +783,58 @@ router.get(
const endDate = new Date();
const currentDate = new Date(startDate);
// Sort data by timeKey to get chronological order
const sortedData = [...data].sort((a, b) =>
  a.timeKey.localeCompare(b.timeKey),
);

// Find the first actual data point (don't fill before this)
const firstDataPoint = sortedData[0];
const firstDataTimeKey = firstDataPoint?.timeKey;

// Track last known values as we iterate forward
let lastKnownValues = null;
let hasSeenFirstDataPoint = false;

while (currentDate <= endDate) {
  let timeKey;
  // For daily view, group by day
  timeKey = currentDate.toISOString().split("T")[0]; // YYYY-MM-DD
  currentDate.setDate(currentDate.getDate() + 1);

  // Skip periods before the first actual data point
  if (firstDataTimeKey && timeKey < firstDataTimeKey) {
    continue;
  }

  if (dataMap.has(timeKey)) {
    const item = dataMap.get(timeKey);
    filledData.push(item);
    // Update last known values with actual data
    lastKnownValues = {
      total_packages: item.total_packages || 0,
      packages_count: item.packages_count || 0,
      security_count: item.security_count || 0,
    };
    hasSeenFirstDataPoint = true;
  } else {
    // For missing periods AFTER the first data point, use forward-fill
    // Only fill if we have a last known value and we've seen the first data point
    if (lastKnownValues !== null && hasSeenFirstDataPoint) {
      filledData.push({
        timeKey,
        total_packages: lastKnownValues.total_packages,
        packages_count: lastKnownValues.packages_count,
        security_count: lastKnownValues.security_count,
        record_count: 0,
        host_count: 0,
        host_ids: [],
        min_timestamp: null,
        max_timestamp: null,
        isInterpolated: true, // Mark as interpolated for debugging
      });
    }
    // If we haven't seen the first data point yet, skip this period
  }
}
@@ -810,7 +860,7 @@ router.get(
// Get current package state for offline fallback
let currentPackageState = null;
if (hostId && hostId !== "all" && hostId !== "undefined") {
// For individual host, get current package counts from host_packages
const currentState = await prisma.host_packages.aggregate({
where: {
host_id: hostId,
@@ -841,34 +891,64 @@ router.get(
security_count: securityCount,
};
} else {
// For "All Hosts" mode, use the latest system_statistics record if available
// Otherwise calculate from database
const latestStats = await prisma.system_statistics.findFirst({
  orderBy: {
    timestamp: "desc",
  },
  select: {
    total_packages: true,
    unique_packages_count: true,
    unique_security_count: true,
    timestamp: true,
  },
});

if (latestStats) {
  // Use latest system statistics (collected by scheduled job)
  currentPackageState = {
    total_packages: latestStats.total_packages,
    packages_count: latestStats.unique_packages_count,
    security_count: latestStats.unique_security_count,
  };
} else {
  // Fallback: calculate from database if no statistics collected yet
  const totalPackagesCount = await prisma.packages.count({
    where: {
      host_packages: {
        some: {}, // At least one host has this package
      },
    },
  });

  const uniqueOutdatedCount = await prisma.packages.count({
    where: {
      host_packages: {
        some: {
          needs_update: true,
        },
      },
    },
  });

  const uniqueSecurityCount = await prisma.packages.count({
    where: {
      host_packages: {
        some: {
          needs_update: true,
          is_security_update: true,
        },
      },
    },
  });

  currentPackageState = {
    total_packages: totalPackagesCount,
    packages_count: uniqueOutdatedCount,
    security_count: uniqueSecurityCount,
  };
}
}
// Format data for chart
@@ -923,6 +1003,11 @@ router.get(
chartData.datasets[2].data.push(item.security_count);
});
// Replace the last label with "Now" to indicate current state
if (chartData.labels.length > 0) {
  chartData.labels[chartData.labels.length - 1] = "Now";
}
// Calculate data quality metrics
const dataQuality = {
  totalRecords: trendsData.length,

View File

@@ -2,6 +2,7 @@ const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { getPrismaClient } = require("../config/prisma");
const { v4: uuidv4 } = require("uuid");
const { get_current_time, parse_date } = require("../utils/timezone");
const prisma = getPrismaClient();
const router = express.Router();
@@ -537,14 +538,7 @@ router.post("/collect", async (req, res) => {
return res.status(401).json({ error: "Invalid API credentials" });
}

const now = get_current_time();
// Helper function to validate and parse dates
const parseDate = (dateString) => {
if (!dateString) return now;
const date = new Date(dateString);
return Number.isNaN(date.getTime()) ? now : date;
};
// Process containers
if (containers && Array.isArray(containers)) {
@@ -572,7 +566,7 @@ router.post("/collect", async (req, res) => {
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
source: containerData.image_source || "docker-hub",
created_at: parse_date(containerData.created_at, now),
last_checked: now,
updated_at: now,
},
@@ -597,7 +591,7 @@ router.post("/collect", async (req, res) => {
state: containerData.state,
ports: containerData.ports || null,
started_at: containerData.started_at
  ? parse_date(containerData.started_at, null)
  : null,
updated_at: now,
last_checked: now,
@@ -613,9 +607,9 @@ router.post("/collect", async (req, res) => {
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
created_at: parse_date(containerData.created_at, now),
started_at: containerData.started_at
  ? parse_date(containerData.started_at, null)
  : null,
updated_at: now,
},
@@ -651,7 +645,7 @@ router.post("/collect", async (req, res) => {
? BigInt(imageData.size_bytes)
: null,
source: imageData.source || "docker-hub",
created_at: parse_date(imageData.created_at, now),
updated_at: now,
},
});
@@ -780,14 +774,7 @@ router.post("/../integrations/docker", async (req, res) => {
`[Docker Integration] Processing for host: ${host.friendly_name}`,
);

const now = get_current_time();
// Helper function to validate and parse dates
const parseDate = (dateString) => {
if (!dateString) return now;
const date = new Date(dateString);
return Number.isNaN(date.getTime()) ? now : date;
};
let containersProcessed = 0;
let imagesProcessed = 0;
@@ -822,7 +809,7 @@ router.post("/../integrations/docker", async (req, res) => {
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
source: containerData.image_source || "docker-hub",
created_at: parse_date(containerData.created_at, now),
last_checked: now,
updated_at: now,
},
@@ -847,7 +834,7 @@ router.post("/../integrations/docker", async (req, res) => {
state: containerData.state || containerData.status,
ports: containerData.ports || null,
started_at: containerData.started_at
  ? parse_date(containerData.started_at, null)
  : null,
updated_at: now,
last_checked: now,
@@ -863,9 +850,9 @@ router.post("/../integrations/docker", async (req, res) => {
status: containerData.status,
state: containerData.state || containerData.status,
ports: containerData.ports || null,
created_at: parse_date(containerData.created_at, now),
started_at: containerData.started_at
  ? parse_date(containerData.started_at, null)
  : null,
updated_at: now,
},
@@ -911,7 +898,7 @@ router.post("/../integrations/docker", async (req, res) => {
? BigInt(imageData.size_bytes)
: null,
source: imageSource,
created_at: parse_date(imageData.created_at, now),
last_checked: now,
updated_at: now,
},

View File

@@ -60,9 +60,14 @@ router.post(
authenticateToken,
[
body("token") body("token")
.notEmpty()
.withMessage("Token is required")
.isString()
.withMessage("Token must be a string")
.isLength({ min: 6, max: 6 }) .isLength({ min: 6, max: 6 })
.withMessage("Token must be 6 digits"), .withMessage("Token must be exactly 6 digits")
body("token").isNumeric().withMessage("Token must contain only numbers"), .matches(/^\d{6}$/)
.withMessage("Token must contain only numbers"),
],
async (req, res) => {
try {
@@ -71,7 +76,11 @@ router.post(
return res.status(400).json({ errors: errors.array() });
}
// Ensure token is a string (convert if needed)
let { token } = req.body;
if (typeof token !== "string") {
  token = String(token);
}
const userId = req.user.id;

// Get user's TFA secret

View File

@@ -3,6 +3,7 @@
const WebSocket = require("ws"); const WebSocket = require("ws");
const url = require("node:url"); const url = require("node:url");
const { get_current_time } = require("../utils/timezone");
// Connection registry by api_id
const apiIdToSocket = new Map();
@@ -48,8 +49,30 @@ function init(server, prismaClient) {
// Accept the WebSocket connection for Bull Board
wss.handleUpgrade(request, socket, head, (ws) => {
  ws.on("message", (message) => {
    // Echo back for Bull Board WebSocket
    try {
      ws.send(message);
    } catch (_err) {
      // Ignore send errors (connection may be closed)
    }
  });
ws.on("error", (err) => {
// Handle WebSocket errors gracefully for Bull Board
if (
err.code === "WS_ERR_INVALID_CLOSE_CODE" ||
err.code === "ECONNRESET" ||
err.code === "EPIPE"
) {
// These are expected errors, just log quietly
console.log("[bullboard-ws] connection error:", err.code);
} else {
console.error("[bullboard-ws] error:", err.message || err);
}
});
ws.on("close", () => {
// Connection closed, no action needed
}); });
}); });
return;
@@ -117,7 +140,58 @@ function init(server, prismaClient) {
}
});
ws.on("close", () => { ws.on("error", (err) => {
// Handle WebSocket errors gracefully without crashing
// Common errors: invalid close codes (1006), connection resets, etc.
if (
err.code === "WS_ERR_INVALID_CLOSE_CODE" ||
err.message?.includes("invalid status code 1006") ||
err.message?.includes("Invalid WebSocket frame")
) {
// 1006 is a special close code indicating abnormal closure
// It cannot be sent in a close frame, but can occur when connection is lost
console.log(
`[agent-ws] connection error for ${apiId} (abnormal closure):`,
err.message || err.code,
);
} else if (
err.code === "ECONNRESET" ||
err.code === "EPIPE" ||
err.message?.includes("read ECONNRESET")
) {
// Connection reset errors are common and expected
console.log(`[agent-ws] connection reset for ${apiId}`);
} else {
// Log other errors for debugging
console.error(
`[agent-ws] error for ${apiId}:`,
err.message || err.code || err,
);
}
// Clean up connection on error
const existing = apiIdToSocket.get(apiId);
if (existing === ws) {
apiIdToSocket.delete(apiId);
connectionMetadata.delete(apiId);
// Notify subscribers of disconnection
notifyConnectionChange(apiId, false);
}
// Try to close the connection gracefully if still open
if (
ws.readyState === WebSocket.OPEN ||
ws.readyState === WebSocket.CONNECTING
) {
try {
ws.close(1000); // Normal closure
} catch {
// Ignore errors when closing
}
}
});
ws.on("close", (code, reason) => {
const existing = apiIdToSocket.get(apiId);
if (existing === ws) {
apiIdToSocket.delete(apiId);
@@ -126,7 +200,7 @@ function init(server, prismaClient) {
notifyConnectionChange(apiId, false);
}
console.log(
  `[agent-ws] disconnected api_id=${apiId} code=${code} reason=${reason || "none"} total=${apiIdToSocket.size}`,
);
}); });
@@ -314,7 +388,7 @@ async function handleDockerStatusEvent(apiId, message) {
status: status,
state: status,
updated_at: new Date(timestamp || Date.now()),
last_checked: get_current_time(),
},
});

View File

@@ -139,15 +139,13 @@ class DockerImageUpdateCheck {
console.log("🐳 Starting Docker image update check..."); console.log("🐳 Starting Docker image update check...");
try { try {
// Get all Docker images that have a digest
// Note: repository is required (non-nullable) in schema, so we don't need to check it
const images = await prisma.docker_images.findMany({
  where: {
    digest: {
      not: null,
    },
  },
include: {
docker_image_updates: true,

View File

@@ -3,6 +3,7 @@ const { redis, redisConnection } = require("./shared/redis");
const { prisma } = require("./shared/prisma");
const agentWs = require("../agentWs");
const { v4: uuidv4 } = require("uuid");
const { get_current_time } = require("../../utils/timezone");
// Import automation classes
const GitHubUpdateCheck = require("./githubUpdateCheck");
@@ -12,6 +13,7 @@ const OrphanedPackageCleanup = require("./orphanedPackageCleanup");
const DockerInventoryCleanup = require("./dockerInventoryCleanup");
const DockerImageUpdateCheck = require("./dockerImageUpdateCheck");
const MetricsReporting = require("./metricsReporting");
const SystemStatistics = require("./systemStatistics");
// Queue names
const QUEUE_NAMES = {
@@ -22,6 +24,7 @@ const QUEUE_NAMES = {
DOCKER_INVENTORY_CLEANUP: "docker-inventory-cleanup",
DOCKER_IMAGE_UPDATE_CHECK: "docker-image-update-check",
METRICS_REPORTING: "metrics-reporting",
SYSTEM_STATISTICS: "system-statistics",
AGENT_COMMANDS: "agent-commands", AGENT_COMMANDS: "agent-commands",
}; };
@@ -105,6 +108,9 @@ class QueueManager {
this.automations[QUEUE_NAMES.METRICS_REPORTING] = new MetricsReporting(
  this,
);
this.automations[QUEUE_NAMES.SYSTEM_STATISTICS] = new SystemStatistics(
  this,
);
console.log("✅ All automation classes initialized"); console.log("✅ All automation classes initialized");
} }
@@ -190,6 +196,15 @@ class QueueManager {
workerOptions,
);
// System Statistics Worker
this.workers[QUEUE_NAMES.SYSTEM_STATISTICS] = new Worker(
  QUEUE_NAMES.SYSTEM_STATISTICS,
  this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].process.bind(
    this.automations[QUEUE_NAMES.SYSTEM_STATISTICS],
  ),
  workerOptions,
);
// Agent Commands Worker
this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
QUEUE_NAMES.AGENT_COMMANDS,
@@ -216,8 +231,8 @@ class QueueManager {
api_id: api_id,
status: "active",
attempt_number: job.attemptsMade + 1,
created_at: get_current_time(),
updated_at: get_current_time(),
},
});
console.log(`📝 Logged job to job_history: ${job.id} (${type})`);
@@ -257,8 +272,8 @@ class QueueManager {
where: { job_id: job.id },
data: {
status: "completed",
completed_at: get_current_time(),
updated_at: get_current_time(),
},
});
console.log(`✅ Marked job as completed in job_history: ${job.id}`);
@@ -271,8 +286,8 @@ class QueueManager {
data: {
status: "failed",
error_message: error.message,
completed_at: get_current_time(),
updated_at: get_current_time(),
},
});
console.log(`❌ Marked job as failed in job_history: ${job.id}`);
@@ -322,6 +337,7 @@ class QueueManager {
await this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK].schedule();
await this.automations[QUEUE_NAMES.METRICS_REPORTING].schedule();
await this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].schedule();
}

/**
@@ -357,6 +373,10 @@ class QueueManager {
].triggerManual();
}
async triggerSystemStatistics() {
  return this.automations[QUEUE_NAMES.SYSTEM_STATISTICS].triggerManual();
}
async triggerMetricsReporting() {
return this.automations[QUEUE_NAMES.METRICS_REPORTING].triggerManual();
}

View File

@@ -0,0 +1,140 @@
const { prisma } = require("./shared/prisma");
const { v4: uuidv4 } = require("uuid");
/**
* System Statistics Collection Automation
* Collects aggregated system-wide statistics every 30 minutes
* for use in package trends charts
*/
class SystemStatistics {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "system-statistics";
  }

  /**
   * Process system statistics collection job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("📊 Starting system statistics collection...");

    try {
      // Calculate unique package counts across all hosts
      const uniquePackagesCount = await prisma.packages.count({
        where: {
          host_packages: {
            some: {
              needs_update: true,
            },
          },
        },
      });

      const uniqueSecurityCount = await prisma.packages.count({
        where: {
          host_packages: {
            some: {
              needs_update: true,
              is_security_update: true,
            },
          },
        },
      });

      // Calculate total unique packages installed on at least one host
      const totalPackages = await prisma.packages.count({
        where: {
          host_packages: {
            some: {}, // At least one host has this package
          },
        },
      });

      // Calculate total hosts
      const totalHosts = await prisma.hosts.count({
        where: {
          status: "active",
        },
      });

      // Calculate hosts needing updates (distinct hosts with packages needing updates)
      const hostsNeedingUpdates = await prisma.hosts.count({
        where: {
          status: "active",
          host_packages: {
            some: {
              needs_update: true,
            },
          },
        },
      });

      // Store statistics in database
      await prisma.system_statistics.create({
        data: {
          id: uuidv4(),
          unique_packages_count: uniquePackagesCount,
          unique_security_count: uniqueSecurityCount,
          total_packages: totalPackages,
          total_hosts: totalHosts,
          hosts_needing_updates: hostsNeedingUpdates,
          timestamp: new Date(),
        },
      });

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ System statistics collection completed in ${executionTime}ms - Unique packages: ${uniquePackagesCount}, Security: ${uniqueSecurityCount}, Total hosts: ${totalHosts}`,
      );

      return {
        success: true,
        uniquePackagesCount,
        uniqueSecurityCount,
        totalPackages,
        totalHosts,
        hostsNeedingUpdates,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ System statistics collection failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring system statistics collection (every 30 minutes)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "system-statistics",
      {},
      {
        repeat: { pattern: "*/30 * * * *" }, // Every 30 minutes
        jobId: "system-statistics-recurring",
      },
    );
    console.log("✅ System statistics collection scheduled (every 30 minutes)");
    return job;
  }

  /**
   * Trigger manual system statistics collection
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "system-statistics-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual system statistics collection triggered");
    return job;
  }
}

module.exports = SystemStatistics;
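
A usage sketch, assuming an initialized queueManager wired up as in index.js above:

// Kick off a one-off collection ahead of the 30-minute schedule
const job = await queueManager.triggerSystemStatistics();
console.log(`Queued system statistics job ${job.id}`);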

View File

@@ -0,0 +1,107 @@
/**
* Timezone utility functions for consistent timestamp handling
*
* This module provides timezone-aware timestamp functions that use
* the TZ environment variable for consistent timezone handling across
* the application. If TZ is not set, defaults to UTC.
*/
/**
* Get the configured timezone from environment variable
* Defaults to UTC if not set
* @returns {string} Timezone string (e.g., 'UTC', 'America/New_York', 'Europe/London')
*/
function get_timezone() {
  return process.env.TZ || process.env.TIMEZONE || "UTC";
}
/**
* Get current date/time in the configured timezone
* Returns a Date object that represents the current time in the configured timezone
* @returns {Date} Current date/time
*/
function get_current_time() {
  const tz = get_timezone();

  // If UTC, use Date.now() which is always UTC
  if (tz === "UTC" || tz === "Etc/UTC") {
    return new Date();
  }

  // For other timezones, we need to create a date string with timezone info
  // and parse it. This ensures the date represents the correct time in that timezone.
  // For database storage, we always store UTC timestamps
  // The timezone is primarily used for display purposes
  return new Date();
}
/**
* Get current timestamp in milliseconds (UTC)
* This is always UTC for database storage consistency
* @returns {number} Current timestamp in milliseconds
*/
function get_current_timestamp() {
  return Date.now();
}
/**
* Format a date to ISO string in the configured timezone
* @param {Date} date - Date to format (defaults to now)
* @returns {string} ISO formatted date string
*/
function format_date_iso(date = null) {
  const d = date || get_current_time();
  return d.toISOString();
}
/**
* Parse a date string and return a Date object
* Handles various date formats and timezone conversions
* @param {string} date_string - Date string to parse
* @param {Date} fallback - Fallback date if parsing fails (defaults to now)
* @returns {Date} Parsed date or fallback
*/
function parse_date(date_string, fallback = null) {
  if (!date_string) {
    return fallback || get_current_time();
  }

  try {
    const date = new Date(date_string);
    if (Number.isNaN(date.getTime())) {
      return fallback || get_current_time();
    }
    return date;
  } catch (_error) {
    return fallback || get_current_time();
  }
}
/**
* Convert a date to the configured timezone for display
* @param {Date} date - Date to convert
* @returns {string} Formatted date string in configured timezone
*/
function format_date_for_display(date) {
  const tz = get_timezone();
  const formatter = new Intl.DateTimeFormat("en-US", {
    timeZone: tz,
    year: "numeric",
    month: "2-digit",
    day: "2-digit",
    hour: "2-digit",
    minute: "2-digit",
    second: "2-digit",
    hour12: false,
  });
  return formatter.format(date);
}
module.exports = {
  get_timezone,
  get_current_time,
  get_current_timestamp,
  format_date_iso,
  parse_date,
  format_date_for_display,
};
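
A short usage sketch of this module (output values are illustrative):

const { parse_date, format_date_for_display, get_current_time } = require("./timezone");

parse_date("2025-11-07T10:00:19Z"); // Date for that instant
parse_date("not-a-date", get_current_time()); // invalid input falls back to "now"
format_date_for_display(get_current_time()); // e.g. "11/07/2025, 10:00:19" in the configured TZ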

View File

@@ -237,8 +237,14 @@ const Repositories = () => {
// Handle special cases
if (sortField === "security") {
// Use the same logic as filtering to determine isSecure
const aIsSecure =
  a.isSecure !== undefined ? a.isSecure : a.url.startsWith("https://");
const bIsSecure =
  b.isSecure !== undefined ? b.isSecure : b.url.startsWith("https://");
// Sort by boolean: true (Secure) comes before false (Insecure) when ascending
aValue = aIsSecure ? 1 : 0;
bValue = bIsSecure ? 1 : 0;
} else if (sortField === "status") { } else if (sortField === "status") {
aValue = a.is_active ? "Active" : "Inactive"; aValue = a.is_active ? "Active" : "Inactive";
bValue = b.is_active ? "Active" : "Inactive"; bValue = b.is_active ? "Active" : "Inactive";
@@ -535,12 +541,12 @@ const Repositories = () => {
{visibleColumns.map((column) => (
<th
key={column.id}
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider"
>
<button
type="button"
onClick={() => handleSort(column.id)}
className="flex items-center justify-start gap-1 hover:text-secondary-700 dark:hover:text-secondary-200 transition-colors"
>
{column.label}
{getSortIcon(column.id)}
@@ -559,7 +565,7 @@ const Repositories = () => {
{visibleColumns.map((column) => (
<td
key={column.id}
className="px-4 py-2 whitespace-nowrap text-left"
>
{renderCellContent(column, repo)}
</td>
@@ -622,7 +628,7 @@ const Repositories = () => {
? repo.isSecure
: repo.url.startsWith("https://");
return (
<div className="flex items-center justify-start">
{isSecure ? (
<div className="flex items-center gap-1 text-green-600">
<Lock className="h-4 w-4" />
@@ -651,14 +657,14 @@ const Repositories = () => {
);
case "hostCount":
return (
<div className="flex items-center justify-start gap-1 text-sm text-secondary-900 dark:text-white">
<Server className="h-4 w-4" />
<span>{repo.hostCount}</span>
</div>
);
case "actions":
return (
<div className="flex items-center justify-start">
<button
type="button"
onClick={(e) => handleDeleteRepository(repo, e)}

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "patchmon",
"version": "1.3.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "patchmon",
"version": "1.3.2",
"license": "AGPL-3.0", "license": "AGPL-3.0",
"workspaces": [ "workspaces": [
"backend", "backend",
@@ -23,7 +23,7 @@
},
"backend": {
"name": "patchmon-backend",
"version": "1.3.2",
"license": "AGPL-3.0",
"dependencies": {
"@bull-board/api": "^6.13.1",
@@ -59,7 +59,7 @@
},
"frontend": {
"name": "patchmon-frontend",
"version": "1.3.2",
"license": "AGPL-3.0",
"dependencies": {
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/core": "^6.3.1", "@dnd-kit/core": "^6.3.1",