Fixed reporting mechanism

Muhammad Ibrahim
2025-11-07 18:33:17 +00:00
parent e73ebc383c
commit f5b0e930f7
13 changed files with 767 additions and 121 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -136,10 +136,34 @@ if [[ -z "$PATCHMON_URL" ]] || [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
error "Missing required parameters. This script should be called via the PatchMon web interface." error "Missing required parameters. This script should be called via the PatchMon web interface."
fi fi
# Parse architecture parameter (default to amd64) # Auto-detect architecture if not explicitly set
ARCHITECTURE="${ARCHITECTURE:-amd64}" if [[ -z "$ARCHITECTURE" ]]; then
if [[ "$ARCHITECTURE" != "amd64" && "$ARCHITECTURE" != "386" && "$ARCHITECTURE" != "arm64" ]]; then arch_raw=$(uname -m 2>/dev/null || echo "unknown")
error "Invalid architecture '$ARCHITECTURE'. Must be one of: amd64, 386, arm64"
# Map architecture to supported values
case "$arch_raw" in
"x86_64")
ARCHITECTURE="amd64"
;;
"i386"|"i686")
ARCHITECTURE="386"
;;
"aarch64"|"arm64")
ARCHITECTURE="arm64"
;;
"armv7l"|"armv6l"|"arm")
ARCHITECTURE="arm"
;;
*)
warning "⚠️ Unknown architecture '$arch_raw', defaulting to amd64"
ARCHITECTURE="amd64"
;;
esac
fi
# Validate architecture
if [[ "$ARCHITECTURE" != "amd64" && "$ARCHITECTURE" != "386" && "$ARCHITECTURE" != "arm64" && "$ARCHITECTURE" != "arm" ]]; then
error "Invalid architecture '$ARCHITECTURE'. Must be one of: amd64, 386, arm64, arm"
fi
# Check if --force flag is set (for bypassing broken packages)
@@ -234,7 +258,98 @@ install_apt_packages() {
fi
}
# Function to check and install packages for yum/dnf
install_yum_dnf_packages() {
local pkg_manager="$1"
shift
local packages=("$@")
local missing_packages=()
# Check which packages are missing
for pkg in "${packages[@]}"; do
if ! command_exists "$pkg"; then
missing_packages+=("$pkg")
fi
done
if [ ${#missing_packages[@]} -eq 0 ]; then
success "All required packages are already installed"
return 0
fi
info "Need to install: ${missing_packages[*]}"
if [[ "$pkg_manager" == "yum" ]]; then
yum install -y "${missing_packages[@]}"
else
dnf install -y "${missing_packages[@]}"
fi
}
# Function to check and install packages for zypper
install_zypper_packages() {
local packages=("$@")
local missing_packages=()
# Check which packages are missing
for pkg in "${packages[@]}"; do
if ! command_exists "$pkg"; then
missing_packages+=("$pkg")
fi
done
if [ ${#missing_packages[@]} -eq 0 ]; then
success "All required packages are already installed"
return 0
fi
info "Need to install: ${missing_packages[*]}"
zypper install -y "${missing_packages[@]}"
}
# Function to check and install packages for pacman
install_pacman_packages() {
local packages=("$@")
local missing_packages=()
# Check which packages are missing
for pkg in "${packages[@]}"; do
if ! command_exists "$pkg"; then
missing_packages+=("$pkg")
fi
done
if [ ${#missing_packages[@]} -eq 0 ]; then
success "All required packages are already installed"
return 0
fi
info "Need to install: ${missing_packages[*]}"
pacman -S --noconfirm "${missing_packages[@]}"
}
# Function to check and install packages for apk
install_apk_packages() {
local packages=("$@")
local missing_packages=()
# Check which packages are missing
for pkg in "${packages[@]}"; do
if ! command_exists "$pkg"; then
missing_packages+=("$pkg")
fi
done
if [ ${#missing_packages[@]} -eq 0 ]; then
success "All required packages are already installed"
return 0
fi
info "Need to install: ${missing_packages[*]}"
apk add --no-cache "${missing_packages[@]}"
}
# Detect package manager and install jq, curl, and bc
if command -v apt-get >/dev/null 2>&1; then
# Debian/Ubuntu
info "Detected apt-get (Debian/Ubuntu)"
@@ -260,31 +375,31 @@ elif command -v yum >/dev/null 2>&1; then
info "Detected yum (CentOS/RHEL 7)" info "Detected yum (CentOS/RHEL 7)"
echo "" echo ""
info "Installing jq, curl, and bc..." info "Installing jq, curl, and bc..."
yum install -y jq curl bc install_yum_dnf_packages yum jq curl bc
elif command -v dnf >/dev/null 2>&1; then elif command -v dnf >/dev/null 2>&1; then
# CentOS/RHEL 8+/Fedora # CentOS/RHEL 8+/Fedora
info "Detected dnf (CentOS/RHEL 8+/Fedora)" info "Detected dnf (CentOS/RHEL 8+/Fedora)"
echo "" echo ""
info "Installing jq, curl, and bc..." info "Installing jq, curl, and bc..."
dnf install -y jq curl bc install_yum_dnf_packages dnf jq curl bc
elif command -v zypper >/dev/null 2>&1; then elif command -v zypper >/dev/null 2>&1; then
# openSUSE # openSUSE
info "Detected zypper (openSUSE)" info "Detected zypper (openSUSE)"
echo "" echo ""
info "Installing jq, curl, and bc..." info "Installing jq, curl, and bc..."
zypper install -y jq curl bc install_zypper_packages jq curl bc
elif command -v pacman >/dev/null 2>&1; then elif command -v pacman >/dev/null 2>&1; then
# Arch Linux # Arch Linux
info "Detected pacman (Arch Linux)" info "Detected pacman (Arch Linux)"
echo "" echo ""
info "Installing jq, curl, and bc..." info "Installing jq, curl, and bc..."
pacman -S --noconfirm jq curl bc install_pacman_packages jq curl bc
elif command -v apk >/dev/null 2>&1; then elif command -v apk >/dev/null 2>&1; then
# Alpine Linux # Alpine Linux
info "Detected apk (Alpine Linux)" info "Detected apk (Alpine Linux)"
echo "" echo ""
info "Installing jq, curl, and bc..." info "Installing jq, curl, and bc..."
apk add --no-cache jq curl bc install_apk_packages jq curl bc
else else
warning "Could not detect package manager. Please ensure 'jq', 'curl', and 'bc' are installed manually." warning "Could not detect package manager. Please ensure 'jq', 'curl', and 'bc' are installed manually."
fi fi
@@ -464,15 +579,8 @@ else
error "❌ Failed to validate API credentials or reach server" error "❌ Failed to validate API credentials or reach server"
fi fi
# Step 5: Send initial data and setup systemd service # Step 5: Setup systemd service for WebSocket connection
info "📊 Sending initial package data to server..." # Note: The service will automatically send an initial report on startup (see serve.go)
if /usr/local/bin/patchmon-agent report; then
success "✅ UPDATE: Initial package data sent successfully"
else
warning "⚠️ Failed to send initial data. You can retry later with: /usr/local/bin/patchmon-agent report"
fi
# Step 6: Setup systemd service for WebSocket connection
info "🔧 Setting up systemd service..." info "🔧 Setting up systemd service..."
# Stop and disable existing service if it exists # Stop and disable existing service if it exists

View File

@@ -11,10 +11,16 @@ const {
requireManageSettings,
} = require("../middleware/permissions");
const { queueManager, QUEUE_NAMES } = require("../services/automation");
const { pushIntegrationToggle, isConnected } = require("../services/agentWs");
const agentVersionService = require("../services/agentVersionService");
const router = express.Router();
const prisma = getPrismaClient();
// In-memory cache for integration states (api_id -> { integration_name -> enabled })
// This stores the last known state from successful toggles
const integrationStateCache = new Map();
// Secure endpoint to download the agent script/binary (requires API authentication)
router.get("/agent/download", async (req, res) => {
try {
@@ -128,9 +134,6 @@ router.get("/agent/version", async (req, res) => {
try {
const fs = require("node:fs");
const path = require("node:path");
// Get architecture parameter (default to amd64 for Go agents)
const architecture = req.query.arch || "amd64";
@@ -165,53 +168,108 @@ router.get("/agent/version", async (req, res) => {
minServerVersion: null,
});
} else {
// Go agent version check
// Detect server architecture and map to Go architecture names
const os = require("node:os");
const { exec } = require("node:child_process");
const { promisify } = require("node:util");
const execAsync = promisify(exec);
const serverArch = os.arch();
// Map Node.js architecture to Go architecture names
const archMap = {
x64: "amd64",
ia32: "386",
arm64: "arm64",
arm: "arm",
};
const serverGoArch = archMap[serverArch] || serverArch;
// If requested architecture matches server architecture, execute the binary
if (architecture === serverGoArch) {
const binaryName = `patchmon-agent-linux-${architecture}`;
const binaryPath = path.join(__dirname, "../../../agents", binaryName);
if (!fs.existsSync(binaryPath)) {
// Binary doesn't exist, fall back to GitHub
console.log(`Binary ${binaryName} not found, falling back to GitHub`);
} else {
// Execute the binary to get its version
try {
const { stdout } = await execAsync(`${binaryPath} --help`, {
timeout: 10000,
});
// Parse version from help output (e.g., "PatchMon Agent v1.3.1")
const versionMatch = stdout.match(
/PatchMon Agent v([0-9]+\.[0-9]+\.[0-9]+)/i,
);
if (versionMatch) {
const serverVersion = versionMatch[1];
const agentVersion = req.query.currentVersion || serverVersion;
// Simple version comparison (assuming semantic versioning)
const hasUpdate = agentVersion !== serverVersion;
return res.json({
currentVersion: agentVersion,
latestVersion: serverVersion,
hasUpdate: hasUpdate,
downloadUrl: `/api/v1/hosts/agent/download?arch=${architecture}`,
releaseNotes: `PatchMon Agent v${serverVersion}`,
minServerVersion: null,
architecture: architecture,
agentType: "go",
});
}
} catch (execError) {
// Execution failed, fall back to GitHub
console.log(
`Failed to execute binary ${binaryName}: ${execError.message}, falling back to GitHub`,
);
}
}
}
// Fall back to GitHub if architecture doesn't match or binary execution failed
try {
const versionInfo = await agentVersionService.getVersionInfo();
const latestVersion = versionInfo.latestVersion;
const agentVersion =
req.query.currentVersion || latestVersion || "unknown";
if (!latestVersion) {
return res.status(503).json({
error: "Unable to determine latest version from GitHub releases",
currentVersion: agentVersion,
latestVersion: null,
hasUpdate: false,
});
}
// Simple version comparison (assuming semantic versioning)
const hasUpdate =
agentVersion !== latestVersion && latestVersion !== null;
res.json({
currentVersion: agentVersion,
latestVersion: latestVersion,
hasUpdate: hasUpdate,
downloadUrl: `/api/v1/hosts/agent/download?arch=${architecture}`,
releaseNotes: `PatchMon Agent v${latestVersion}`,
minServerVersion: null,
architecture: architecture,
agentType: "go",
});
} catch (serviceError) {
console.error(
"Failed to get version from agentVersionService:",
serviceError.message,
);
return res.status(500).json({
error: "Failed to get agent version from service",
details: serviceError.message,
});
}
}
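For orientation, the resolution order implemented above can be condensed into a short sketch (the helper name resolveLatestAgentVersion is hypothetical, and execFile is used instead of exec purely to avoid shell quoting): try the locally bundled binary only when the requested architecture matches the server's own CPU, otherwise fall back to agentVersionService (GitHub releases).

// Sketch, not part of the commit.
const os = require("node:os");
const fs = require("node:fs");
const path = require("node:path");
const { execFile } = require("node:child_process");
const { promisify } = require("node:util");
const execFileAsync = promisify(execFile);

const GO_ARCH = { x64: "amd64", ia32: "386", arm64: "arm64", arm: "arm" };

async function resolveLatestAgentVersion(requestedArch, agentVersionService) {
  // 1. Only try the local binary when it was built for the server's own CPU.
  if (requestedArch === (GO_ARCH[os.arch()] || os.arch())) {
    const binaryPath = path.join(__dirname, "../../../agents", `patchmon-agent-linux-${requestedArch}`);
    if (fs.existsSync(binaryPath)) {
      try {
        const { stdout } = await execFileAsync(binaryPath, ["--help"], { timeout: 10000 });
        const match = stdout.match(/PatchMon Agent v([0-9]+\.[0-9]+\.[0-9]+)/i);
        if (match) return { latestVersion: match[1], source: "local-binary" };
      } catch {
        // Execution failed, fall through to GitHub.
      }
    }
  }
  // 2. Otherwise ask the version service (GitHub releases) for the latest tag.
  const { latestVersion } = await agentVersionService.getVersionInfo();
  return { latestVersion, source: "github" };
}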
@@ -1616,10 +1674,14 @@ router.get("/install", async (req, res) => {
// Check for --force parameter
const forceInstall = req.query.force === "true" || req.query.force === "1";
// Get architecture parameter (only set if explicitly provided, otherwise let script auto-detect)
const architecture = req.query.arch;
// Inject the API credentials, server URL, curl flags, SSL verify flag, force flag, and architecture into the script
// Only set ARCHITECTURE if explicitly provided, otherwise let the script auto-detect
const archExport = architecture
? `export ARCHITECTURE="${architecture}"\n`
: "";
const envVars = `#!/bin/bash
export PATCHMON_URL="${serverUrl}"
export API_ID="${host.api_id}"
@@ -1627,8 +1689,7 @@ export API_KEY="${host.api_key}"
export CURL_FLAGS="${curlFlags}"
export SKIP_SSL_VERIFY="${skipSSLVerify}"
export FORCE_INSTALL="${forceInstall ? "true" : "false"}"
${archExport}
`;
// Remove the shebang from the original script and prepend our env vars
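As an illustration of the new behaviour (not part of the commit; buildEnvHeader and all values below are made up), the injected header only contains an ARCHITECTURE export when ?arch= was passed, so a plain install lets the script's uname detection run:

// Sketch only; the real route also exports CURL_FLAGS, SKIP_SSL_VERIFY and FORCE_INSTALL.
function buildEnvHeader({ serverUrl, apiId, apiKey, architecture }) {
  const archExport = architecture ? `export ARCHITECTURE="${architecture}"\n` : "";
  return `#!/bin/bash
export PATCHMON_URL="${serverUrl}"
export API_ID="${apiId}"
export API_KEY="${apiKey}"
${archExport}`;
}

// With ?arch=arm64 the generated script pins ARCHITECTURE and skips detection.
console.log(buildEnvHeader({ serverUrl: "https://patchmon.example.com", apiId: "patchmon_xxx", apiKey: "***", architecture: "arm64" }));
// Without it, no ARCHITECTURE line is emitted and the install script auto-detects.
console.log(buildEnvHeader({ serverUrl: "https://patchmon.example.com", apiId: "patchmon_xxx", apiKey: "***" }));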
@@ -2103,4 +2164,137 @@ router.patch(
},
);
// Get integration status for a host
router.get(
"/:hostId/integrations",
authenticateToken,
requireManageHosts,
async (req, res) => {
try {
const { hostId } = req.params;
// Get host to verify it exists
const host = await prisma.hosts.findUnique({
where: { id: hostId },
select: { id: true, api_id: true, friendly_name: true },
});
if (!host) {
return res.status(404).json({ error: "Host not found" });
}
// Check if agent is connected
const connected = isConnected(host.api_id);
// Get integration states from cache (or defaults if not cached)
// Default: all integrations are disabled
const cachedState = integrationStateCache.get(host.api_id) || {};
const integrations = {
docker: cachedState.docker || false, // Default: disabled
// Future integrations can be added here
};
res.json({
success: true,
data: {
integrations,
connected,
host: {
id: host.id,
friendlyName: host.friendly_name,
apiId: host.api_id,
},
},
});
} catch (error) {
console.error("Get integration status error:", error);
res.status(500).json({ error: "Failed to get integration status" });
}
},
);
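For reference, a response from this endpoint looks roughly like the following (illustrative values; docker defaults to false until a toggle has been cached):

// Example only, not part of the commit.
const exampleIntegrationStatus = {
  success: true,
  data: {
    integrations: { docker: false }, // default when nothing has been toggled since the server started
    connected: true,
    host: { id: "<host-uuid>", friendlyName: "<friendly name>", apiId: "<api id>" },
  },
};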
// Toggle integration status for a host
router.post(
"/:hostId/integrations/:integrationName/toggle",
authenticateToken,
requireManageHosts,
[body("enabled").isBoolean().withMessage("Enabled status must be a boolean")],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}
const { hostId, integrationName } = req.params;
const { enabled } = req.body;
// Validate integration name
const validIntegrations = ["docker"]; // Add more as they're implemented
if (!validIntegrations.includes(integrationName)) {
return res.status(400).json({
error: "Invalid integration name",
validIntegrations,
});
}
// Get host to verify it exists
const host = await prisma.hosts.findUnique({
where: { id: hostId },
select: { id: true, api_id: true, friendly_name: true },
});
if (!host) {
return res.status(404).json({ error: "Host not found" });
}
// Check if agent is connected
if (!isConnected(host.api_id)) {
return res.status(503).json({
error: "Agent is not connected",
message:
"The agent must be connected via WebSocket to toggle integrations",
});
}
// Send WebSocket message to agent
const success = pushIntegrationToggle(
host.api_id,
integrationName,
enabled,
);
if (!success) {
return res.status(503).json({
error: "Failed to send integration toggle",
message: "Agent connection may have been lost",
});
}
// Update cache with new state
if (!integrationStateCache.has(host.api_id)) {
integrationStateCache.set(host.api_id, {});
}
integrationStateCache.get(host.api_id)[integrationName] = enabled;
res.json({
success: true,
message: `Integration ${integrationName} ${enabled ? "enabled" : "disabled"} successfully`,
data: {
integration: integrationName,
enabled,
host: {
id: host.id,
friendlyName: host.friendly_name,
apiId: host.api_id,
},
},
});
} catch (error) {
console.error("Toggle integration error:", error);
res.status(500).json({ error: "Failed to toggle integration" });
}
},
);
module.exports = router;
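A rough client-side view of the new toggle flow (sketch, not part of the commit; the paths assume this router is mounted at /api/v1/hosts and that authenticateToken accepts a bearer token). Note that integrationStateCache lives in server memory, so the reported state falls back to the defaults whenever the backend restarts:

const axios = require("axios");

async function enableDocker(baseUrl, token, hostId) {
  const headers = { Authorization: `Bearer ${token}` }; // auth scheme is an assumption
  // Push the toggle to the connected agent; the route returns 503 if the agent has no WebSocket session.
  await axios.post(`${baseUrl}/api/v1/hosts/${hostId}/integrations/docker/toggle`, { enabled: true }, { headers });
  // Read back the cached state.
  const { data } = await axios.get(`${baseUrl}/api/v1/hosts/${hostId}/integrations`, { headers });
  return data.data.integrations.docker; // true after a successful toggle
}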

View File

@@ -49,12 +49,12 @@ function init(server, prismaClient) {
// Accept the WebSocket connection for Bull Board
wss.handleUpgrade(request, socket, head, (ws) => {
ws.on("message", (message) => {
// Echo back for Bull Board WebSocket
try {
ws.send(message);
} catch (_err) {
// Ignore send errors (connection may be closed)
}
});
ws.on("error", (err) => {
@@ -255,6 +255,29 @@ function pushUpdateAgent(apiId) {
safeSend(ws, JSON.stringify({ type: "update_agent" }));
}
function pushIntegrationToggle(apiId, integrationName, enabled) {
const ws = apiIdToSocket.get(apiId);
if (ws && ws.readyState === WebSocket.OPEN) {
safeSend(
ws,
JSON.stringify({
type: "integration_toggle",
integration: integrationName,
enabled: enabled,
}),
);
console.log(
`📤 Pushed integration toggle to agent ${apiId}: ${integrationName} = ${enabled}`,
);
return true;
} else {
console.log(
`⚠️ Agent ${apiId} not connected, cannot push integration toggle, please edit config.yml manually`,
);
return false;
}
}
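For illustration only (the real consumer is the Go agent), a minimal Node ws client receiving this push would see the message shape sent above; the connection URL and auth headers here are assumptions, not values taken from the codebase:

const WebSocket = require("ws");

const ws = new WebSocket("wss://patchmon.example.com/agent-ws", {
  headers: { "X-API-ID": "patchmon_xxx", "X-API-KEY": "***" }, // hypothetical auth headers
});

ws.on("message", (raw) => {
  const msg = JSON.parse(raw);
  if (msg.type === "integration_toggle") {
    // e.g. { type: "integration_toggle", integration: "docker", enabled: true }
    console.log(`toggle ${msg.integration} -> ${msg.enabled}`);
  }
});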
function getConnectionByApiId(apiId) {
return apiIdToSocket.get(apiId);
}
@@ -414,6 +437,7 @@ module.exports = {
pushReportNow,
pushSettingsUpdate,
pushUpdateAgent,
pushIntegrationToggle,
pushUpdateNotification,
pushUpdateNotificationToAll,
// Expose read-only view of connected agents

View File

@@ -120,7 +120,6 @@ const Layout = ({ children }) => {
name: "Automation", name: "Automation",
href: "/automation", href: "/automation",
icon: RefreshCw, icon: RefreshCw,
new: true,
}); });
if (canViewReports()) { if (canViewReports()) {

View File

@@ -196,6 +196,25 @@ const Automation = () => {
year: "numeric", year: "numeric",
}); });
} }
if (schedule === "Every 30 minutes") {
const now = new Date();
const nextRun = new Date(now);
// Round up to the next 30-minute mark
const minutes = now.getMinutes();
if (minutes < 30) {
nextRun.setMinutes(30, 0, 0);
} else {
nextRun.setHours(nextRun.getHours() + 1, 0, 0, 0);
}
return nextRun.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
return "Unknown"; return "Unknown";
}; };
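The same 30-minute rounding appears twice below (once for display, once for sort order); a shared helper with example inputs makes the rule explicit (extracting it is a suggestion, not part of the commit):

function nextHalfHour(from = new Date()) {
  const next = new Date(from);
  if (from.getMinutes() < 30) {
    next.setMinutes(30, 0, 0); // e.g. 14:12 -> 14:30
  } else {
    next.setHours(next.getHours() + 1, 0, 0, 0); // e.g. 14:45 -> 15:00
  }
  return next;
}
// nextHalfHour(new Date("2025-11-07T18:33:00")) -> 2025-11-07T19:00:00 local time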
@@ -236,6 +255,18 @@ const Automation = () => {
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
return nextHour.getTime();
}
if (schedule === "Every 30 minutes") {
const now = new Date();
const nextRun = new Date(now);
// Round up to the next 30-minute mark
const minutes = now.getMinutes();
if (minutes < 30) {
nextRun.setMinutes(30, 0, 0);
} else {
nextRun.setHours(nextRun.getHours() + 1, 0, 0, 0);
}
return nextRun.getTime();
}
return Number.MAX_SAFE_INTEGER; // Unknown schedules go to bottom
};
@@ -294,6 +325,8 @@ const Automation = () => {
endpoint = "/automation/trigger/docker-inventory-cleanup"; endpoint = "/automation/trigger/docker-inventory-cleanup";
} else if (jobType === "agent-collection") { } else if (jobType === "agent-collection") {
endpoint = "/automation/trigger/agent-collection"; endpoint = "/automation/trigger/agent-collection";
} else if (jobType === "system-statistics") {
endpoint = "/automation/trigger/system-statistics";
}
const _response = await api.post(endpoint, data);
@@ -615,6 +648,10 @@ const Automation = () => {
automation.queue.includes("agent-commands")
) {
triggerManualJob("agent-collection");
} else if (
automation.queue.includes("system-statistics")
) {
triggerManualJob("system-statistics");
}
}}
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"

View File

@@ -55,6 +55,8 @@ const Dashboard = () => {
const [cardPreferences, setCardPreferences] = useState([]);
const [packageTrendsPeriod, setPackageTrendsPeriod] = useState("1"); // days
const [packageTrendsHost, setPackageTrendsHost] = useState("all"); // host filter
const [systemStatsJobId, setSystemStatsJobId] = useState(null); // Track job ID for system statistics
const [isTriggeringJob, setIsTriggeringJob] = useState(false);
const navigate = useNavigate();
const { isDark } = useTheme();
const { user } = useAuth();
@@ -772,56 +774,108 @@ const Dashboard = () => {
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
Package Trends Over Time
</h3>
<div className="flex items-center gap-3"> <div className="flex flex-col gap-2">
{/* Refresh Button */} <div className="flex items-center gap-3">
<button {/* Refresh Button */}
type="button" <button
onClick={() => refetchPackageTrends()} type="button"
disabled={packageTrendsFetching} onClick={async () => {
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white hover:bg-secondary-50 dark:hover:bg-secondary-700 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2" if (packageTrendsHost === "all") {
title="Refresh data" // For "All Hosts", trigger system statistics collection job
> setIsTriggeringJob(true);
<RefreshCw try {
className={`h-4 w-4 ${packageTrendsFetching ? "animate-spin" : ""}`} const response =
/> await dashboardAPI.triggerSystemStatistics();
Refresh if (response.data?.data?.jobId) {
</button> setSystemStatsJobId(response.data.data.jobId);
// Wait a moment for the job to complete, then refetch
setTimeout(() => {
refetchPackageTrends();
}, 2000);
// Clear the job ID message after 2 seconds
setTimeout(() => {
setSystemStatsJobId(null);
}, 2000);
}
} catch (error) {
console.error(
"Failed to trigger system statistics:",
error,
);
// Still refetch data even if job trigger fails
refetchPackageTrends();
} finally {
setIsTriggeringJob(false);
}
} else {
// For individual host, just refetch the data
refetchPackageTrends();
}
}}
disabled={packageTrendsFetching || isTriggeringJob}
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white hover:bg-secondary-50 dark:hover:bg-secondary-700 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
title={
packageTrendsHost === "all"
? "Trigger system statistics collection"
: "Refresh data"
}
>
<RefreshCw
className={`h-4 w-4 ${
packageTrendsFetching || isTriggeringJob
? "animate-spin"
: ""
}`}
/>
Refresh
</button>
{/* Period Selector */}
<select
value={packageTrendsPeriod}
onChange={(e) => setPackageTrendsPeriod(e.target.value)}
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:ring-2 focus:ring-primary-500 focus:border-primary-500"
>
<option value="1">Last 24 hours</option>
<option value="7">Last 7 days</option>
<option value="30">Last 30 days</option>
<option value="90">Last 90 days</option>
<option value="180">Last 6 months</option>
<option value="365">Last year</option>
</select>
{/* Host Selector */}
<select
value={packageTrendsHost}
onChange={(e) => {
setPackageTrendsHost(e.target.value);
// Clear job ID message when host selection changes
setSystemStatsJobId(null);
}}
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:ring-2 focus:ring-primary-500 focus:border-primary-500"
>
<option value="all">All Hosts</option>
{packageTrendsData?.hosts?.length > 0 ? (
packageTrendsData.hosts.map((host) => (
<option key={host.id} value={host.id}>
{host.friendly_name || host.hostname}
</option>
))
) : (
<option disabled>
{packageTrendsLoading
? "Loading hosts..."
: "No hosts available"}
</option>
)}
</select>
</div>
{/* Job ID Message */}
{systemStatsJobId && packageTrendsHost === "all" && (
<p className="text-xs text-secondary-600 dark:text-secondary-400 ml-1">
Ran collection job #{systemStatsJobId}
</p>
)}
</div>
</div>
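Pulled out of the JSX for readability, the "All Hosts" refresh flow added above is essentially the following (sketch only; the fixed 2-second wait is a heuristic and can race with a slow collection job):

async function refreshAllHostsTrends({ dashboardAPI, refetchPackageTrends, setSystemStatsJobId, setIsTriggeringJob }) {
  setIsTriggeringJob(true);
  try {
    const response = await dashboardAPI.triggerSystemStatistics();
    const jobId = response.data?.data?.jobId;
    if (jobId) {
      setSystemStatsJobId(jobId);
      setTimeout(() => refetchPackageTrends(), 2000); // give the worker time to write stats
      setTimeout(() => setSystemStatsJobId(null), 2000); // hide the "Ran collection job" note
    }
  } catch (error) {
    console.error("Failed to trigger system statistics:", error);
    refetchPackageTrends(); // still refresh with whatever data exists
  } finally {
    setIsTriggeringJob(false);
  }
}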
@@ -1167,13 +1221,40 @@ const Dashboard = () => {
title: (context) => {
const label = context[0].label;
// Handle "Now" label
if (label === "Now") {
return "Now";
}
// Handle empty or invalid labels
if (!label || typeof label !== "string") {
return "Unknown Date";
}
// Check if it's a full ISO timestamp (for "Last 24 hours")
// Format: "2025-01-15T14:30:00.000Z" or "2025-01-15T14:30:00.000"
if (label.includes("T") && label.includes(":")) {
try {
const date = new Date(label);
// Check if date is valid
if (Number.isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
// Format full ISO timestamp with date and time
return date.toLocaleDateString("en-US", {
month: "short",
day: "numeric",
hour: "numeric",
minute: "2-digit",
hour12: true,
});
} catch (_error) {
return label; // Return original label if parsing fails
}
}
// Format hourly labels (e.g., "2025-10-07T14" -> "Oct 7, 2:00 PM")
if (label.includes("T") && !label.includes(":")) {
try {
const date = new Date(`${label}:00:00`);
// Check if date is valid
@@ -1233,13 +1314,41 @@ const Dashboard = () => {
callback: function (value, _index, _ticks) {
const label = this.getLabelForValue(value);
// Handle "Now" label
if (label === "Now") {
return "Now";
}
// Handle empty or invalid labels
if (!label || typeof label !== "string") {
return "Unknown";
}
// Check if it's a full ISO timestamp (for "Last 24 hours")
// Format: "2025-01-15T14:30:00.000Z" or "2025-01-15T14:30:00.000"
if (label.includes("T") && label.includes(":")) {
try {
const date = new Date(label);
// Check if date is valid
if (Number.isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
// Extract hour from full ISO timestamp
const hourNum = date.getHours();
return hourNum === 0
? "12 AM"
: hourNum < 12
? `${hourNum} AM`
: hourNum === 12
? "12 PM"
: `${hourNum - 12} PM`;
} catch (_error) {
return label; // Return original label if parsing fails
}
}
// Format hourly labels (e.g., "2025-10-07T14" -> "2 PM")
if (label.includes("T") && !label.includes(":")) {
try {
const hour = label.split("T")[1];
const hourNum = parseInt(hour, 10);
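Condensed sketch (not part of the commit) of the label handling added in both chart callbacks: "Now" passes through, full ISO strings (containing ":") come from the 24-hour view, and "YYYY-MM-DDTHH" buckets come from longer ranges.

function formatTrendLabel(label) {
  if (label === "Now") return "Now";
  if (!label || typeof label !== "string") return "Unknown";
  if (label.includes("T") && label.includes(":")) {
    const date = new Date(label); // full ISO timestamp
    if (Number.isNaN(date.getTime())) return label;
    return date.toLocaleDateString("en-US", { month: "short", day: "numeric", hour: "numeric", minute: "2-digit", hour12: true });
  }
  if (label.includes("T")) {
    const date = new Date(`${label}:00:00`); // hourly bucket
    return Number.isNaN(date.getTime()) ? label : date.toLocaleTimeString("en-US", { hour: "numeric", hour12: true });
  }
  return label;
}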

View File

@@ -281,6 +281,67 @@ const HostDetail = () => {
},
});
// Fetch integration status
const {
data: integrationsData,
isLoading: isLoadingIntegrations,
refetch: refetchIntegrations,
} = useQuery({
queryKey: ["host-integrations", hostId],
queryFn: () =>
adminHostsAPI.getIntegrations(hostId).then((res) => res.data),
staleTime: 30 * 1000, // 30 seconds
refetchOnWindowFocus: false,
enabled: !!hostId && activeTab === "integrations",
});
// Refetch integrations when WebSocket status changes (e.g., after agent restart)
useEffect(() => {
if (
wsStatus?.connected &&
activeTab === "integrations" &&
integrationsData?.data?.connected === false
) {
// Agent just reconnected, refetch integrations to get updated connection status
refetchIntegrations();
}
}, [
wsStatus?.connected,
activeTab,
integrationsData?.data?.connected,
refetchIntegrations,
]);
// Toggle integration mutation
const toggleIntegrationMutation = useMutation({
mutationFn: ({ integrationName, enabled }) =>
adminHostsAPI
.toggleIntegration(hostId, integrationName, enabled)
.then((res) => res.data),
onSuccess: (data) => {
// Optimistically update the cache with the new state
queryClient.setQueryData(["host-integrations", hostId], (oldData) => {
if (!oldData) return oldData;
return {
...oldData,
data: {
...oldData.data,
integrations: {
...oldData.data.integrations,
[data.data.integration]: data.data.enabled,
},
},
};
});
// Also invalidate to ensure we get fresh data
queryClient.invalidateQueries(["host-integrations", hostId]);
},
onError: () => {
// On error, refetch to get the actual state
refetchIntegrations();
},
});
const handleDeleteHost = async () => {
if (
window.confirm(
@@ -666,6 +727,17 @@ const HostDetail = () => {
>
Notes
</button>
<button
type="button"
onClick={() => handleTabChange("integrations")}
className={`px-4 py-2 text-sm font-medium ${
activeTab === "integrations"
? "text-primary-600 dark:text-primary-400 border-b-2 border-primary-500"
: "text-secondary-500 dark:text-secondary-400 hover:text-secondary-700 dark:hover:text-secondary-300"
}`}
>
Integrations
</button>
</div>
<div className="p-4">
@@ -1446,6 +1518,101 @@ const HostDetail = () => {
{/* Agent Queue */}
{activeTab === "queue" && <AgentQueueTab hostId={hostId} />}
{/* Integrations */}
{activeTab === "integrations" && (
<div className="max-w-2xl space-y-4">
{isLoadingIntegrations ? (
<div className="flex items-center justify-center h-32">
<RefreshCw className="h-6 w-6 animate-spin text-primary-600" />
</div>
) : (
<div className="space-y-4">
{/* Docker Integration */}
<div className="bg-secondary-50 dark:bg-secondary-700 rounded-lg p-4 border border-secondary-200 dark:border-secondary-600">
<div className="flex items-start justify-between gap-4">
<div className="flex-1">
<div className="flex items-center gap-3 mb-2">
<Database className="h-5 w-5 text-primary-600 dark:text-primary-400" />
<h4 className="text-sm font-medium text-secondary-900 dark:text-white">
Docker
</h4>
{integrationsData?.data?.integrations?.docker ? (
<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200">
Enabled
</span>
) : (
<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold bg-gray-200 text-gray-600 dark:bg-gray-600 dark:text-gray-400">
Disabled
</span>
)}
</div>
<p className="text-xs text-secondary-600 dark:text-secondary-300">
Monitor Docker containers, images, volumes, and
networks. Collects real-time container status
events.
</p>
</div>
<div className="flex-shrink-0">
<button
type="button"
onClick={() =>
toggleIntegrationMutation.mutate({
integrationName: "docker",
enabled:
!integrationsData?.data?.integrations?.docker,
})
}
disabled={
toggleIntegrationMutation.isPending ||
!wsStatus?.connected
}
title={
!wsStatus?.connected
? "Agent is not connected"
: integrationsData?.data?.integrations?.docker
? "Disable Docker integration"
: "Enable Docker integration"
}
className={`relative inline-flex h-5 w-9 items-center rounded-full transition-colors focus:outline-none focus:ring-2 focus:ring-primary-500 focus:ring-offset-2 ${
integrationsData?.data?.integrations?.docker
? "bg-primary-600 dark:bg-primary-500"
: "bg-secondary-200 dark:bg-secondary-600"
} ${
toggleIntegrationMutation.isPending ||
!integrationsData?.data?.connected
? "opacity-50 cursor-not-allowed"
: ""
}`}
>
<span
className={`inline-block h-3 w-3 transform rounded-full bg-white transition-transform ${
integrationsData?.data?.integrations?.docker
? "translate-x-5"
: "translate-x-1"
}`}
/>
</button>
</div>
</div>
{!wsStatus?.connected && (
<p className="text-xs text-warning-600 dark:text-warning-400 mt-2">
Agent must be connected via WebSocket to toggle
integrations
</p>
)}
{toggleIntegrationMutation.isPending && (
<p className="text-xs text-secondary-600 dark:text-secondary-400 mt-2">
Updating integration...
</p>
)}
</div>
{/* Future integrations can be added here with the same pattern */}
</div>
)}
</div>
)}
</div>
</div>
</div>
@@ -1639,7 +1806,8 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
>
<option value="amd64">AMD64 (x86_64) - Default</option>
<option value="386">386 (i386) - 32-bit</option>
<option value="arm64">ARM64 (aarch64) - ARM 64-bit</option>
<option value="arm">ARM (armv7l/armv6l) - ARM 32-bit</option>
</select>
<p className="text-xs text-primary-600 dark:text-primary-400 mt-1">
Select the architecture of the target host
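For reference, the select values line up with uname -m output and the Go build names used for the agent binaries (sketch mirroring the install script's case statement earlier in this commit):

const UNAME_TO_GO_ARCH = {
  x86_64: "amd64",
  i386: "386",
  i686: "386",
  aarch64: "arm64",
  arm64: "arm64",
  armv7l: "arm",
  armv6l: "arm",
  arm: "arm",
};
// e.g. a Raspberry Pi reporting "armv7l" should use the new "arm" option.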

View File

@@ -99,6 +99,8 @@ export const dashboardAPI = {
},
getRecentUsers: () => api.get("/dashboard/recent-users"),
getRecentCollection: () => api.get("/dashboard/recent-collection"),
triggerSystemStatistics: () =>
api.post("/automation/trigger/system-statistics"),
};
// Admin Hosts API (for management interface)
@@ -129,6 +131,11 @@ export const adminHostsAPI = {
api.patch(`/hosts/${hostId}/notes`, {
notes: notes,
}),
getIntegrations: (hostId) => api.get(`/hosts/${hostId}/integrations`),
toggleIntegration: (hostId, integrationName, enabled) =>
api.post(`/hosts/${hostId}/integrations/${integrationName}/toggle`, {
enabled,
}),
};
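Typical usage of the new client helpers from a component or hook might look like this (sketch; the import path is illustrative):

import { adminHostsAPI, dashboardAPI } from "./api";

async function example(hostId) {
  const { data } = await adminHostsAPI.getIntegrations(hostId);
  const dockerEnabled = data.data.integrations.docker;
  await adminHostsAPI.toggleIntegration(hostId, "docker", !dockerEnabled);
  await dashboardAPI.triggerSystemStatistics(); // enqueue a system-statistics run
}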
// Host Groups API // Host Groups API

View File

@@ -443,7 +443,7 @@ generate_redis_password() {
# Find next available Redis database
find_next_redis_db() {
print_info "Finding next available Redis database..." >&2
# Start from database 0 and keep checking until we find an empty one
local db_num=0
@@ -463,11 +463,11 @@ find_next_redis_db() {
# Try to load admin credentials if ACL file exists
if [ -f /etc/redis/users.acl ] && grep -q "^user admin" /etc/redis/users.acl; then
# Redis is configured with ACL - try to extract admin password
print_info "Redis requires authentication, attempting with admin credentials..." >&2
# For multi-instance setups, we can't know the admin password yet
# So we'll just use database 0 as default
print_info "Using database 0 (Redis ACL already configured)" >&2
echo "0"
return 0
fi
@@ -484,7 +484,7 @@ find_next_redis_db() {
# Check for authentication errors
if echo "$redis_output" | grep -q "NOAUTH\|WRONGPASS"; then
# If we hit auth errors and haven't configured yet, use database 0
print_info "Redis requires authentication, defaulting to database 0" >&2
echo "0"
return 0
fi
@@ -492,10 +492,10 @@ find_next_redis_db() {
# Check for other errors
if echo "$redis_output" | grep -q "ERR"; then
if echo "$redis_output" | grep -q "invalid DB index"; then
print_warning "Reached maximum database limit at database $db_num" >&2
break
else
print_error "Error checking database $db_num: $redis_output" >&2
return 1
fi
fi
@@ -504,17 +504,17 @@ find_next_redis_db() {
# If database is empty, use it
if [ "$key_count" = "0" ] || [ "$key_count" = "(integer) 0" ]; then
print_status "Found available Redis database: $db_num (empty)" >&2
echo "$db_num"
return 0
fi
print_info "Database $db_num has $key_count keys, checking next..." >&2
db_num=$((db_num + 1))
done
print_warning "No available Redis databases found (checked 0-$max_attempts)" >&2
print_info "Using database 0 (may have existing data)" >&2
echo "0"
return 0
}
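The reason for the >&2 redirects: callers capture the function's stdout (e.g. REDIS_DB=$(find_next_redis_db)), so only the echoed database number may reach stdout; progress messages have to go to stderr or they end up inside the captured value. The same capture principle, shown from Node with a hypothetical wrapper script:

const { execSync } = require("node:child_process");
// execSync returns the child's stdout; its stderr passes through to the terminal by default.
const dbIndex = execSync("/opt/patchmon/find-redis-db.sh").toString().trim(); // wrapper path is made up
// print_* lines sent to stderr still show on screen but never pollute dbIndex ("0", "1", ...).
console.log(`Using Redis database ${dbIndex}`);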