mirror of
https://github.com/9technologygroup/patchmon.net.git
synced 2025-11-05 06:23:22 +00:00
Compare commits
78 Commits
SHA1s of the 78 commits in this comparison:
913976b7f6, 53ff3bb1e2, 428207bc58, 1547af6986, 39fbafe01f, f296cf2003, 052a77dce8, 94bfffd882,
37462f4831, 5457a1e9bc, f3bca4a6d5, ca4d34c230, 1e75f2b1fe, 79317b0052, 77a945a5b6, 276d910e83,
dae536e96b, 8361caabe8, f6d23e45b2, aba0f5cb6b, 2ec2b3992c, f85721b292, 1d2c003830, 2975da0f69,
93760d03e1, 43fb54a683, e9368d1a95, 3ce8c02a31, ac420901a6, eb0218bdcb, 1f6f58360f, 746451c296,
285e4c59ee, 9050595b7c, cc46940b0c, 203a065479, 8864de6c15, 96aedbe761, 3df2057f7e, 42f4e58bb4,
12eef22912, c2121e3995, 6792f96af9, 1e617c8bb8, a76c5b8963, 212b24b1c8, 9fc3f4f9d1, 3029278742,
e4d6c1205c, 0f5272d12a, 5776d32e71, a11ff842eb, 48ce1951de, 9705e24b83, 933c7a067e, 68f10c6c43,
4b6f19c28e, ae6afb0ef4, 61523c9a44, 3f9a5576ac, e2dd7acca5, 1c3b01f13c, 2c5a35b6c2, f42c53d34b,
95800e6d76, 8d372411be, cd03f0e66a, deb6bed1a6, 0189a307ef, 00abbc8c62, c9aef78912, fd2df0729e,
d7f7b24f8f, 1ef2308d56, fcd1b52e0e, 5be8e01aa3, 293733dc0b, c7ab40e4a2
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -356,6 +356,7 @@ api_version: "v1"
 credentials_file: "/etc/patchmon/credentials.yml"
 log_file: "/etc/patchmon/logs/patchmon-agent.log"
 log_level: "info"
+skip_ssl_verify: ${SKIP_SSL_VERIFY:-false}
 EOF
 
 # Create credentials file
@@ -3,6 +3,13 @@ DATABASE_URL="postgresql://patchmon_user:your-password-here@localhost:5432/patch
 PM_DB_CONN_MAX_ATTEMPTS=30
 PM_DB_CONN_WAIT_INTERVAL=2
 
+# Database Connection Pool Configuration (Prisma)
+DB_CONNECTION_LIMIT=30 # Maximum connections per instance (default: 30)
+DB_POOL_TIMEOUT=20 # Seconds to wait for available connection (default: 20)
+DB_CONNECT_TIMEOUT=10 # Seconds to wait for initial connection (default: 10)
+DB_IDLE_TIMEOUT=300 # Seconds before closing idle connections (default: 300)
+DB_MAX_LIFETIME=1800 # Maximum lifetime of a connection in seconds (default: 1800)
+
 # JWT Configuration
 JWT_SECRET=your-secure-random-secret-key-change-this-in-production
 JWT_EXPIRES_IN=1h
@@ -1,6 +1,6 @@
 {
 	"name": "patchmon-backend",
-	"version": "1.3.0",
+	"version": "1.3.2",
 	"description": "Backend API for Linux Patch Monitoring System",
 	"license": "AGPL-3.0",
 	"main": "src/server.js",
@@ -0,0 +1,4 @@
+-- AlterTable
+-- Add color_theme field to settings table for customizable app theming
+ALTER TABLE "settings" ADD COLUMN "color_theme" TEXT NOT NULL DEFAULT 'default';
+
@@ -0,0 +1,14 @@
+-- AddMetricsTelemetry
+-- Add anonymous metrics and telemetry fields to settings table
+
+-- Add metrics fields to settings table
+ALTER TABLE "settings" ADD COLUMN "metrics_enabled" BOOLEAN NOT NULL DEFAULT true;
+ALTER TABLE "settings" ADD COLUMN "metrics_anonymous_id" TEXT;
+ALTER TABLE "settings" ADD COLUMN "metrics_last_sent" TIMESTAMP(3);
+
+-- Generate UUID for existing records (if any exist)
+-- This will use PostgreSQL's gen_random_uuid() function
+UPDATE "settings"
+SET "metrics_anonymous_id" = gen_random_uuid()::text
+WHERE "metrics_anonymous_id" IS NULL;
+
@@ -0,0 +1,74 @@
+-- CreateTable
+CREATE TABLE "docker_volumes" (
+    "id" TEXT NOT NULL,
+    "host_id" TEXT NOT NULL,
+    "volume_id" TEXT NOT NULL,
+    "name" TEXT NOT NULL,
+    "driver" TEXT NOT NULL,
+    "mountpoint" TEXT,
+    "renderer" TEXT,
+    "scope" TEXT NOT NULL DEFAULT 'local',
+    "labels" JSONB,
+    "options" JSONB,
+    "size_bytes" BIGINT,
+    "ref_count" INTEGER NOT NULL DEFAULT 0,
+    "created_at" TIMESTAMP(3) NOT NULL,
+    "updated_at" TIMESTAMP(3) NOT NULL,
+    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+    CONSTRAINT "docker_volumes_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateTable
+CREATE TABLE "docker_networks" (
+    "id" TEXT NOT NULL,
+    "host_id" TEXT NOT NULL,
+    "network_id" TEXT NOT NULL,
+    "name" TEXT NOT NULL,
+    "driver" TEXT NOT NULL,
+    "scope" TEXT NOT NULL DEFAULT 'local',
+    "ipv6_enabled" BOOLEAN NOT NULL DEFAULT false,
+    "internal" BOOLEAN NOT NULL DEFAULT false,
+    "attachable" BOOLEAN NOT NULL DEFAULT true,
+    "ingress" BOOLEAN NOT NULL DEFAULT false,
+    "config_only" BOOLEAN NOT NULL DEFAULT false,
+    "labels" JSONB,
+    "ipam" JSONB,
+    "container_count" INTEGER NOT NULL DEFAULT 0,
+    "created_at" TIMESTAMP(3),
+    "updated_at" TIMESTAMP(3) NOT NULL,
+    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+    CONSTRAINT "docker_networks_pkey" PRIMARY KEY ("id")
+);
+
+-- CreateIndex
+CREATE INDEX "docker_volumes_host_id_idx" ON "docker_volumes"("host_id");
+
+-- CreateIndex
+CREATE INDEX "docker_volumes_name_idx" ON "docker_volumes"("name");
+
+-- CreateIndex
+CREATE INDEX "docker_volumes_driver_idx" ON "docker_volumes"("driver");
+
+-- CreateIndex
+CREATE UNIQUE INDEX "docker_volumes_host_id_volume_id_key" ON "docker_volumes"("host_id", "volume_id");
+
+-- CreateIndex
+CREATE INDEX "docker_networks_host_id_idx" ON "docker_networks"("host_id");
+
+-- CreateIndex
+CREATE INDEX "docker_networks_name_idx" ON "docker_networks"("name");
+
+-- CreateIndex
+CREATE INDEX "docker_networks_driver_idx" ON "docker_networks"("driver");
+
+-- CreateIndex
+CREATE UNIQUE INDEX "docker_networks_host_id_network_id_key" ON "docker_networks"("host_id", "network_id");
+
+-- AddForeignKey
+ALTER TABLE "docker_volumes" ADD CONSTRAINT "docker_volumes_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "docker_networks" ADD CONSTRAINT "docker_networks_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,6 @@
+-- AlterTable
+ALTER TABLE "users" ADD COLUMN IF NOT EXISTS "theme_preference" VARCHAR(10) DEFAULT 'dark';
+
+-- AlterTable
+ALTER TABLE "users" ADD COLUMN IF NOT EXISTS "color_theme" VARCHAR(50) DEFAULT 'cyber_blue';
+
@@ -114,6 +114,8 @@ model hosts {
   host_group_memberships host_group_memberships[]
   update_history         update_history[]
   job_history            job_history[]
+  docker_volumes         docker_volumes[]
+  docker_networks        docker_networks[]
 
   @@index([machine_id])
   @@index([friendly_name])
@@ -191,6 +193,9 @@ model settings {
   logo_dark            String?   @default("/assets/logo_dark.png")
   logo_light           String?   @default("/assets/logo_light.png")
   favicon              String?   @default("/assets/logo_square.svg")
+  metrics_enabled      Boolean   @default(true)
+  metrics_anonymous_id String?
+  metrics_last_sent    DateTime?
 }
 
 model update_history {
@@ -222,6 +227,8 @@ model users {
   tfa_secret             String?
   first_name             String?
   last_name              String?
+  theme_preference       String?  @default("dark")
+  color_theme            String?  @default("cyber_blue")
   dashboard_preferences  dashboard_preferences[]
   user_sessions          user_sessions[]
   auto_enrollment_tokens auto_enrollment_tokens[]
@@ -338,6 +345,56 @@ model docker_image_updates {
   @@index([is_security_update])
 }
 
+model docker_volumes {
+  id           String   @id
+  host_id      String
+  volume_id    String
+  name         String
+  driver       String
+  mountpoint   String?
+  renderer     String?
+  scope        String   @default("local")
+  labels       Json?
+  options      Json?
+  size_bytes   BigInt?
+  ref_count    Int      @default(0)
+  created_at   DateTime
+  updated_at   DateTime
+  last_checked DateTime @default(now())
+  hosts        hosts    @relation(fields: [host_id], references: [id], onDelete: Cascade)
+
+  @@unique([host_id, volume_id])
+  @@index([host_id])
+  @@index([name])
+  @@index([driver])
+}
+
+model docker_networks {
+  id              String    @id
+  host_id         String
+  network_id      String
+  name            String
+  driver          String
+  scope           String    @default("local")
+  ipv6_enabled    Boolean   @default(false)
+  internal        Boolean   @default(false)
+  attachable      Boolean   @default(true)
+  ingress         Boolean   @default(false)
+  config_only     Boolean   @default(false)
+  labels          Json?
+  ipam            Json?     // IPAM configuration (driver, config, options)
+  container_count Int       @default(0)
+  created_at      DateTime?
+  updated_at      DateTime
+  last_checked    DateTime  @default(now())
+  hosts           hosts     @relation(fields: [host_id], references: [id], onDelete: Cascade)
+
+  @@unique([host_id, network_id])
+  @@index([host_id])
+  @@index([name])
+  @@index([driver])
+}
+
 model job_history {
   id     String @id
   job_id String
@@ -16,12 +16,28 @@ function getOptimizedDatabaseUrl() {
 	// Parse the URL
 	const url = new URL(originalUrl);
 
-	// Add connection pooling parameters for multiple instances
-	url.searchParams.set("connection_limit", "5"); // Reduced from default 10
-	url.searchParams.set("pool_timeout", "10"); // 10 seconds
-	url.searchParams.set("connect_timeout", "10"); // 10 seconds
-	url.searchParams.set("idle_timeout", "300"); // 5 minutes
-	url.searchParams.set("max_lifetime", "1800"); // 30 minutes
+	// Add connection pooling parameters - configurable via environment variables
+	const connectionLimit = process.env.DB_CONNECTION_LIMIT || "30";
+	const poolTimeout = process.env.DB_POOL_TIMEOUT || "20";
+	const connectTimeout = process.env.DB_CONNECT_TIMEOUT || "10";
+	const idleTimeout = process.env.DB_IDLE_TIMEOUT || "300";
+	const maxLifetime = process.env.DB_MAX_LIFETIME || "1800";
+
+	url.searchParams.set("connection_limit", connectionLimit);
+	url.searchParams.set("pool_timeout", poolTimeout);
+	url.searchParams.set("connect_timeout", connectTimeout);
+	url.searchParams.set("idle_timeout", idleTimeout);
+	url.searchParams.set("max_lifetime", maxLifetime);
+
+	// Log connection pool settings in development/debug mode
+	if (
+		process.env.ENABLE_LOGGING === "true" ||
+		process.env.LOG_LEVEL === "debug"
+	) {
+		console.log(
+			`[Database Pool] connection_limit=${connectionLimit}, pool_timeout=${poolTimeout}s, connect_timeout=${connectTimeout}s`,
+		);
+	}
+
 	return url.toString();
 }
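For reference, a minimal sketch of how the pooled URL produced by getOptimizedDatabaseUrl() can be handed to Prisma. The datasource name "db" and the client construction below are assumptions for illustration, not lines from this repository:

const { PrismaClient } = require("@prisma/client");

// e.g. DB_CONNECTION_LIMIT=50 DB_POOL_TIMEOUT=30 node src/server.js
const prisma = new PrismaClient({
	datasources: {
		// Resulting URL ends with ?connection_limit=50&pool_timeout=30&connect_timeout=10&...
		db: { url: getOptimizedDatabaseUrl() },
	},
});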
@@ -17,6 +17,7 @@ const {
 	refresh_access_token,
 	revoke_session,
 	revoke_all_user_sessions,
+	generate_device_fingerprint,
 } = require("../utils/session_manager");
 
 const router = express.Router();
@@ -788,12 +789,40 @@ router.post(
 
 			// Check if TFA is enabled
 			if (user.tfa_enabled) {
+				// Get device fingerprint from X-Device-ID header
+				const device_fingerprint = generate_device_fingerprint(req);
+
+				// Check if this device has a valid TFA bypass
+				if (device_fingerprint) {
+					const remembered_session = await prisma.user_sessions.findFirst({
+						where: {
+							user_id: user.id,
+							device_fingerprint: device_fingerprint,
+							tfa_remember_me: true,
+							tfa_bypass_until: { gt: new Date() }, // Bypass still valid
+						},
+					});
+
+					if (remembered_session) {
+						// Device is remembered and bypass is still valid - skip TFA
+						// Continue with login below
+					} else {
+						// No valid bypass for this device - require TFA
 						return res.status(200).json({
 							message: "TFA verification required",
 							requiresTfa: true,
 							username: user.username,
 						});
 					}
+				} else {
+					// No device ID provided - require TFA
+					return res.status(200).json({
+						message: "TFA verification required",
+						requiresTfa: true,
+						username: user.username,
+					});
+				}
+			}
 
 			// Update last login
 			await prisma.users.update({
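The bypass above relies on generate_device_fingerprint(req) from ../utils/session_manager, whose implementation is not part of this diff. A hypothetical sketch of such a helper, assuming the client sends an X-Device-ID header as the comment suggests; the hashing scheme is illustrative only, not the project's actual code:

const crypto = require("node:crypto");

// Hypothetical helper - the real one lives in utils/session_manager.js.
function generate_device_fingerprint(req) {
	const device_id = req.get("x-device-id"); // set by the frontend per browser/device
	if (!device_id) return null; // callers treat null as "require TFA"

	// Mix in the user agent so a copied ID from another browser does not match
	return crypto
		.createHash("sha256")
		.update(`${device_id}:${req.get("user-agent") || ""}`)
		.digest("hex");
}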
@@ -807,7 +836,13 @@ router.post(
 			// Create session with access and refresh tokens
 			const ip_address = req.ip || req.connection.remoteAddress;
 			const user_agent = req.get("user-agent");
-			const session = await create_session(user.id, ip_address, user_agent);
+			const session = await create_session(
+				user.id,
+				ip_address,
+				user_agent,
+				false,
+				req,
+			);
 
 			res.json({
 				message: "Login successful",
@@ -825,6 +860,9 @@ router.post(
 					last_login: user.last_login,
 					created_at: user.created_at,
 					updated_at: user.updated_at,
+					// Include user preferences so they're available immediately after login
+					theme_preference: user.theme_preference,
+					color_theme: user.color_theme,
 				},
 			});
 		} catch (error) {
@@ -841,8 +879,10 @@ router.post(
 		body("username").notEmpty().withMessage("Username is required"),
 		body("token")
 			.isLength({ min: 6, max: 6 })
-			.withMessage("Token must be 6 digits"),
-		body("token").isNumeric().withMessage("Token must contain only numbers"),
+			.withMessage("Token must be 6 characters"),
+		body("token")
+			.matches(/^[A-Z0-9]{6}$/)
+			.withMessage("Token must be 6 alphanumeric characters"),
 		body("remember_me")
 			.optional()
 			.isBoolean()
@@ -915,10 +955,24 @@ router.post(
 				return res.status(401).json({ error: "Invalid verification code" });
 			}
 
-			// Update last login
-			await prisma.users.update({
+			// Update last login and fetch complete user data
+			const updatedUser = await prisma.users.update({
 				where: { id: user.id },
 				data: { last_login: new Date() },
+				select: {
+					id: true,
+					username: true,
+					email: true,
+					first_name: true,
+					last_name: true,
+					role: true,
+					is_active: true,
+					last_login: true,
+					created_at: true,
+					updated_at: true,
+					theme_preference: true,
+					color_theme: true,
+				},
 			});
 
 			// Create session with access and refresh tokens
@@ -938,14 +992,7 @@ router.post(
 				refresh_token: session.refresh_token,
 				expires_at: session.expires_at,
 				tfa_bypass_until: session.tfa_bypass_until,
-				user: {
-					id: user.id,
-					username: user.username,
-					email: user.email,
-					first_name: user.first_name,
-					last_name: user.last_name,
-					role: user.role,
-				},
+				user: updatedUser,
 			});
 		} catch (error) {
 			console.error("TFA verification error:", error);
@@ -977,13 +1024,27 @@ router.put(
 			.withMessage("Username must be at least 3 characters"),
 		body("email").optional().isEmail().withMessage("Valid email is required"),
 		body("first_name")
-			.optional()
-			.isLength({ min: 1 })
-			.withMessage("First name must be at least 1 character"),
+			.optional({ nullable: true, checkFalsy: true })
+			.custom((value) => {
+				// Allow null, undefined, or empty string to clear the field
+				if (value === null || value === undefined || value === "") {
+					return true;
+				}
+				// If provided, must be at least 1 character after trimming
+				return typeof value === "string" && value.trim().length >= 1;
+			})
+			.withMessage("First name must be at least 1 character if provided"),
 		body("last_name")
-			.optional()
-			.isLength({ min: 1 })
-			.withMessage("Last name must be at least 1 character"),
+			.optional({ nullable: true, checkFalsy: true })
+			.custom((value) => {
+				// Allow null, undefined, or empty string to clear the field
+				if (value === null || value === undefined || value === "") {
+					return true;
+				}
+				// If provided, must be at least 1 character after trimming
+				return typeof value === "string" && value.trim().length >= 1;
+			})
+			.withMessage("Last name must be at least 1 character if provided"),
 	],
 	async (req, res) => {
 		try {
@@ -993,12 +1054,27 @@ router.put(
 			}
 
 			const { username, email, first_name, last_name } = req.body;
-			const updateData = {};
+			const updateData = {
+				updated_at: new Date(),
+			};
 
-			if (username) updateData.username = username;
-			if (email) updateData.email = email;
-			if (first_name !== undefined) updateData.first_name = first_name || null;
-			if (last_name !== undefined) updateData.last_name = last_name || null;
+			// Handle all fields consistently - trim and update if provided
+			if (username) updateData.username = username.trim();
+			if (email) updateData.email = email.trim();
+			if (first_name !== undefined) {
+				// Allow null or empty string to clear the field, otherwise trim
+				updateData.first_name =
+					first_name === "" || first_name === null
+						? null
+						: first_name.trim() || null;
+			}
+			if (last_name !== undefined) {
+				// Allow null or empty string to clear the field, otherwise trim
+				updateData.last_name =
+					last_name === "" || last_name === null
+						? null
+						: last_name.trim() || null;
+			}
 
 			// Check if username/email already exists (excluding current user)
 			if (username || email) {
@@ -1023,6 +1099,7 @@ router.put(
 				}
 			}
 
+			// Update user with explicit commit
 			const updatedUser = await prisma.users.update({
 				where: { id: req.user.id },
 				data: updateData,
@@ -1039,9 +1116,29 @@ router.put(
 				},
 			});
 
+			// Explicitly refresh user data from database to ensure we return latest data
+			// This ensures consistency especially in high-concurrency scenarios
+			const freshUser = await prisma.users.findUnique({
+				where: { id: req.user.id },
+				select: {
+					id: true,
+					username: true,
+					email: true,
+					first_name: true,
+					last_name: true,
+					role: true,
+					is_active: true,
+					last_login: true,
+					updated_at: true,
+				},
+			});
+
+			// Use fresh data if available, otherwise fallback to updatedUser
+			const responseUser = freshUser || updatedUser;
+
 			res.json({
 				message: "Profile updated successfully",
-				user: updatedUser,
+				user: responseUser,
 			});
 		} catch (error) {
 			console.error("Update profile error:", error);
@@ -218,6 +218,30 @@ router.post(
 	},
 );
 
+// Trigger manual Docker inventory cleanup
+router.post(
+	"/trigger/docker-inventory-cleanup",
+	authenticateToken,
+	async (_req, res) => {
+		try {
+			const job = await queueManager.triggerDockerInventoryCleanup();
+			res.json({
+				success: true,
+				data: {
+					jobId: job.id,
+					message: "Docker inventory cleanup triggered successfully",
+				},
+			});
+		} catch (error) {
+			console.error("Error triggering Docker inventory cleanup:", error);
+			res.status(500).json({
+				success: false,
+				error: "Failed to trigger Docker inventory cleanup",
+			});
+		}
+	},
+);
+
 // Get queue health status
 router.get("/health", authenticateToken, async (_req, res) => {
 	try {
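A quick way to exercise the new trigger route from a script. The base URL and token handling below are placeholders; the path and response shape come from the handler above:

// Assumes the automation router is mounted at /api/v1/automation and a valid JWT is at hand.
const BASE_URL = process.env.PATCHMON_URL || "http://localhost:3001";

async function triggerDockerInventoryCleanup(token) {
	const res = await fetch(
		`${BASE_URL}/api/v1/automation/trigger/docker-inventory-cleanup`,
		{
			method: "POST",
			headers: { Authorization: `Bearer ${token}` },
		},
	);
	// Expected on success: { success: true, data: { jobId, message } }
	return res.json();
}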
@@ -274,6 +298,7 @@ router.get("/overview", authenticateToken, async (_req, res) => {
 			queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
 			queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
 			queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
+			queueManager.getRecentJobs(QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, 1),
 			queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
 		]);
 
@@ -283,19 +308,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
 				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
 				stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
 				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
-				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed,
+				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed +
+				stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].delayed,
 
 			runningTasks:
 				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
 				stats[QUEUE_NAMES.SESSION_CLEANUP].active +
 				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
-				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active,
+				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active +
+				stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].active,
 
 			failedTasks:
 				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
 				stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
 				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
-				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed,
+				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed +
+				stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].failed,
 
 			totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
 				return (
@@ -375,10 +403,11 @@ router.get("/overview", authenticateToken, async (_req, res) => {
 				stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
 			},
 			{
-				name: "Collect Host Statistics",
-				queue: QUEUE_NAMES.AGENT_COMMANDS,
-				description: "Collects package statistics from connected agents only",
-				schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
+				name: "Docker Inventory Cleanup",
+				queue: QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP,
+				description:
+					"Removes Docker containers and images for non-existent hosts",
+				schedule: "Daily at 4 AM",
 				lastRun: recentJobs[4][0]?.finishedOn
 					? new Date(recentJobs[4][0].finishedOn).toLocaleString()
 					: "Never",
@@ -388,6 +417,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
 					: recentJobs[4][0]
 						? "Success"
 						: "Never run",
+				stats: stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP],
+			},
+			{
+				name: "Collect Host Statistics",
+				queue: QUEUE_NAMES.AGENT_COMMANDS,
+				description: "Collects package statistics from connected agents only",
+				schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
+				lastRun: recentJobs[5][0]?.finishedOn
+					? new Date(recentJobs[5][0].finishedOn).toLocaleString()
+					: "Never",
+				lastRunTimestamp: recentJobs[5][0]?.finishedOn || 0,
+				status: recentJobs[5][0]?.failedReason
+					? "Failed"
+					: recentJobs[5][0]
+						? "Success"
+						: "Never run",
 				stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
 			},
 		].sort((a, b) => {
@@ -193,11 +193,16 @@ router.get(
 	},
 );
 
-// Get hosts with their update status
+// Get hosts with their update status - OPTIMIZED
 router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
 	try {
+		// Get settings once (outside the loop)
+		const settings = await prisma.settings.findFirst();
+		const updateIntervalMinutes = settings?.update_interval || 60;
+		const thresholdMinutes = updateIntervalMinutes * 2;
+
+		// Fetch hosts with groups
 		const hosts = await prisma.hosts.findMany({
-			// Show all hosts regardless of status
 			select: {
 				id: true,
 				machine_id: true,
@@ -223,40 +228,45 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
 					},
 				},
 			},
-			_count: {
-				select: {
-					host_packages: {
-						where: {
-							needs_update: true,
-						},
-					},
-				},
-			},
 		},
 		orderBy: { last_update: "desc" },
 	});
 
-		// Get update counts for each host separately
-		const hostsWithUpdateInfo = await Promise.all(
-			hosts.map(async (host) => {
-				const updatesCount = await prisma.host_packages.count({
-					where: {
-						host_id: host.id,
-						needs_update: true,
-					},
-				});
-
-				// Get total packages count for this host
-				const totalPackagesCount = await prisma.host_packages.count({
-					where: {
-						host_id: host.id,
-					},
-				});
-
-				// Get the agent update interval setting for stale calculation
-				const settings = await prisma.settings.findFirst();
-				const updateIntervalMinutes = settings?.update_interval || 60;
-				const thresholdMinutes = updateIntervalMinutes * 2;
+		// OPTIMIZATION: Get all package counts in 2 batch queries instead of N*2 queries
+		const hostIds = hosts.map((h) => h.id);
+
+		const [updateCounts, totalCounts] = await Promise.all([
+			// Get update counts for all hosts at once
+			prisma.host_packages.groupBy({
+				by: ["host_id"],
+				where: {
+					host_id: { in: hostIds },
+					needs_update: true,
+				},
+				_count: { id: true },
+			}),
+			// Get total counts for all hosts at once
+			prisma.host_packages.groupBy({
+				by: ["host_id"],
+				where: {
+					host_id: { in: hostIds },
+				},
+				_count: { id: true },
+			}),
+		]);
+
+		// Create lookup maps for O(1) access
+		const updateCountMap = new Map(
+			updateCounts.map((item) => [item.host_id, item._count.id]),
+		);
+		const totalCountMap = new Map(
+			totalCounts.map((item) => [item.host_id, item._count.id]),
+		);
+
+		// Process hosts with counts from maps (no more DB queries!)
+		const hostsWithUpdateInfo = hosts.map((host) => {
+			const updatesCount = updateCountMap.get(host.id) || 0;
+			const totalPackagesCount = totalCountMap.get(host.id) || 0;
 
 			// Calculate effective status based on reporting interval
 			const isStale = moment(host.last_update).isBefore(
@@ -276,8 +286,7 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
 				isStale,
 				effectiveStatus,
 			};
-			}),
-		);
+		});
 
 		res.json(hostsWithUpdateInfo);
 	} catch (error) {
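For reference, the shape the two groupBy calls above resolve to and how the lookup maps consume it. The IDs and counts here are illustrative, not data from the repository:

// prisma.host_packages.groupBy({ by: ["host_id"], _count: { id: true }, ... }) resolves to:
const updateCounts = [
	{ host_id: "host-a", _count: { id: 12 } },
	{ host_id: "host-b", _count: { id: 3 } },
];

// Turned into an O(1) lookup exactly as the route does:
const updateCountMap = new Map(
	updateCounts.map((item) => [item.host_id, item._count.id]),
);

console.log(updateCountMap.get("host-a") || 0); // 12
console.log(updateCountMap.get("host-missing") || 0); // 0 - host with no pending updates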
@@ -522,7 +522,8 @@ router.get("/updates", authenticateToken, async (req, res) => {
 	}
 });
 
-// POST /api/v1/docker/collect - Collect Docker data from agent
+// POST /api/v1/docker/collect - Collect Docker data from agent (DEPRECATED - kept for backward compatibility)
+// New agents should use POST /api/v1/integrations/docker
 router.post("/collect", async (req, res) => {
 	try {
 		const { apiId, apiKey, containers, images, updates } = req.body;
@@ -572,6 +573,7 @@ router.post("/collect", async (req, res) => {
 						image_id: containerData.image_id || "unknown",
 						source: containerData.image_source || "docker-hub",
 						created_at: parseDate(containerData.created_at),
+						last_checked: now,
 						updated_at: now,
 					},
 				});
@@ -745,6 +747,497 @@ router.post("/collect", async (req, res) => {
 	}
 });
 
+// POST /api/v1/integrations/docker - New integration endpoint for Docker data collection
+router.post("/../integrations/docker", async (req, res) => {
+	try {
+		const apiId = req.headers["x-api-id"];
+		const apiKey = req.headers["x-api-key"];
+		const {
+			containers,
+			images,
+			updates,
+			daemon_info: _daemon_info,
+			hostname,
+			machine_id,
+			agent_version: _agent_version,
+		} = req.body;
+
+		console.log(
+			`[Docker Integration] Received data from ${hostname || machine_id}`,
+		);
+
+		// Validate API credentials
+		const host = await prisma.hosts.findFirst({
+			where: { api_id: apiId, api_key: apiKey },
+		});
+
+		if (!host) {
+			console.warn("[Docker Integration] Invalid API credentials");
+			return res.status(401).json({ error: "Invalid API credentials" });
+		}
+
+		console.log(
+			`[Docker Integration] Processing for host: ${host.friendly_name}`,
+		);
+
+		const now = new Date();
+
+		// Helper function to validate and parse dates
+		const parseDate = (dateString) => {
+			if (!dateString) return now;
+			const date = new Date(dateString);
+			return Number.isNaN(date.getTime()) ? now : date;
+		};
+
+		let containersProcessed = 0;
+		let imagesProcessed = 0;
+		let updatesProcessed = 0;
+
+		// Process containers
+		if (containers && Array.isArray(containers)) {
+			console.log(
+				`[Docker Integration] Processing ${containers.length} containers`,
+			);
+			for (const containerData of containers) {
+				const containerId = uuidv4();
+
+				// Find or create image
+				let imageId = null;
+				if (containerData.image_repository && containerData.image_tag) {
+					const image = await prisma.docker_images.upsert({
+						where: {
+							repository_tag_image_id: {
+								repository: containerData.image_repository,
+								tag: containerData.image_tag,
+								image_id: containerData.image_id || "unknown",
+							},
+						},
+						update: {
+							last_checked: now,
+							updated_at: now,
+						},
+						create: {
+							id: uuidv4(),
+							repository: containerData.image_repository,
+							tag: containerData.image_tag,
+							image_id: containerData.image_id || "unknown",
+							source: containerData.image_source || "docker-hub",
+							created_at: parseDate(containerData.created_at),
+							last_checked: now,
+							updated_at: now,
+						},
+					});
+					imageId = image.id;
+				}
+
+				// Upsert container
+				await prisma.docker_containers.upsert({
+					where: {
+						host_id_container_id: {
+							host_id: host.id,
+							container_id: containerData.container_id,
+						},
+					},
+					update: {
+						name: containerData.name,
+						image_id: imageId,
+						image_name: containerData.image_name,
+						image_tag: containerData.image_tag || "latest",
+						status: containerData.status,
+						state: containerData.state || containerData.status,
+						ports: containerData.ports || null,
+						started_at: containerData.started_at
+							? parseDate(containerData.started_at)
+							: null,
+						updated_at: now,
+						last_checked: now,
+					},
+					create: {
+						id: containerId,
+						host_id: host.id,
+						container_id: containerData.container_id,
+						name: containerData.name,
+						image_id: imageId,
+						image_name: containerData.image_name,
+						image_tag: containerData.image_tag || "latest",
+						status: containerData.status,
+						state: containerData.state || containerData.status,
+						ports: containerData.ports || null,
+						created_at: parseDate(containerData.created_at),
+						started_at: containerData.started_at
+							? parseDate(containerData.started_at)
+							: null,
+						updated_at: now,
+					},
+				});
+				containersProcessed++;
+			}
+		}
+
+		// Process standalone images
+		if (images && Array.isArray(images)) {
+			console.log(`[Docker Integration] Processing ${images.length} images`);
+			for (const imageData of images) {
+				// If image has no digest, it's likely locally built - override source to "local"
+				const imageSource =
+					!imageData.digest || imageData.digest.trim() === ""
+						? "local"
+						: imageData.source || "docker-hub";
+
+				await prisma.docker_images.upsert({
+					where: {
+						repository_tag_image_id: {
+							repository: imageData.repository,
+							tag: imageData.tag,
+							image_id: imageData.image_id,
+						},
+					},
+					update: {
+						size_bytes: imageData.size_bytes
+							? BigInt(imageData.size_bytes)
+							: null,
+						digest: imageData.digest || null,
+						source: imageSource, // Update source in case it changed
+						last_checked: now,
+						updated_at: now,
+					},
+					create: {
+						id: uuidv4(),
+						repository: imageData.repository,
+						tag: imageData.tag,
+						image_id: imageData.image_id,
+						digest: imageData.digest,
+						size_bytes: imageData.size_bytes
+							? BigInt(imageData.size_bytes)
+							: null,
+						source: imageSource,
+						created_at: parseDate(imageData.created_at),
+						last_checked: now,
+						updated_at: now,
+					},
+				});
+				imagesProcessed++;
+			}
+		}
+
+		// Process updates
+		if (updates && Array.isArray(updates)) {
+			console.log(`[Docker Integration] Processing ${updates.length} updates`);
+			for (const updateData of updates) {
+				// Find the image by repository and image_id
+				const image = await prisma.docker_images.findFirst({
+					where: {
+						repository: updateData.repository,
+						tag: updateData.current_tag,
+						image_id: updateData.image_id,
+					},
+				});
+
+				if (image) {
+					// Store digest info in changelog_url field as JSON
+					const digestInfo = JSON.stringify({
+						method: "digest_comparison",
+						current_digest: updateData.current_digest,
+						available_digest: updateData.available_digest,
+					});
+
+					// Upsert the update record
+					await prisma.docker_image_updates.upsert({
+						where: {
+							image_id_available_tag: {
+								image_id: image.id,
+								available_tag: updateData.available_tag,
+							},
+						},
+						update: {
+							updated_at: now,
+							changelog_url: digestInfo,
+							severity: "digest_changed",
+						},
+						create: {
+							id: uuidv4(),
+							image_id: image.id,
+							current_tag: updateData.current_tag,
+							available_tag: updateData.available_tag,
+							severity: "digest_changed",
+							changelog_url: digestInfo,
+							updated_at: now,
+						},
+					});
+					updatesProcessed++;
+				}
+			}
+		}
+
+		console.log(
+			`[Docker Integration] Successfully processed: ${containersProcessed} containers, ${imagesProcessed} images, ${updatesProcessed} updates`,
+		);
+
+		res.json({
+			message: "Docker data collected successfully",
+			containers_received: containersProcessed,
+			images_received: imagesProcessed,
+			updates_found: updatesProcessed,
+		});
+	} catch (error) {
+		console.error("[Docker Integration] Error collecting Docker data:", error);
+		console.error("[Docker Integration] Error stack:", error.stack);
+		res.status(500).json({
+			error: "Failed to collect Docker data",
+			message: error.message,
+			details: process.env.NODE_ENV === "development" ? error.stack : undefined,
+		});
+	}
+});
+
+// DELETE /api/v1/docker/containers/:id - Delete a container
+router.delete("/containers/:id", authenticateToken, async (req, res) => {
+	try {
+		const { id } = req.params;
+
+		// Check if container exists
+		const container = await prisma.docker_containers.findUnique({
+			where: { id },
+		});
+
+		if (!container) {
+			return res.status(404).json({ error: "Container not found" });
+		}
+
+		// Delete the container
+		await prisma.docker_containers.delete({
+			where: { id },
+		});
+
+		console.log(`🗑️ Deleted container: ${container.name} (${id})`);
+
+		res.json({
+			success: true,
+			message: `Container ${container.name} deleted successfully`,
+		});
+	} catch (error) {
+		console.error("Error deleting container:", error);
+		res.status(500).json({ error: "Failed to delete container" });
+	}
+});
+
+// DELETE /api/v1/docker/images/:id - Delete an image
+router.delete("/images/:id", authenticateToken, async (req, res) => {
+	try {
+		const { id } = req.params;
+
+		// Check if image exists
+		const image = await prisma.docker_images.findUnique({
+			where: { id },
+			include: {
+				_count: {
+					select: {
+						docker_containers: true,
+					},
+				},
+			},
+		});
+
+		if (!image) {
+			return res.status(404).json({ error: "Image not found" });
+		}
+
+		// Check if image is in use by containers
+		if (image._count.docker_containers > 0) {
+			return res.status(400).json({
+				error: `Cannot delete image: ${image._count.docker_containers} container(s) are using this image`,
+				containersCount: image._count.docker_containers,
+			});
+		}
+
+		// Delete image updates first
+		await prisma.docker_image_updates.deleteMany({
+			where: { image_id: id },
+		});
+
+		// Delete the image
+		await prisma.docker_images.delete({
+			where: { id },
+		});
+
+		console.log(`🗑️ Deleted image: ${image.repository}:${image.tag} (${id})`);
+
+		res.json({
+			success: true,
+			message: `Image ${image.repository}:${image.tag} deleted successfully`,
+		});
+	} catch (error) {
+		console.error("Error deleting image:", error);
+		res.status(500).json({ error: "Failed to delete image" });
+	}
+});
+
+// GET /api/v1/docker/volumes - Get all volumes with filters
+router.get("/volumes", authenticateToken, async (req, res) => {
+	try {
+		const { driver, search, page = 1, limit = 50 } = req.query;
+
+		const where = {};
+		if (driver) where.driver = driver;
+		if (search) {
+			where.OR = [{ name: { contains: search, mode: "insensitive" } }];
+		}
+
+		const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
+		const take = parseInt(limit, 10);
+
+		const [volumes, total] = await Promise.all([
+			prisma.docker_volumes.findMany({
+				where,
+				include: {
+					hosts: {
+						select: {
+							id: true,
+							friendly_name: true,
+							hostname: true,
+							ip: true,
+						},
+					},
+				},
+				orderBy: { updated_at: "desc" },
+				skip,
+				take,
+			}),
+			prisma.docker_volumes.count({ where }),
+		]);
+
+		res.json(
+			convertBigIntToString({
+				volumes,
+				pagination: {
+					page: parseInt(page, 10),
+					limit: parseInt(limit, 10),
+					total,
+					totalPages: Math.ceil(total / parseInt(limit, 10)),
+				},
+			}),
+		);
+	} catch (error) {
+		console.error("Error fetching volumes:", error);
+		res.status(500).json({ error: "Failed to fetch volumes" });
+	}
+});
+
+// GET /api/v1/docker/volumes/:id - Get volume detail
+router.get("/volumes/:id", authenticateToken, async (req, res) => {
+	try {
+		const { id } = req.params;
+
+		const volume = await prisma.docker_volumes.findUnique({
+			where: { id },
+			include: {
+				hosts: {
+					select: {
+						id: true,
+						friendly_name: true,
+						hostname: true,
+						ip: true,
+						os_type: true,
+						os_version: true,
+					},
+				},
+			},
+		});
+
+		if (!volume) {
+			return res.status(404).json({ error: "Volume not found" });
+		}
+
+		res.json(convertBigIntToString({ volume }));
+	} catch (error) {
+		console.error("Error fetching volume detail:", error);
+		res.status(500).json({ error: "Failed to fetch volume detail" });
+	}
+});
+
+// GET /api/v1/docker/networks - Get all networks with filters
+router.get("/networks", authenticateToken, async (req, res) => {
+	try {
+		const { driver, search, page = 1, limit = 50 } = req.query;
+
+		const where = {};
+		if (driver) where.driver = driver;
+		if (search) {
+			where.OR = [{ name: { contains: search, mode: "insensitive" } }];
+		}
+
+		const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
+		const take = parseInt(limit, 10);
+
+		const [networks, total] = await Promise.all([
+			prisma.docker_networks.findMany({
+				where,
+				include: {
+					hosts: {
+						select: {
+							id: true,
+							friendly_name: true,
+							hostname: true,
+							ip: true,
+						},
+					},
+				},
+				orderBy: { updated_at: "desc" },
+				skip,
+				take,
+			}),
+			prisma.docker_networks.count({ where }),
+		]);
+
+		res.json(
+			convertBigIntToString({
+				networks,
+				pagination: {
+					page: parseInt(page, 10),
+					limit: parseInt(limit, 10),
+					total,
+					totalPages: Math.ceil(total / parseInt(limit, 10)),
+				},
+			}),
+		);
+	} catch (error) {
+		console.error("Error fetching networks:", error);
+		res.status(500).json({ error: "Failed to fetch networks" });
+	}
+});
+
+// GET /api/v1/docker/networks/:id - Get network detail
+router.get("/networks/:id", authenticateToken, async (req, res) => {
+	try {
+		const { id } = req.params;
+
+		const network = await prisma.docker_networks.findUnique({
+			where: { id },
+			include: {
+				hosts: {
+					select: {
+						id: true,
+						friendly_name: true,
+						hostname: true,
+						ip: true,
+						os_type: true,
+						os_version: true,
+					},
+				},
+			},
+		});
+
+		if (!network) {
+			return res.status(404).json({ error: "Network not found" });
+		}
+
+		res.json(convertBigIntToString({ network }));
+	} catch (error) {
+		console.error("Error fetching network detail:", error);
+		res.status(500).json({ error: "Failed to fetch network detail" });
+	}
+});
+
 // GET /api/v1/docker/agent - Serve the Docker agent installation script
 router.get("/agent", async (_req, res) => {
 	try {
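A sketch of what an agent-side submission to the new integration endpoint could look like, based on the headers and body fields the handler reads; the server URL, credentials, and sample container values below are placeholders:

const PATCHMON_URL = process.env.PATCHMON_URL || "https://patchmon.example.com";

async function sendDockerInventory() {
	const res = await fetch(`${PATCHMON_URL}/api/v1/integrations/docker`, {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			"X-API-ID": process.env.PATCHMON_API_ID,
			"X-API-KEY": process.env.PATCHMON_API_KEY,
		},
		body: JSON.stringify({
			hostname: "docker-host-01", // illustrative
			machine_id: "0123456789abcdef", // illustrative
			agent_version: "1.3.2",
			containers: [
				{
					container_id: "abc123def456",
					name: "web",
					image_repository: "nginx",
					image_tag: "1.27",
					image_name: "nginx:1.27",
					image_id: "sha256:1234",
					status: "running",
					state: "running",
				},
			],
			images: [],
			updates: [],
		}),
	});
	// Expected: { message, containers_received, images_received, updates_found }
	return res.json();
}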
@@ -776,4 +1269,66 @@ router.get("/agent", async (_req, res) => {
 	}
 });
 
+// DELETE /api/v1/docker/volumes/:id - Delete a volume
+router.delete("/volumes/:id", authenticateToken, async (req, res) => {
+	try {
+		const { id } = req.params;
+
+		// Check if volume exists
+		const volume = await prisma.docker_volumes.findUnique({
+			where: { id },
+		});
+
+		if (!volume) {
+			return res.status(404).json({ error: "Volume not found" });
+		}
+
+		// Delete the volume
+		await prisma.docker_volumes.delete({
+			where: { id },
+		});
+
+		console.log(`🗑️ Deleted volume: ${volume.name} (${id})`);
+
+		res.json({
+			success: true,
+			message: `Volume ${volume.name} deleted successfully`,
+		});
+	} catch (error) {
+		console.error("Error deleting volume:", error);
+		res.status(500).json({ error: "Failed to delete volume" });
+	}
+});
+
+// DELETE /api/v1/docker/networks/:id - Delete a network
+router.delete("/networks/:id", authenticateToken, async (req, res) => {
+	try {
+		const { id } = req.params;
+
+		// Check if network exists
+		const network = await prisma.docker_networks.findUnique({
+			where: { id },
+		});
+
+		if (!network) {
+			return res.status(404).json({ error: "Network not found" });
+		}
+
+		// Delete the network
+		await prisma.docker_networks.delete({
+			where: { id },
+		});
+
+		console.log(`🗑️ Deleted network: ${network.name} (${id})`);
+
+		res.json({
+			success: true,
+			message: `Network ${network.name} deleted successfully`,
+		});
+	} catch (error) {
+		console.error("Error deleting network:", error);
+		res.status(500).json({ error: "Failed to delete network" });
+	}
+});
+
 module.exports = router;
@@ -24,7 +24,15 @@ router.get("/", authenticateToken, async (_req, res) => {
 			},
 		});
 
-		res.json(hostGroups);
+		// Transform the count field to match frontend expectations
+		const transformedGroups = hostGroups.map((group) => ({
+			...group,
+			_count: {
+				hosts: group._count.host_group_memberships,
+			},
+		}));
+
+		res.json(transformedGroups);
 	} catch (error) {
 		console.error("Error fetching host groups:", error);
 		res.status(500).json({ error: "Failed to fetch host groups" });
@@ -10,6 +10,7 @@ const {
 	requireManageHosts,
 	requireManageSettings,
 } = require("../middleware/permissions");
+const { queueManager, QUEUE_NAMES } = require("../services/automation");
 
 const router = express.Router();
 const prisma = getPrismaClient();
@@ -356,6 +357,26 @@ router.post(
 			});
 		} catch (error) {
 			console.error("Host creation error:", error);
+
+			// Check if error is related to connection pool exhaustion
+			if (
+				error.message &&
+				(error.message.includes("connection pool") ||
+					error.message.includes("Timed out fetching") ||
+					error.message.includes("pool timeout"))
+			) {
+				console.error("⚠️ DATABASE CONNECTION POOL EXHAUSTED!");
+				console.error(
+					`⚠️ Current limit: DB_CONNECTION_LIMIT=${process.env.DB_CONNECTION_LIMIT || "30"}`,
+				);
+				console.error(
+					`⚠️ Pool timeout: DB_POOL_TIMEOUT=${process.env.DB_POOL_TIMEOUT || "20"}s`,
+				);
+				console.error(
+					"⚠️ Suggestion: Increase DB_CONNECTION_LIMIT in your .env file",
+				);
+			}
+
 			res.status(500).json({ error: "Failed to create host" });
 		}
 	},
@@ -786,19 +807,41 @@ router.get("/info", validateApiCredentials, async (req, res) => {
 // Ping endpoint for health checks (now uses API credentials)
 router.post("/ping", validateApiCredentials, async (req, res) => {
   try {
-    // Update last update timestamp
+    const now = new Date();
+    const lastUpdate = req.hostRecord.last_update;
+
+    // Detect if this is an agent startup (first ping or after long absence)
+    const timeSinceLastUpdate = lastUpdate ? now - lastUpdate : null;
+    const isStartup =
+      !timeSinceLastUpdate || timeSinceLastUpdate > 5 * 60 * 1000; // 5 minutes
+
+    // Log agent startup
+    if (isStartup) {
+      console.log(
+        `🚀 Agent startup detected: ${req.hostRecord.friendly_name} (${req.hostRecord.hostname || req.hostRecord.api_id})`,
+      );
+
+      // Check if status was previously offline
+      if (req.hostRecord.status === "offline") {
+        console.log(`✅ Agent back online: ${req.hostRecord.friendly_name}`);
+      }
+    }
+
+    // Update last update timestamp and set status to active
     await prisma.hosts.update({
       where: { id: req.hostRecord.id },
       data: {
-        last_update: new Date(),
+        last_update: now,
-        updated_at: new Date(),
+        updated_at: now,
+        status: "active",
       },
     });

     const response = {
       message: "Ping successful",
-      timestamp: new Date().toISOString(),
+      timestamp: now.toISOString(),
       friendlyName: req.hostRecord.friendly_name,
+      agentStartup: isStartup,
     };

     // Check if this is a crontab update trigger
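For orientation, a hedged sketch of what an agent-side call to this ping endpoint could look like. The mount path for these host routes and the exact header names checked by validateApiCredentials are not shown in this excerpt, so both are assumptions (the headers mirror the X-API-ID / X-API-KEY pair used by the Docker integration route later in this diff):

    // Sketch only. Assumptions: host routes are mounted under /api/v1/hosts, and
    // validateApiCredentials reads X-API-ID / X-API-KEY headers; the real
    // middleware may differ.
    async function pingServer() {
      const res = await fetch(`${process.env.PATCHMON_URL}/api/v1/hosts/ping`, {
        method: "POST",
        headers: {
          "X-API-ID": process.env.API_ID,
          "X-API-KEY": process.env.API_KEY,
        },
      });
      const body = await res.json();
      if (body.agentStartup) {
        console.log("Server treated this ping as an agent startup");
      }
      return body;
    }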
@@ -1345,6 +1388,66 @@ router.delete(
   },
 );

+// Force immediate report from agent
+router.post(
+  "/:hostId/fetch-report",
+  authenticateToken,
+  requireManageHosts,
+  async (req, res) => {
+    try {
+      const { hostId } = req.params;
+
+      // Get host to verify it exists
+      const host = await prisma.hosts.findUnique({
+        where: { id: hostId },
+      });
+
+      if (!host) {
+        return res.status(404).json({ error: "Host not found" });
+      }
+
+      // Get the agent-commands queue
+      const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
+
+      if (!queue) {
+        return res.status(500).json({
+          error: "Queue not available",
+        });
+      }
+
+      // Add job to queue
+      const job = await queue.add(
+        "report_now",
+        {
+          api_id: host.api_id,
+          type: "report_now",
+        },
+        {
+          attempts: 3,
+          backoff: {
+            type: "exponential",
+            delay: 2000,
+          },
+        },
+      );
+
+      res.json({
+        success: true,
+        message: "Report fetch queued successfully",
+        jobId: job.id,
+        host: {
+          id: host.id,
+          friendlyName: host.friendly_name,
+          apiId: host.api_id,
+        },
+      });
+    } catch (error) {
+      console.error("Force fetch report error:", error);
+      res.status(500).json({ error: "Failed to fetch report" });
+    }
+  },
+);
+
 // Toggle agent auto-update setting
 router.patch(
   "/:hostId/auto-update",
@@ -1388,6 +1491,66 @@ router.patch(
   },
 );

+// Force agent update for specific host
+router.post(
+  "/:hostId/force-agent-update",
+  authenticateToken,
+  requireManageHosts,
+  async (req, res) => {
+    try {
+      const { hostId } = req.params;
+
+      // Get host to verify it exists
+      const host = await prisma.hosts.findUnique({
+        where: { id: hostId },
+      });
+
+      if (!host) {
+        return res.status(404).json({ error: "Host not found" });
+      }
+
+      // Get the agent-commands queue
+      const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
+
+      if (!queue) {
+        return res.status(500).json({
+          error: "Queue not available",
+        });
+      }
+
+      // Add job to queue
+      const job = await queue.add(
+        "update_agent",
+        {
+          api_id: host.api_id,
+          type: "update_agent",
+        },
+        {
+          attempts: 3,
+          backoff: {
+            type: "exponential",
+            delay: 2000,
+          },
+        },
+      );
+
+      res.json({
+        success: true,
+        message: "Agent update queued successfully",
+        jobId: job.id,
+        host: {
+          id: host.id,
+          friendlyName: host.friendly_name,
+          apiId: host.api_id,
+        },
+      });
+    } catch (error) {
+      console.error("Force agent update error:", error);
+      res.status(500).json({ error: "Failed to force agent update" });
+    }
+  },
+);
+
 // Serve the installation script (requires API authentication)
 router.get("/install", async (req, res) => {
   try {
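Both endpoints above only enqueue work; the consumer of the agent-commands queue is not part of this excerpt. A minimal sketch of what such a worker might look like, assuming BullMQ, the queue name "agent-commands", a local Redis, and a hypothetical pushToAgent() helper that relays the command over the agent WebSocket:

    // Sketch only; none of these names are confirmed by this diff.
    const { Worker } = require("bullmq");

    async function pushToAgent(apiId, command) {
      // hypothetical: look up the agent's WebSocket by api_id and send the command
    }

    const worker = new Worker(
      "agent-commands",
      async (job) => {
        // job.name is "report_now" or "update_agent"; job.data carries api_id and type
        await pushToAgent(job.data.api_id, { type: job.data.type });
      },
      { connection: { host: "127.0.0.1", port: 6379 } },
    );

    worker.on("failed", (job, err) => {
      console.error(`agent-commands job ${job?.id} failed:`, err.message);
    });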
@@ -1441,10 +1604,12 @@ router.get("/install", async (req, res) => {

     // Determine curl flags dynamically from settings (ignore self-signed)
     let curlFlags = "-s";
+    let skipSSLVerify = "false";
     try {
       const settings = await prisma.settings.findFirst();
       if (settings && settings.ignore_ssl_self_signed === true) {
         curlFlags = "-sk";
+        skipSSLVerify = "true";
       }
     } catch (_) {}

@@ -1454,12 +1619,13 @@ router.get("/install", async (req, res) => {
     // Get architecture parameter (default to amd64)
     const architecture = req.query.arch || "amd64";

-    // Inject the API credentials, server URL, curl flags, force flag, and architecture into the script
+    // Inject the API credentials, server URL, curl flags, SSL verify flag, force flag, and architecture into the script
     const envVars = `#!/bin/bash
 export PATCHMON_URL="${serverUrl}"
 export API_ID="${host.api_id}"
 export API_KEY="${host.api_key}"
 export CURL_FLAGS="${curlFlags}"
+export SKIP_SSL_VERIFY="${skipSSLVerify}"
 export FORCE_INSTALL="${forceInstall ? "true" : "false"}"
 export ARCHITECTURE="${architecture}"
backend/src/routes/integrationRoutes.js (new file, 356 lines)
@@ -0,0 +1,356 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { v4: uuidv4 } = require("uuid");

const prisma = getPrismaClient();
const router = express.Router();

// POST /api/v1/integrations/docker - Docker data collection endpoint
router.post("/docker", async (req, res) => {
  try {
    const apiId = req.headers["x-api-id"];
    const apiKey = req.headers["x-api-key"];
    const {
      containers,
      images,
      volumes,
      networks,
      updates,
      daemon_info: _daemon_info,
      hostname,
      machine_id,
      agent_version: _agent_version,
    } = req.body;

    console.log(
      `[Docker Integration] Received data from ${hostname || machine_id}`,
    );

    // Validate API credentials
    const host = await prisma.hosts.findFirst({
      where: { api_id: apiId, api_key: apiKey },
    });

    if (!host) {
      console.warn("[Docker Integration] Invalid API credentials");
      return res.status(401).json({ error: "Invalid API credentials" });
    }

    console.log(
      `[Docker Integration] Processing for host: ${host.friendly_name}`,
    );

    const now = new Date();

    // Helper function to validate and parse dates
    const parseDate = (dateString) => {
      if (!dateString) return now;
      const date = new Date(dateString);
      return Number.isNaN(date.getTime()) ? now : date;
    };

    let containersProcessed = 0;
    let imagesProcessed = 0;
    let volumesProcessed = 0;
    let networksProcessed = 0;
    let updatesProcessed = 0;

    // Process containers
    if (containers && Array.isArray(containers)) {
      console.log(
        `[Docker Integration] Processing ${containers.length} containers`,
      );
      for (const containerData of containers) {
        const containerId = uuidv4();

        // Find or create image
        let imageId = null;
        if (containerData.image_repository && containerData.image_tag) {
          const image = await prisma.docker_images.upsert({
            where: {
              repository_tag_image_id: {
                repository: containerData.image_repository,
                tag: containerData.image_tag,
                image_id: containerData.image_id || "unknown",
              },
            },
            update: {
              last_checked: now,
              updated_at: now,
            },
            create: {
              id: uuidv4(),
              repository: containerData.image_repository,
              tag: containerData.image_tag,
              image_id: containerData.image_id || "unknown",
              source: containerData.image_source || "docker-hub",
              created_at: parseDate(containerData.created_at),
              updated_at: now,
            },
          });
          imageId = image.id;
        }

        // Upsert container
        await prisma.docker_containers.upsert({
          where: {
            host_id_container_id: {
              host_id: host.id,
              container_id: containerData.container_id,
            },
          },
          update: {
            name: containerData.name,
            image_id: imageId,
            image_name: containerData.image_name,
            image_tag: containerData.image_tag || "latest",
            status: containerData.status,
            state: containerData.state || containerData.status,
            ports: containerData.ports || null,
            started_at: containerData.started_at
              ? parseDate(containerData.started_at)
              : null,
            updated_at: now,
            last_checked: now,
          },
          create: {
            id: containerId,
            host_id: host.id,
            container_id: containerData.container_id,
            name: containerData.name,
            image_id: imageId,
            image_name: containerData.image_name,
            image_tag: containerData.image_tag || "latest",
            status: containerData.status,
            state: containerData.state || containerData.status,
            ports: containerData.ports || null,
            created_at: parseDate(containerData.created_at),
            started_at: containerData.started_at
              ? parseDate(containerData.started_at)
              : null,
            updated_at: now,
          },
        });
        containersProcessed++;
      }
    }

    // Process standalone images
    if (images && Array.isArray(images)) {
      console.log(`[Docker Integration] Processing ${images.length} images`);
      for (const imageData of images) {
        await prisma.docker_images.upsert({
          where: {
            repository_tag_image_id: {
              repository: imageData.repository,
              tag: imageData.tag,
              image_id: imageData.image_id,
            },
          },
          update: {
            size_bytes: imageData.size_bytes
              ? BigInt(imageData.size_bytes)
              : null,
            digest: imageData.digest || null,
            last_checked: now,
            updated_at: now,
          },
          create: {
            id: uuidv4(),
            repository: imageData.repository,
            tag: imageData.tag,
            image_id: imageData.image_id,
            digest: imageData.digest,
            size_bytes: imageData.size_bytes
              ? BigInt(imageData.size_bytes)
              : null,
            source: imageData.source || "docker-hub",
            created_at: parseDate(imageData.created_at),
            updated_at: now,
          },
        });
        imagesProcessed++;
      }
    }

    // Process volumes
    if (volumes && Array.isArray(volumes)) {
      console.log(`[Docker Integration] Processing ${volumes.length} volumes`);
      for (const volumeData of volumes) {
        await prisma.docker_volumes.upsert({
          where: {
            host_id_volume_id: {
              host_id: host.id,
              volume_id: volumeData.volume_id,
            },
          },
          update: {
            name: volumeData.name,
            driver: volumeData.driver || "local",
            mountpoint: volumeData.mountpoint || null,
            renderer: volumeData.renderer || null,
            scope: volumeData.scope || "local",
            labels: volumeData.labels || null,
            options: volumeData.options || null,
            size_bytes: volumeData.size_bytes
              ? BigInt(volumeData.size_bytes)
              : null,
            ref_count: volumeData.ref_count || 0,
            updated_at: now,
            last_checked: now,
          },
          create: {
            id: uuidv4(),
            host_id: host.id,
            volume_id: volumeData.volume_id,
            name: volumeData.name,
            driver: volumeData.driver || "local",
            mountpoint: volumeData.mountpoint || null,
            renderer: volumeData.renderer || null,
            scope: volumeData.scope || "local",
            labels: volumeData.labels || null,
            options: volumeData.options || null,
            size_bytes: volumeData.size_bytes
              ? BigInt(volumeData.size_bytes)
              : null,
            ref_count: volumeData.ref_count || 0,
            created_at: parseDate(volumeData.created_at),
            updated_at: now,
          },
        });
        volumesProcessed++;
      }
    }

    // Process networks
    if (networks && Array.isArray(networks)) {
      console.log(
        `[Docker Integration] Processing ${networks.length} networks`,
      );
      for (const networkData of networks) {
        await prisma.docker_networks.upsert({
          where: {
            host_id_network_id: {
              host_id: host.id,
              network_id: networkData.network_id,
            },
          },
          update: {
            name: networkData.name,
            driver: networkData.driver,
            scope: networkData.scope || "local",
            ipv6_enabled: networkData.ipv6_enabled || false,
            internal: networkData.internal || false,
            attachable:
              networkData.attachable !== undefined
                ? networkData.attachable
                : true,
            ingress: networkData.ingress || false,
            config_only: networkData.config_only || false,
            labels: networkData.labels || null,
            ipam: networkData.ipam || null,
            container_count: networkData.container_count || 0,
            updated_at: now,
            last_checked: now,
          },
          create: {
            id: uuidv4(),
            host_id: host.id,
            network_id: networkData.network_id,
            name: networkData.name,
            driver: networkData.driver,
            scope: networkData.scope || "local",
            ipv6_enabled: networkData.ipv6_enabled || false,
            internal: networkData.internal || false,
            attachable:
              networkData.attachable !== undefined
                ? networkData.attachable
                : true,
            ingress: networkData.ingress || false,
            config_only: networkData.config_only || false,
            labels: networkData.labels || null,
            ipam: networkData.ipam || null,
            container_count: networkData.container_count || 0,
            created_at: networkData.created_at
              ? parseDate(networkData.created_at)
              : null,
            updated_at: now,
          },
        });
        networksProcessed++;
      }
    }

    // Process updates
    if (updates && Array.isArray(updates)) {
      console.log(`[Docker Integration] Processing ${updates.length} updates`);
      for (const updateData of updates) {
        // Find the image by repository and image_id
        const image = await prisma.docker_images.findFirst({
          where: {
            repository: updateData.repository,
            tag: updateData.current_tag,
            image_id: updateData.image_id,
          },
        });

        if (image) {
          // Store digest info in changelog_url field as JSON
          const digestInfo = JSON.stringify({
            method: "digest_comparison",
            current_digest: updateData.current_digest,
            available_digest: updateData.available_digest,
          });

          // Upsert the update record
          await prisma.docker_image_updates.upsert({
            where: {
              image_id_available_tag: {
                image_id: image.id,
                available_tag: updateData.available_tag,
              },
            },
            update: {
              updated_at: now,
              changelog_url: digestInfo,
              severity: "digest_changed",
            },
            create: {
              id: uuidv4(),
              image_id: image.id,
              current_tag: updateData.current_tag,
              available_tag: updateData.available_tag,
              severity: "digest_changed",
              changelog_url: digestInfo,
              updated_at: now,
            },
          });
          updatesProcessed++;
        }
      }
    }

    console.log(
      `[Docker Integration] Successfully processed: ${containersProcessed} containers, ${imagesProcessed} images, ${volumesProcessed} volumes, ${networksProcessed} networks, ${updatesProcessed} updates`,
    );

    res.json({
      message: "Docker data collected successfully",
      containers_received: containersProcessed,
      images_received: imagesProcessed,
      volumes_received: volumesProcessed,
      networks_received: networksProcessed,
      updates_found: updatesProcessed,
    });
  } catch (error) {
    console.error("[Docker Integration] Error collecting Docker data:", error);
    console.error("[Docker Integration] Error stack:", error.stack);
    res.status(500).json({
      error: "Failed to collect Docker data",
      message: error.message,
      details: process.env.NODE_ENV === "development" ? error.stack : undefined,
    });
  }
});

module.exports = router;
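A hedged sketch of the kind of payload an agent could POST to this endpoint. The header and field names match the destructuring at the top of the route; all values are illustrative:

    // Sketch: an illustrative agent submission with a single container; every value is made up.
    await fetch(`${process.env.PATCHMON_URL}/api/v1/integrations/docker`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "X-API-ID": process.env.API_ID,
        "X-API-KEY": process.env.API_KEY,
      },
      body: JSON.stringify({
        hostname: "docker-host-01",
        machine_id: "example-machine-id",
        agent_version: "1.3.2",
        containers: [
          {
            container_id: "abc123def456",
            name: "nginx",
            image_repository: "nginx",
            image_tag: "1.27",
            image_id: "sha256:0123abcd",
            image_name: "nginx:1.27",
            status: "running",
            state: "running",
            ports: null,
            created_at: "2025-01-01T00:00:00Z",
            started_at: "2025-01-01T00:00:05Z",
          },
        ],
        images: [],
        volumes: [],
        networks: [],
        updates: [],
      }),
    });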
backend/src/routes/metricsRoutes.js (new file, 148 lines)
@@ -0,0 +1,148 @@
const express = require("express");
const { body, validationResult } = require("express-validator");
const { v4: uuidv4 } = require("uuid");
const { authenticateToken } = require("../middleware/auth");
const { requireManageSettings } = require("../middleware/permissions");
const { getSettings, updateSettings } = require("../services/settingsService");
const { queueManager, QUEUE_NAMES } = require("../services/automation");

const router = express.Router();

// Get metrics settings
router.get("/", authenticateToken, requireManageSettings, async (_req, res) => {
  try {
    const settings = await getSettings();

    // Generate anonymous ID if it doesn't exist
    if (!settings.metrics_anonymous_id) {
      const anonymousId = uuidv4();
      await updateSettings(settings.id, {
        metrics_anonymous_id: anonymousId,
      });
      settings.metrics_anonymous_id = anonymousId;
    }

    res.json({
      metrics_enabled: settings.metrics_enabled ?? true,
      metrics_anonymous_id: settings.metrics_anonymous_id,
      metrics_last_sent: settings.metrics_last_sent,
    });
  } catch (error) {
    console.error("Metrics settings fetch error:", error);
    res.status(500).json({ error: "Failed to fetch metrics settings" });
  }
});

// Update metrics settings
router.put(
  "/",
  authenticateToken,
  requireManageSettings,
  [
    body("metrics_enabled")
      .isBoolean()
      .withMessage("Metrics enabled must be a boolean"),
  ],
  async (req, res) => {
    try {
      const errors = validationResult(req);
      if (!errors.isEmpty()) {
        return res.status(400).json({ errors: errors.array() });
      }

      const { metrics_enabled } = req.body;
      const settings = await getSettings();

      await updateSettings(settings.id, {
        metrics_enabled,
      });

      console.log(
        `Metrics ${metrics_enabled ? "enabled" : "disabled"} by user`,
      );

      res.json({
        message: "Metrics settings updated successfully",
        metrics_enabled,
      });
    } catch (error) {
      console.error("Metrics settings update error:", error);
      res.status(500).json({ error: "Failed to update metrics settings" });
    }
  },
);

// Regenerate anonymous ID
router.post(
  "/regenerate-id",
  authenticateToken,
  requireManageSettings,
  async (_req, res) => {
    try {
      const settings = await getSettings();
      const newAnonymousId = uuidv4();

      await updateSettings(settings.id, {
        metrics_anonymous_id: newAnonymousId,
      });

      console.log("Anonymous ID regenerated");

      res.json({
        message: "Anonymous ID regenerated successfully",
        metrics_anonymous_id: newAnonymousId,
      });
    } catch (error) {
      console.error("Anonymous ID regeneration error:", error);
      res.status(500).json({ error: "Failed to regenerate anonymous ID" });
    }
  },
);

// Manually send metrics now
router.post(
  "/send-now",
  authenticateToken,
  requireManageSettings,
  async (_req, res) => {
    try {
      const settings = await getSettings();

      if (!settings.metrics_enabled) {
        return res.status(400).json({
          error: "Metrics are disabled. Please enable metrics first.",
        });
      }

      // Trigger metrics directly (no queue delay for manual trigger)
      const metricsReporting =
        queueManager.automations[QUEUE_NAMES.METRICS_REPORTING];
      const result = await metricsReporting.process(
        { name: "manual-send" },
        false,
      );

      if (result.success) {
        console.log("✅ Manual metrics sent successfully");
        res.json({
          message: "Metrics sent successfully",
          data: result,
        });
      } else {
        console.error("❌ Failed to send metrics:", result);
        res.status(500).json({
          error: "Failed to send metrics",
          details: result.reason || result.error,
        });
      }
    } catch (error) {
      console.error("Send metrics error:", error);
      res.status(500).json({
        error: "Failed to send metrics",
        details: error.message,
      });
    }
  },
);

module.exports = router;
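A short usage sketch against these endpoints once they are mounted at /api/v1/metrics (see the server.js hunk further down); the bearer token is a placeholder:

    // Sketch: enable metrics, then trigger an immediate send.
    const token = process.env.ADMIN_JWT; // placeholder token from a prior login
    const headers = {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    };
    await fetch("/api/v1/metrics", {
      method: "PUT",
      headers,
      body: JSON.stringify({ metrics_enabled: true }),
    });
    await fetch("/api/v1/metrics/send-now", { method: "POST", headers });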
@@ -101,37 +101,41 @@ router.get("/", async (req, res) => {
       prisma.packages.count({ where }),
     ]);

-    // Get additional stats for each package
+    // OPTIMIZATION: Batch query all stats instead of N individual queries
-    const packagesWithStats = await Promise.all(
+    const packageIds = packages.map((pkg) => pkg.id);
-      packages.map(async (pkg) => {
-        // Build base where clause for this package
-        const baseWhere = { package_id: pkg.id };

-        // If host filter is specified, add host filter to all queries
+    // Get all counts and host data in 3 batch queries instead of N*3 queries
-        const hostWhere = host ? { ...baseWhere, host_id: host } : baseWhere;
+    const [allUpdatesCounts, allSecurityCounts, allPackageHostsData] =
+      await Promise.all([
-        const [updatesCount, securityCount, packageHosts] = await Promise.all([
+        // Batch count all packages that need updates
-          prisma.host_packages.count({
+        prisma.host_packages.groupBy({
+          by: ["package_id"],
           where: {
-            ...hostWhere,
+            package_id: { in: packageIds },
             needs_update: true,
+            ...(host ? { host_id: host } : {}),
           },
+          _count: { id: true },
         }),
-          prisma.host_packages.count({
+        // Batch count all packages with security updates
+        prisma.host_packages.groupBy({
+          by: ["package_id"],
           where: {
-            ...hostWhere,
+            package_id: { in: packageIds },
             needs_update: true,
             is_security_update: true,
+            ...(host ? { host_id: host } : {}),
           },
+          _count: { id: true },
         }),
+        // Batch fetch all host data for packages
         prisma.host_packages.findMany({
           where: {
-            ...hostWhere,
+            package_id: { in: packageIds },
-            // If host filter is specified, include all packages for that host
+            ...(host ? { host_id: host } : { needs_update: true }),
-            // Otherwise, only include packages that need updates
-            ...(host ? {} : { needs_update: true }),
           },
           select: {
+            package_id: true,
             hosts: {
               select: {
                 id: true,
@@ -145,14 +149,27 @@ router.get("/", async (req, res) => {
             needs_update: true,
             is_security_update: true,
           },
-          take: 10, // Limit to first 10 for performance
+          // Limit to first 10 per package
+          take: 100, // Increased from package-based limit
         }),
       ]);

-      return {
-        ...pkg,
-        packageHostsCount: pkg._count.host_packages,
-        packageHosts: packageHosts.map((hp) => ({
+    // Create lookup maps for O(1) access
+    const updatesCountMap = new Map(
+      allUpdatesCounts.map((item) => [item.package_id, item._count.id]),
+    );
+    const securityCountMap = new Map(
+      allSecurityCounts.map((item) => [item.package_id, item._count.id]),
+    );
+    const packageHostsMap = new Map();
+
+    // Group host data by package_id
+    for (const hp of allPackageHostsData) {
+      if (!packageHostsMap.has(hp.package_id)) {
+        packageHostsMap.set(hp.package_id, []);
+      }
+      const hosts = packageHostsMap.get(hp.package_id);
+      hosts.push({
         hostId: hp.hosts.id,
         friendlyName: hp.hosts.friendly_name,
         osType: hp.hosts.os_type,
@@ -160,15 +177,31 @@ router.get("/", async (req, res) => {
         availableVersion: hp.available_version,
         needsUpdate: hp.needs_update,
         isSecurityUpdate: hp.is_security_update,
-      })),
+      });
+
+      // Limit to 10 hosts per package
+      if (hosts.length > 10) {
+        packageHostsMap.set(hp.package_id, hosts.slice(0, 10));
+      }
+    }
+
+    // Map packages with stats from lookup maps (no more DB queries!)
+    const packagesWithStats = packages.map((pkg) => {
+      const updatesCount = updatesCountMap.get(pkg.id) || 0;
+      const securityCount = securityCountMap.get(pkg.id) || 0;
+      const packageHosts = packageHostsMap.get(pkg.id) || [];
+
+      return {
+        ...pkg,
+        packageHostsCount: pkg._count.host_packages,
+        packageHosts,
         stats: {
           totalInstalls: pkg._count.host_packages,
           updatesNeeded: updatesCount,
           securityUpdates: securityCount,
         },
       };
-      }),
-    );
+    });

     res.json({
       packages: packagesWithStats,
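For readers unfamiliar with Prisma's groupBy, the batched counts above come back as one row per package_id, which is what makes the O(1) Map lookups possible. A small illustration with invented values:

    // groupBy with _count: { id: true } returns one row per grouped key, e.g.:
    const rows = [
      { package_id: "pkg-a", _count: { id: 4 } },
      { package_id: "pkg-b", _count: { id: 1 } },
    ];
    const counts = new Map(rows.map((r) => [r.package_id, r._count.id]));
    console.log(counts.get("pkg-a")); // 4
    console.log(counts.get("missing") || 0); // 0 (packages with no pending updates fall back to zero)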
@@ -158,6 +158,7 @@ router.put(
       logoDark,
       logoLight,
       favicon,
+      colorTheme,
     } = req.body;

     // Get current settings to check for update interval changes
@@ -189,6 +190,7 @@ router.put(
     if (logoDark !== undefined) updateData.logo_dark = logoDark;
     if (logoLight !== undefined) updateData.logo_light = logoLight;
     if (favicon !== undefined) updateData.favicon = favicon;
+    if (colorTheme !== undefined) updateData.color_theme = colorTheme;

     const updatedSettings = await updateSettings(
       currentSettings.id,
@@ -261,8 +261,10 @@ router.post(
     body("username").notEmpty().withMessage("Username is required"),
     body("token")
       .isLength({ min: 6, max: 6 })
-      .withMessage("Token must be 6 digits"),
-    body("token").isNumeric().withMessage("Token must contain only numbers"),
+      .withMessage("Token must be 6 characters"),
+    body("token")
+      .matches(/^[A-Z0-9]{6}$/)
+      .withMessage("Token must be 6 alphanumeric characters"),
   ],
   async (req, res) => {
     try {
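The validator now accepts uppercase alphanumeric codes instead of purely numeric ones. The project's actual code generator is not shown in this diff; a hedged sketch of producing a value that satisfies /^[A-Z0-9]{6}$/:

    const crypto = require("node:crypto");

    // Produce a 6-character code; rejection sampling avoids modulo bias.
    function generateToken() {
      const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
      let out = "";
      while (out.length < 6) {
        const byte = crypto.randomBytes(1)[0];
        if (byte < 252) out += alphabet[byte % 36]; // 252 = 36 * 7
      }
      return out;
    }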
backend/src/routes/userPreferencesRoutes.js (new file, 105 lines)
@@ -0,0 +1,105 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { authenticateToken } = require("../middleware/auth");

const router = express.Router();
const prisma = getPrismaClient();

/**
 * GET /api/v1/user/preferences
 * Get current user's preferences (theme and color theme)
 */
router.get("/", authenticateToken, async (req, res) => {
  try {
    const userId = req.user.id;

    const user = await prisma.users.findUnique({
      where: { id: userId },
      select: {
        theme_preference: true,
        color_theme: true,
      },
    });

    if (!user) {
      return res.status(404).json({ error: "User not found" });
    }

    res.json({
      theme_preference: user.theme_preference || "dark",
      color_theme: user.color_theme || "cyber_blue",
    });
  } catch (error) {
    console.error("Error fetching user preferences:", error);
    res.status(500).json({ error: "Failed to fetch user preferences" });
  }
});

/**
 * PATCH /api/v1/user/preferences
 * Update current user's preferences
 */
router.patch("/", authenticateToken, async (req, res) => {
  try {
    const userId = req.user.id;
    const { theme_preference, color_theme } = req.body;

    // Validate inputs
    const updateData = {};
    if (theme_preference !== undefined) {
      if (!["light", "dark"].includes(theme_preference)) {
        return res.status(400).json({
          error: "Invalid theme preference. Must be 'light' or 'dark'",
        });
      }
      updateData.theme_preference = theme_preference;
    }

    if (color_theme !== undefined) {
      const validColorThemes = [
        "default",
        "cyber_blue",
        "neon_purple",
        "matrix_green",
        "ocean_blue",
        "sunset_gradient",
      ];
      if (!validColorThemes.includes(color_theme)) {
        return res.status(400).json({
          error: `Invalid color theme. Must be one of: ${validColorThemes.join(", ")}`,
        });
      }
      updateData.color_theme = color_theme;
    }

    if (Object.keys(updateData).length === 0) {
      return res
        .status(400)
        .json({ error: "No preferences provided to update" });
    }

    updateData.updated_at = new Date();

    const updatedUser = await prisma.users.update({
      where: { id: userId },
      data: updateData,
      select: {
        theme_preference: true,
        color_theme: true,
      },
    });

    res.json({
      message: "Preferences updated successfully",
      preferences: {
        theme_preference: updatedUser.theme_preference,
        color_theme: updatedUser.color_theme,
      },
    });
  } catch (error) {
    console.error("Error updating user preferences:", error);
    res.status(500).json({ error: "Failed to update user preferences" });
  }
});

module.exports = router;
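A brief sketch of how a frontend client might persist a theme change through this route (mounted at /api/v1/user/preferences in the server.js hunk below; the token handling is a placeholder):

    // Sketch: persist a theme change for the logged-in user.
    const token = localStorage.getItem("token"); // placeholder; actual storage may differ
    await fetch("/api/v1/user/preferences", {
      method: "PATCH",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${token}`,
      },
      body: JSON.stringify({ theme_preference: "light", color_theme: "matrix_green" }),
    });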
@@ -14,13 +14,16 @@ const router = express.Router();
 function getCurrentVersion() {
   try {
     const packageJson = require("../../package.json");
-    return packageJson?.version || "1.3.0";
+    if (!packageJson?.version) {
+      throw new Error("Version not found in package.json");
+    }
+    return packageJson.version;
   } catch (packageError) {
-    console.warn(
-      "Could not read version from package.json, using fallback:",
+    console.error(
+      "Could not read version from package.json:",
       packageError.message,
     );
-    return "1.3.0";
+    return "unknown";
   }
 }
@@ -11,7 +11,31 @@ const {

 const router = express.Router();

-// Get WebSocket connection status by api_id (no database access - pure memory lookup)
+// Get WebSocket connection status for multiple hosts at once (bulk endpoint)
+router.get("/status", authenticateToken, async (req, res) => {
+  try {
+    const { apiIds } = req.query; // Comma-separated list of api_ids
+    const idArray = apiIds ? apiIds.split(",").filter((id) => id.trim()) : [];
+
+    const statusMap = {};
+    idArray.forEach((apiId) => {
+      statusMap[apiId] = getConnectionInfo(apiId);
+    });
+
+    res.json({
+      success: true,
+      data: statusMap,
+    });
+  } catch (error) {
+    console.error("Error fetching bulk WebSocket status:", error);
+    res.status(500).json({
+      success: false,
+      error: "Failed to fetch WebSocket status",
+    });
+  }
+});
+
+// Get WebSocket connection status by api_id (single endpoint)
 router.get("/status/:apiId", authenticateToken, async (req, res) => {
   try {
     const { apiId } = req.params;
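A quick sketch of how a dashboard could batch its connection checks through the new bulk endpoint instead of issuing one request per host (the route is mounted at /api/v1/ws; the token is a placeholder):

    // Sketch: one request for many hosts instead of N single-status requests.
    const token = process.env.ADMIN_JWT; // placeholder
    const apiIds = hosts.map((h) => h.api_id).join(","); // hosts: array from a prior hosts fetch
    const res = await fetch(`/api/v1/ws/status?apiIds=${encodeURIComponent(apiIds)}`, {
      headers: { Authorization: `Bearer ${token}` },
    });
    const { data } = await res.json();
    // data is keyed by api_id, e.g. data["<api_id>"] holds that agent's connection info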
@@ -66,8 +66,11 @@ const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
 const gethomepageRoutes = require("./routes/gethomepageRoutes");
 const automationRoutes = require("./routes/automationRoutes");
 const dockerRoutes = require("./routes/dockerRoutes");
+const integrationRoutes = require("./routes/integrationRoutes");
 const wsRoutes = require("./routes/wsRoutes");
 const agentVersionRoutes = require("./routes/agentVersionRoutes");
+const metricsRoutes = require("./routes/metricsRoutes");
+const userPreferencesRoutes = require("./routes/userPreferencesRoutes");
 const { initSettings } = require("./services/settingsService");
 const { queueManager } = require("./services/automation");
 const { authenticateToken, requireAdmin } = require("./middleware/auth");
@@ -384,6 +387,7 @@ app.use(
       "Authorization",
       "Cookie",
       "X-Requested-With",
+      "X-Device-ID", // Allow device ID header for TFA remember-me functionality
     ],
   }),
 );
@@ -471,8 +475,11 @@ app.use(
 app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
 app.use(`/api/${apiVersion}/automation`, automationRoutes);
 app.use(`/api/${apiVersion}/docker`, dockerRoutes);
+app.use(`/api/${apiVersion}/integrations`, integrationRoutes);
 app.use(`/api/${apiVersion}/ws`, wsRoutes);
 app.use(`/api/${apiVersion}/agent`, agentVersionRoutes);
+app.use(`/api/${apiVersion}/metrics`, metricsRoutes);
+app.use(`/api/${apiVersion}/user/preferences`, userPreferencesRoutes);

 // Bull Board - will be populated after queue manager initializes
 let bullBoardRouter = null;
@@ -552,299 +559,6 @@ app.use(`/bullboard`, (req, res, next) => {
   return res.status(503).json({ error: "Bull Board not initialized yet" });
 });

-/*
-// OLD MIDDLEWARE - REMOVED FOR SIMPLIFICATION - DO NOT USE
-... (roughly 290 deleted lines: the commented-out legacy Bull Board session and
-token authentication middleware, with verbose console.log tracing of cookies,
-query tokens, Authorization headers and referer parsing; session creation via
-crypto.randomBytes with HTTP/HTTPS-aware cookie options and periodic session
-cleanup; plus a second commented-out /bullboard handler, marked "COMMENTED OUT
-- using simplified version above instead", that injected the token and created
-the session on the main page load) ...
-*/

 // Error handler specifically for Bull Board routes
 app.use("/bullboard", (err, req, res, _next) => {
   console.error("Bull Board error on", req.method, req.url);
@@ -1198,6 +912,15 @@ async function startServer() {
     initAgentWs(server, prisma);
     await agentVersionService.initialize();

+    // Send metrics on startup (silent - no console output)
+    try {
+      const metricsReporting =
+        queueManager.automations[QUEUE_NAMES.METRICS_REPORTING];
+      await metricsReporting.sendSilent();
+    } catch (_error) {
+      // Silent failure - don't block server startup if metrics fail
+    }
+
     server.listen(PORT, () => {
       if (process.env.ENABLE_LOGGING === "true") {
         logger.info(`Server running on port ${PORT}`);
@@ -428,26 +428,29 @@ class AgentVersionService {
   async getVersionInfo() {
     let hasUpdate = false;
     let updateStatus = "unknown";
-    let effectiveLatestVersion = this.currentVersion; // Always use local version if available
-
-    // If we have a local version, use it as the latest regardless of GitHub
-    if (this.currentVersion) {
-      effectiveLatestVersion = this.currentVersion;
-      console.log(
-        `🔄 Using local agent version ${this.currentVersion} as latest`,
-      );
-    } else if (this.latestVersion) {
-      // Fallback to GitHub version only if no local version
-      effectiveLatestVersion = this.latestVersion;
-      console.log(
-        `🔄 No local version found, using GitHub version ${this.latestVersion}`,
-      );
-    }
-
-    if (this.currentVersion && effectiveLatestVersion) {
+
+    // Latest version should ALWAYS come from GitHub, not from local binaries
+    // currentVersion = what's installed locally
+    // latestVersion = what's available on GitHub
+    if (this.latestVersion) {
+      console.log(`📦 Latest version from GitHub: ${this.latestVersion}`);
+    } else {
+      console.log(
+        `⚠️ No GitHub release version available (API may be unavailable)`,
+      );
+    }
+
+    if (this.currentVersion) {
+      console.log(`💾 Current local agent version: ${this.currentVersion}`);
+    } else {
+      console.log(`⚠️ No local agent binary found`);
+    }
+
+    // Determine update status by comparing current vs latest (from GitHub)
+    if (this.currentVersion && this.latestVersion) {
       const comparison = compareVersions(
         this.currentVersion,
-        effectiveLatestVersion,
+        this.latestVersion,
       );
       if (comparison < 0) {
         hasUpdate = true;
@@ -459,25 +462,25 @@ class AgentVersionService {
         hasUpdate = false;
         updateStatus = "up-to-date";
       }
-    } else if (effectiveLatestVersion && !this.currentVersion) {
+    } else if (this.latestVersion && !this.currentVersion) {
       hasUpdate = true;
       updateStatus = "no-agent";
-    } else if (this.currentVersion && !effectiveLatestVersion) {
+    } else if (this.currentVersion && !this.latestVersion) {
       // We have a current version but no latest version (GitHub API unavailable)
       hasUpdate = false;
       updateStatus = "github-unavailable";
-    } else if (!this.currentVersion && !effectiveLatestVersion) {
+    } else if (!this.currentVersion && !this.latestVersion) {
       updateStatus = "no-data";
     }

     return {
       currentVersion: this.currentVersion,
-      latestVersion: effectiveLatestVersion,
+      latestVersion: this.latestVersion, // Always return GitHub version, not local
       hasUpdate: hasUpdate,
       updateStatus: updateStatus,
       lastChecked: this.lastChecked,
       supportedArchitectures: this.supportedArchitectures,
-      status: effectiveLatestVersion ? "ready" : "no-releases",
+      status: this.latestVersion ? "ready" : "no-releases",
     };
   }
@@ -99,8 +99,22 @@ function init(server, prismaClient) {
  // Notify subscribers of connection
  notifyConnectionChange(apiId, true);

- ws.on("message", () => {
  ws.on("message", async (data) => {
-   // Currently we don't need to handle agent->server messages
    // Handle incoming messages from agent (e.g., Docker status updates)
    try {
      const message = JSON.parse(data.toString());

      if (message.type === "docker_status") {
        // Handle Docker container status events
        await handleDockerStatusEvent(apiId, message);
      }
      // Add more message types here as needed
    } catch (err) {
      console.error(
        `[agent-ws] error parsing message from ${apiId}:`,
        err,
      );
    }
  });

  ws.on("close", () => {

@@ -162,6 +176,15 @@ function pushSettingsUpdate(apiId, newInterval) {
  );
}

function pushUpdateAgent(apiId) {
  const ws = apiIdToSocket.get(apiId);
  safeSend(ws, JSON.stringify({ type: "update_agent" }));
}

function getConnectionByApiId(apiId) {
  return apiIdToSocket.get(apiId);
}

function pushUpdateNotification(apiId, updateInfo) {
  const ws = apiIdToSocket.get(apiId);
  if (ws && ws.readyState === WebSocket.OPEN) {

@@ -255,15 +278,73 @@ function subscribeToConnectionChanges(apiId, callback) {
  };
}

// Handle Docker container status events from agent
async function handleDockerStatusEvent(apiId, message) {
  try {
    const { event: _event, container_id, name, status, timestamp } = message;

    console.log(
      `[Docker Event] ${apiId}: Container ${name} (${container_id}) - ${status}`,
    );

    // Find the host
    const host = await prisma.hosts.findUnique({
      where: { api_id: apiId },
    });

    if (!host) {
      console.error(`[Docker Event] Host not found for api_id: ${apiId}`);
      return;
    }

    // Update container status in database
    const container = await prisma.docker_containers.findUnique({
      where: {
        host_id_container_id: {
          host_id: host.id,
          container_id: container_id,
        },
      },
    });

    if (container) {
      await prisma.docker_containers.update({
        where: { id: container.id },
        data: {
          status: status,
          state: status,
          updated_at: new Date(timestamp || Date.now()),
          last_checked: new Date(),
        },
      });

      console.log(
        `[Docker Event] Updated container ${name} status to ${status}`,
      );
    } else {
      console.log(
        `[Docker Event] Container ${name} not found in database (may be new)`,
      );
    }

    // TODO: Broadcast to connected dashboard clients via SSE or WebSocket
    // This would notify the frontend UI in real-time
  } catch (error) {
    console.error(`[Docker Event] Error handling Docker status event:`, error);
  }
}

module.exports = {
  init,
  broadcastSettingsUpdate,
  pushReportNow,
  pushSettingsUpdate,
  pushUpdateAgent,
  pushUpdateNotification,
  pushUpdateNotificationToAll,
  // Expose read-only view of connected agents
  getConnectedApiIds: () => Array.from(apiIdToSocket.keys()),
  getConnectionByApiId,
  isConnected: (apiId) => {
    const ws = apiIdToSocket.get(apiId);
    return !!ws && ws.readyState === WebSocket.OPEN;
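A minimal sketch of how another backend module might drive the new exports above; the require path and the wrapper function are illustrative, not part of the changeset:

const agentWs = require("../agentWs"); // path assumed relative to a sibling service

function forceAgentUpdate(apiId) {
  // Only push the update_agent command when the agent's WebSocket is open
  if (!agentWs.isConnected(apiId)) {
    throw new Error(`Agent ${apiId} is not connected`);
  }
  agentWs.pushUpdateAgent(apiId);
}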
343  backend/src/services/automation/dockerImageUpdateCheck.js  Normal file
@@ -0,0 +1,343 @@
const { prisma } = require("./shared/prisma");
const https = require("node:https");
const http = require("node:http");
const { v4: uuidv4 } = require("uuid");

/**
 * Docker Image Update Check Automation
 * Checks for Docker image updates by comparing local digests with remote registry digests
 */
class DockerImageUpdateCheck {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "docker-image-update-check";
  }

  /**
   * Get remote digest from Docker registry using HEAD request
   * Supports Docker Hub, GHCR, and other OCI-compliant registries
   */
  async getRemoteDigest(imageName, tag = "latest") {
    return new Promise((resolve, reject) => {
      // Parse image name to determine registry
      const registryInfo = this.parseImageName(imageName);

      // Construct manifest URL
      const manifestPath = `/v2/${registryInfo.repository}/manifests/${tag}`;
      const options = {
        hostname: registryInfo.registry,
        path: manifestPath,
        method: "HEAD",
        headers: {
          Accept:
            "application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.list.v2+json, application/vnd.oci.image.manifest.v1+json, application/vnd.oci.image.index.v1+json",
          "User-Agent": "PatchMon/1.0",
        },
      };

      // Add authentication token for Docker Hub if needed
      if (
        registryInfo.registry === "registry-1.docker.io" &&
        registryInfo.isPublic
      ) {
        // For anonymous public images, we may need to get an auth token first
        // For now, try without auth (works for public images)
      }

      // Choose HTTP or HTTPS
      const client = registryInfo.isSecure ? https : http;

      const req = client.request(options, (res) => {
        if (res.statusCode === 401 || res.statusCode === 403) {
          // Authentication required - skip for now (would need to implement auth)
          return reject(
            new Error(`Authentication required for ${imageName}:${tag}`),
          );
        }

        if (res.statusCode !== 200) {
          return reject(
            new Error(
              `Registry returned status ${res.statusCode} for ${imageName}:${tag}`,
            ),
          );
        }

        // Get digest from Docker-Content-Digest header
        const digest = res.headers["docker-content-digest"];
        if (!digest) {
          return reject(
            new Error(
              `No Docker-Content-Digest header for ${imageName}:${tag}`,
            ),
          );
        }

        // Clean up digest (remove sha256: prefix if present)
        const cleanDigest = digest.startsWith("sha256:")
          ? digest.substring(7)
          : digest;
        resolve(cleanDigest);
      });

      req.on("error", (error) => {
        reject(error);
      });

      req.setTimeout(10000, () => {
        req.destroy();
        reject(new Error(`Timeout getting digest for ${imageName}:${tag}`));
      });

      req.end();
    });
  }

  /**
   * Parse image name to extract registry, repository, and determine if secure
   */
  parseImageName(imageName) {
    let registry = "registry-1.docker.io";
    let repository = imageName;
    const isSecure = true;
    let isPublic = true;

    // Handle explicit registries (ghcr.io, quay.io, etc.)
    if (imageName.includes("/")) {
      const parts = imageName.split("/");
      const firstPart = parts[0];

      // Check for known registries
      if (firstPart.includes(".") || firstPart === "localhost") {
        registry = firstPart;
        repository = parts.slice(1).join("/");
        isPublic = false; // Assume private registries need auth for now
      } else {
        // Docker Hub - registry-1.docker.io
        repository = imageName;
      }
    }

    // Docker Hub official images (no namespace)
    if (!repository.includes("/")) {
      repository = `library/${repository}`;
    }

    return {
      registry,
      repository,
      isSecure,
      isPublic,
    };
  }

  /**
   * Process Docker image update check job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🐳 Starting Docker image update check...");

    try {
      // Get all Docker images that have a digest and repository
      const images = await prisma.docker_images.findMany({
        where: {
          digest: {
            not: null,
          },
          repository: {
            not: null,
          },
        },
        include: {
          docker_image_updates: true,
        },
      });

      console.log(`📦 Found ${images.length} images to check for updates`);

      let checkedCount = 0;
      let updateCount = 0;
      let errorCount = 0;
      const errors = [];

      // Process images in batches to avoid overwhelming the API
      const batchSize = 10;
      for (let i = 0; i < images.length; i += batchSize) {
        const batch = images.slice(i, i + batchSize);

        // Process batch concurrently with Promise.allSettled for error tolerance
        const _results = await Promise.allSettled(
          batch.map(async (image) => {
            try {
              checkedCount++;

              // Skip local images (no digest means they're local)
              if (!image.digest || image.digest.trim() === "") {
                return { image, skipped: true, reason: "No digest" };
              }

              // Get clean digest (remove sha256: prefix if present)
              const localDigest = image.digest.startsWith("sha256:")
                ? image.digest.substring(7)
                : image.digest;

              // Get remote digest from registry
              const remoteDigest = await this.getRemoteDigest(
                image.repository,
                image.tag || "latest",
              );

              // Compare digests
              if (localDigest !== remoteDigest) {
                console.log(
                  `🔄 Update found: ${image.repository}:${image.tag} (local: ${localDigest.substring(0, 12)}..., remote: ${remoteDigest.substring(0, 12)}...)`,
                );

                // Store digest info in changelog_url field as JSON
                const digestInfo = JSON.stringify({
                  method: "digest_comparison",
                  current_digest: localDigest,
                  available_digest: remoteDigest,
                  checked_at: new Date().toISOString(),
                });

                // Upsert the update record
                await prisma.docker_image_updates.upsert({
                  where: {
                    image_id_available_tag: {
                      image_id: image.id,
                      available_tag: image.tag || "latest",
                    },
                  },
                  update: {
                    updated_at: new Date(),
                    changelog_url: digestInfo,
                    severity: "digest_changed",
                  },
                  create: {
                    id: uuidv4(),
                    image_id: image.id,
                    current_tag: image.tag || "latest",
                    available_tag: image.tag || "latest",
                    severity: "digest_changed",
                    changelog_url: digestInfo,
                    updated_at: new Date(),
                  },
                });

                // Update last_checked timestamp on image
                await prisma.docker_images.update({
                  where: { id: image.id },
                  data: { last_checked: new Date() },
                });

                updateCount++;
                return { image, updated: true };
              } else {
                // No update - still update last_checked
                await prisma.docker_images.update({
                  where: { id: image.id },
                  data: { last_checked: new Date() },
                });

                // Remove existing update record if digest matches now
                const existingUpdate = image.docker_image_updates?.find(
                  (u) => u.available_tag === (image.tag || "latest"),
                );
                if (existingUpdate) {
                  await prisma.docker_image_updates.delete({
                    where: { id: existingUpdate.id },
                  });
                }

                return { image, updated: false };
              }
            } catch (error) {
              errorCount++;
              const errorMsg = `Error checking ${image.repository}:${image.tag}: ${error.message}`;
              errors.push(errorMsg);
              console.error(`❌ ${errorMsg}`);

              // Still update last_checked even on error
              try {
                await prisma.docker_images.update({
                  where: { id: image.id },
                  data: { last_checked: new Date() },
                });
              } catch (_updateError) {
                // Ignore update errors
              }

              return { image, error: error.message };
            }
          }),
        );

        // Log batch progress
        if (i + batchSize < images.length) {
          console.log(
            `⏳ Processed ${Math.min(i + batchSize, images.length)}/${images.length} images...`,
          );
        }

        // Small delay between batches to be respectful to registries
        if (i + batchSize < images.length) {
          await new Promise((resolve) => setTimeout(resolve, 500));
        }
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Docker image update check completed in ${executionTime}ms - Checked: ${checkedCount}, Updates: ${updateCount}, Errors: ${errorCount}`,
      );

      return {
        success: true,
        checked: checkedCount,
        updates: updateCount,
        errors: errorCount,
        executionTime,
        errorDetails: errors,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Docker image update check failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring Docker image update check (daily at 2 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "docker-image-update-check",
      {},
      {
        repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
        jobId: "docker-image-update-check-recurring",
      },
    );
    console.log("✅ Docker image update check scheduled");
    return job;
  }

  /**
   * Trigger manual Docker image update check
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "docker-image-update-check-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual Docker image update check triggered");
    return job;
  }
}

module.exports = DockerImageUpdateCheck;
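For reference, a hedged usage sketch of the digest helpers above outside of a queue job; the image name is illustrative, and a real run goes through the BullMQ worker rather than a direct call:

const DockerImageUpdateCheck = require("./dockerImageUpdateCheck");

async function inspectImage(queueManager) {
  const checker = new DockerImageUpdateCheck(queueManager);
  // parseImageName() normalises Docker Hub official images to "library/<name>"
  const info = checker.parseImageName("nginx"); // { registry: "registry-1.docker.io", repository: "library/nginx", ... }
  // getRemoteDigest() issues a HEAD request and returns the digest without the "sha256:" prefix
  const digest = await checker.getRemoteDigest("nginx", "latest");
  console.log(info.repository, digest);
}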
164  backend/src/services/automation/dockerInventoryCleanup.js  Normal file
@@ -0,0 +1,164 @@
const { prisma } = require("./shared/prisma");

/**
 * Docker Inventory Cleanup Automation
 * Removes Docker containers and images for hosts that no longer exist
 */
class DockerInventoryCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "docker-inventory-cleanup";
  }

  /**
   * Process Docker inventory cleanup job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🧹 Starting Docker inventory cleanup...");

    try {
      // Step 1: Find and delete orphaned containers (containers for non-existent hosts)
      const orphanedContainers = await prisma.docker_containers.findMany({
        where: {
          host_id: {
            // Find containers where the host doesn't exist
            notIn: await prisma.hosts
              .findMany({ select: { id: true } })
              .then((hosts) => hosts.map((h) => h.id)),
          },
        },
      });

      let deletedContainersCount = 0;
      const deletedContainers = [];

      for (const container of orphanedContainers) {
        try {
          await prisma.docker_containers.delete({
            where: { id: container.id },
          });
          deletedContainersCount++;
          deletedContainers.push({
            id: container.id,
            container_id: container.container_id,
            name: container.name,
            image_name: container.image_name,
            host_id: container.host_id,
          });
          console.log(
            `🗑️ Deleted orphaned container: ${container.name} (host_id: ${container.host_id})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete container ${container.id}:`,
            deleteError.message,
          );
        }
      }

      // Step 2: Find and delete orphaned images (images with no containers using them)
      const orphanedImages = await prisma.docker_images.findMany({
        where: {
          docker_containers: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              docker_containers: true,
              docker_image_updates: true,
            },
          },
        },
      });

      let deletedImagesCount = 0;
      const deletedImages = [];

      for (const image of orphanedImages) {
        try {
          // First delete any image updates associated with this image
          if (image._count.docker_image_updates > 0) {
            await prisma.docker_image_updates.deleteMany({
              where: { image_id: image.id },
            });
          }

          // Then delete the image itself
          await prisma.docker_images.delete({
            where: { id: image.id },
          });
          deletedImagesCount++;
          deletedImages.push({
            id: image.id,
            repository: image.repository,
            tag: image.tag,
            image_id: image.image_id,
          });
          console.log(
            `🗑️ Deleted orphaned image: ${image.repository}:${image.tag}`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete image ${image.id}:`,
            deleteError.message,
          );
        }
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Docker inventory cleanup completed in ${executionTime}ms - Deleted ${deletedContainersCount} containers and ${deletedImagesCount} images`,
      );

      return {
        success: true,
        deletedContainersCount,
        deletedImagesCount,
        deletedContainers,
        deletedImages,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Docker inventory cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring Docker inventory cleanup (daily at 4 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "docker-inventory-cleanup",
      {},
      {
        repeat: { cron: "0 4 * * *" }, // Daily at 4 AM
        jobId: "docker-inventory-cleanup-recurring",
      },
    );
    console.log("✅ Docker inventory cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual Docker inventory cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "docker-inventory-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual Docker inventory cleanup triggered");
    return job;
  }
}

module.exports = DockerInventoryCleanup;
@@ -52,17 +52,24 @@ class GitHubUpdateCheck {
      }

      // Read version from package.json
-     let currentVersion = "1.3.0"; // fallback
      let currentVersion = null;
      try {
        const packageJson = require("../../../package.json");
        if (packageJson?.version) {
          currentVersion = packageJson.version;
        }
      } catch (packageError) {
-       console.warn(
        console.error(
          "Could not read version from package.json:",
          packageError.message,
        );
        throw new Error(
          "Could not determine current version from package.json",
        );
      }

      if (!currentVersion) {
        throw new Error("Version not found in package.json");
      }

      const isUpdateAvailable =
@@ -2,12 +2,16 @@ const { Queue, Worker } = require("bullmq");
const { redis, redisConnection } = require("./shared/redis");
const { prisma } = require("./shared/prisma");
const agentWs = require("../agentWs");
const { v4: uuidv4 } = require("uuid");

// Import automation classes
const GitHubUpdateCheck = require("./githubUpdateCheck");
const SessionCleanup = require("./sessionCleanup");
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
const OrphanedPackageCleanup = require("./orphanedPackageCleanup");
const DockerInventoryCleanup = require("./dockerInventoryCleanup");
const DockerImageUpdateCheck = require("./dockerImageUpdateCheck");
const MetricsReporting = require("./metricsReporting");

// Queue names
const QUEUE_NAMES = {

@@ -15,6 +19,9 @@ const QUEUE_NAMES = {
  SESSION_CLEANUP: "session-cleanup",
  ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
  ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup",
  DOCKER_INVENTORY_CLEANUP: "docker-inventory-cleanup",
  DOCKER_IMAGE_UPDATE_CHECK: "docker-image-update-check",
  METRICS_REPORTING: "metrics-reporting",
  AGENT_COMMANDS: "agent-commands",
};

@@ -91,6 +98,13 @@ class QueueManager {
      new OrphanedRepoCleanup(this);
    this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] =
      new OrphanedPackageCleanup(this);
    this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP] =
      new DockerInventoryCleanup(this);
    this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK] =
      new DockerImageUpdateCheck(this);
    this.automations[QUEUE_NAMES.METRICS_REPORTING] = new MetricsReporting(
      this,
    );

    console.log("✅ All automation classes initialized");
  }

@@ -149,6 +163,33 @@ class QueueManager {
      workerOptions,
    );

    // Docker Inventory Cleanup Worker
    this.workers[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP] = new Worker(
      QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP,
      this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP],
      ),
      workerOptions,
    );

    // Docker Image Update Check Worker
    this.workers[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK] = new Worker(
      QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK,
      this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK].process.bind(
        this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK],
      ),
      workerOptions,
    );

    // Metrics Reporting Worker
    this.workers[QUEUE_NAMES.METRICS_REPORTING] = new Worker(
      QUEUE_NAMES.METRICS_REPORTING,
      this.automations[QUEUE_NAMES.METRICS_REPORTING].process.bind(
        this.automations[QUEUE_NAMES.METRICS_REPORTING],
      ),
      workerOptions,
    );

    // Agent Commands Worker
    this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
      QUEUE_NAMES.AGENT_COMMANDS,

@@ -156,6 +197,36 @@ class QueueManager {
        const { api_id, type } = job.data;
        console.log(`Processing agent command: ${type} for ${api_id}`);

        // Log job to job_history
        let historyRecord = null;
        try {
          const host = await prisma.hosts.findUnique({
            where: { api_id },
            select: { id: true },
          });

          if (host) {
            historyRecord = await prisma.job_history.create({
              data: {
                id: uuidv4(),
                job_id: job.id,
                queue_name: QUEUE_NAMES.AGENT_COMMANDS,
                job_name: type,
                host_id: host.id,
                api_id: api_id,
                status: "active",
                attempt_number: job.attemptsMade + 1,
                created_at: new Date(),
                updated_at: new Date(),
              },
            });
            console.log(`📝 Logged job to job_history: ${job.id} (${type})`);
          }
        } catch (error) {
          console.error("Failed to log job to job_history:", error);
        }

        try {
          // Send command via WebSocket based on type
          if (type === "report_now") {
            agentWs.pushReportNow(api_id);

@@ -163,9 +234,51 @@ class QueueManager {
            // For settings update, we need additional data
            const { update_interval } = job.data;
            agentWs.pushSettingsUpdate(api_id, update_interval);
          } else if (type === "update_agent") {
            // Force agent to update by sending WebSocket command
            const ws = agentWs.getConnectionByApiId(api_id);
            if (ws && ws.readyState === 1) {
              // WebSocket.OPEN
              agentWs.pushUpdateAgent(api_id);
              console.log(`✅ Update command sent to agent ${api_id}`);
            } else {
              console.error(`❌ Agent ${api_id} is not connected`);
              throw new Error(
                `Agent ${api_id} is not connected. Cannot send update command.`,
              );
            }
          } else {
            console.error(`Unknown agent command type: ${type}`);
          }

          // Update job history to completed
          if (historyRecord) {
            await prisma.job_history.updateMany({
              where: { job_id: job.id },
              data: {
                status: "completed",
                completed_at: new Date(),
                updated_at: new Date(),
              },
            });
            console.log(`✅ Marked job as completed in job_history: ${job.id}`);
          }
        } catch (error) {
          // Update job history to failed
          if (historyRecord) {
            await prisma.job_history.updateMany({
              where: { job_id: job.id },
              data: {
                status: "failed",
                error_message: error.message,
                completed_at: new Date(),
                updated_at: new Date(),
              },
            });
            console.log(`❌ Marked job as failed in job_history: ${job.id}`);
          }
          throw error;
        }
      },
      workerOptions,
    );

@@ -194,6 +307,7 @@ class QueueManager {
        console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
      });
    }

    console.log("✅ Queue events initialized");
  }

@@ -205,6 +319,9 @@ class QueueManager {
    await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK].schedule();
    await this.automations[QUEUE_NAMES.METRICS_REPORTING].schedule();
  }

  /**

@@ -228,6 +345,22 @@ class QueueManager {
    ].triggerManual();
  }

  async triggerDockerInventoryCleanup() {
    return this.automations[
      QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP
    ].triggerManual();
  }

  async triggerDockerImageUpdateCheck() {
    return this.automations[
      QUEUE_NAMES.DOCKER_IMAGE_UPDATE_CHECK
    ].triggerManual();
  }

  async triggerMetricsReporting() {
    return this.automations[QUEUE_NAMES.METRICS_REPORTING].triggerManual();
  }

  /**
   * Get queue statistics
   */
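A sketch of how the new manual triggers might be invoked from elsewhere in the backend, assuming access to the shared queueManager instance; the require path and export shape are assumptions, not taken from this changeset:

const { queueManager } = require("./services/automation"); // assumed export shape

async function runDockerMaintenance() {
  // Each call enqueues a high-priority one-off job on the matching queue
  await queueManager.triggerDockerInventoryCleanup();
  await queueManager.triggerDockerImageUpdateCheck();
  await queueManager.triggerMetricsReporting();
}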
172  backend/src/services/automation/metricsReporting.js  Normal file
@@ -0,0 +1,172 @@
const axios = require("axios");
const { prisma } = require("./shared/prisma");
const { updateSettings } = require("../../services/settingsService");

const METRICS_API_URL =
  process.env.METRICS_API_URL || "https://metrics.patchmon.cloud";

/**
 * Metrics Reporting Automation
 * Sends anonymous usage metrics every 24 hours
 */
class MetricsReporting {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "metrics-reporting";
  }

  /**
   * Process metrics reporting job
   */
  async process(_job, silent = false) {
    const startTime = Date.now();
    if (!silent) console.log("📊 Starting metrics reporting...");

    try {
      // Fetch fresh settings directly from database (bypass cache)
      const settings = await prisma.settings.findFirst({
        orderBy: { updated_at: "desc" },
      });

      // Check if metrics are enabled
      if (settings.metrics_enabled !== true) {
        if (!silent) console.log("📊 Metrics reporting is disabled");
        return { success: false, reason: "disabled" };
      }

      // Check if we have an anonymous ID
      if (!settings.metrics_anonymous_id) {
        if (!silent) console.log("📊 No anonymous ID found, skipping metrics");
        return { success: false, reason: "no_id" };
      }

      // Get host count
      const hostCount = await prisma.hosts.count();

      // Get version
      const packageJson = require("../../../package.json");
      const version = packageJson.version;

      // Prepare metrics data
      const metricsData = {
        anonymous_id: settings.metrics_anonymous_id,
        host_count: hostCount,
        version,
      };

      if (!silent)
        console.log(
          `📊 Sending metrics: ${hostCount} hosts, version ${version}`,
        );

      // Send to metrics API
      try {
        const response = await axios.post(
          `${METRICS_API_URL}/metrics/submit`,
          metricsData,
          {
            timeout: 10000,
            headers: {
              "Content-Type": "application/json",
            },
          },
        );

        // Update last sent timestamp
        await updateSettings(settings.id, {
          metrics_last_sent: new Date(),
        });

        const executionTime = Date.now() - startTime;
        if (!silent)
          console.log(
            `✅ Metrics sent successfully in ${executionTime}ms:`,
            response.data,
          );

        return {
          success: true,
          data: response.data,
          hostCount,
          version,
          executionTime,
        };
      } catch (apiError) {
        const executionTime = Date.now() - startTime;
        if (!silent)
          console.error(
            `❌ Failed to send metrics to API after ${executionTime}ms:`,
            apiError.message,
          );
        return {
          success: false,
          reason: "api_error",
          error: apiError.message,
          executionTime,
        };
      }
    } catch (error) {
      const executionTime = Date.now() - startTime;
      if (!silent)
        console.error(
          `❌ Error in metrics reporting after ${executionTime}ms:`,
          error.message,
        );
      // Don't throw on silent mode, just return failure
      if (silent) {
        return {
          success: false,
          reason: "error",
          error: error.message,
          executionTime,
        };
      }
      throw error;
    }
  }

  /**
   * Schedule recurring metrics reporting (daily at 2 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "metrics-reporting",
      {},
      {
        repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
        jobId: "metrics-reporting-recurring",
      },
    );
    console.log("✅ Metrics reporting scheduled (daily at 2 AM)");
    return job;
  }

  /**
   * Trigger manual metrics reporting
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "metrics-reporting-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual metrics reporting triggered");
    return job;
  }

  /**
   * Send metrics immediately (silent mode)
   * Used for automatic sending on server startup
   */
  async sendSilent() {
    try {
      const result = await this.process({ name: "startup-silent" }, true);
      return result;
    } catch (error) {
      // Silent failure on startup
      return { success: false, reason: "error", error: error.message };
    }
  }
}

module.exports = MetricsReporting;
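The payload assembled in process() above is deliberately small; a representative example of what gets POSTed to `${METRICS_API_URL}/metrics/submit` (all values here are illustrative, not real data):

const examplePayload = {
  anonymous_id: "0b2f6c1e-5a4d-4c1b-9e76-3f2a8d9c4e10", // hypothetical UUID stored in settings.metrics_anonymous_id
  host_count: 42,
  version: "1.3.2",
};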
@@ -33,7 +33,8 @@ async function checkPublicRepo(owner, repo) {
  try {
    const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

-   let currentVersion = "1.3.0"; // fallback
    // Get current version for User-Agent (or use generic if unavailable)
    let currentVersion = "unknown";
    try {
      const packageJson = require("../../../package.json");
      if (packageJson?.version) {
@@ -41,7 +42,7 @@ async function checkPublicRepo(owner, repo) {
      }
    } catch (packageError) {
      console.warn(
-       "Could not read version from package.json for User-Agent, using fallback:",
        "Could not read version from package.json for User-Agent:",
        packageError.message,
      );
    }
179  backend/src/utils/docker.js  Normal file
@@ -0,0 +1,179 @@
/**
 * Docker-related utility functions
 */

/**
 * Generate a registry link for a Docker image based on its repository and source
 * Inspired by diun's registry link generation
 * @param {string} repository - The full repository name (e.g., "ghcr.io/owner/repo")
 * @param {string} source - The detected source (github, gitlab, docker-hub, etc.)
 * @returns {string|null} - The URL to the registry page, or null if unknown
 */
function generateRegistryLink(repository, source) {
  if (!repository) {
    return null;
  }

  // Parse the domain and path from the repository
  const parts = repository.split("/");
  let domain = "";
  let path = "";

  // Check if repository has a domain (contains a dot)
  if (parts[0].includes(".") || parts[0].includes(":")) {
    domain = parts[0];
    path = parts.slice(1).join("/");
  } else {
    // No domain means Docker Hub
    domain = "docker.io";
    path = repository;
  }

  switch (source) {
    case "docker-hub":
    case "docker.io": {
      // Docker Hub: https://hub.docker.com/r/{path} or https://hub.docker.com/_/{path} for official images
      // Official images are those without a namespace (e.g., "postgres" not "user/postgres")
      // or explicitly prefixed with "library/"
      if (path.startsWith("library/")) {
        const cleanPath = path.replace("library/", "");
        return `https://hub.docker.com/_/${cleanPath}`;
      }
      // Check if it's an official image (single part, no slash after removing library/)
      if (!path.includes("/")) {
        return `https://hub.docker.com/_/${path}`;
      }
      // Regular user/org image
      return `https://hub.docker.com/r/${path}`;
    }

    case "github":
    case "ghcr.io": {
      // GitHub Container Registry
      // Format: ghcr.io/{owner}/{package} or ghcr.io/{owner}/{repo}/{package}
      // URL format: https://github.com/{owner}/{repo}/pkgs/container/{package}
      if (domain === "ghcr.io" && path) {
        const pathParts = path.split("/");
        if (pathParts.length === 2) {
          // Simple case: ghcr.io/owner/package -> github.com/owner/owner/pkgs/container/package
          // OR: ghcr.io/owner/repo -> github.com/owner/repo/pkgs/container/{package}
          // Actually, for 2 parts it's owner/package, and repo is same as owner typically
          const owner = pathParts[0];
          const packageName = pathParts[1];
          return `https://github.com/${owner}/${owner}/pkgs/container/${packageName}`;
        } else if (pathParts.length >= 3) {
          // Extended case: ghcr.io/owner/repo/package -> github.com/owner/repo/pkgs/container/package
          const owner = pathParts[0];
          const repo = pathParts[1];
          const packageName = pathParts.slice(2).join("/");
          return `https://github.com/${owner}/${repo}/pkgs/container/${packageName}`;
        }
      }
      // Legacy GitHub Packages
      if (domain === "docker.pkg.github.com" && path) {
        const pathParts = path.split("/");
        if (pathParts.length >= 1) {
          return `https://github.com/${pathParts[0]}/packages`;
        }
      }
      return null;
    }

    case "gitlab":
    case "registry.gitlab.com": {
      // GitLab Container Registry: https://gitlab.com/{path}/container_registry
      if (path) {
        return `https://gitlab.com/${path}/container_registry`;
      }
      return null;
    }

    case "google":
    case "gcr.io": {
      // Google Container Registry: https://gcr.io/{path}
      if (domain.includes("gcr.io") || domain.includes("pkg.dev")) {
        return `https://console.cloud.google.com/gcr/images/${path}`;
      }
      return null;
    }

    case "quay":
    case "quay.io": {
      // Quay.io: https://quay.io/repository/{path}
      if (path) {
        return `https://quay.io/repository/${path}`;
      }
      return null;
    }

    case "redhat":
    case "registry.access.redhat.com": {
      // Red Hat: https://access.redhat.com/containers/#/registry.access.redhat.com/{path}
      if (path) {
        return `https://access.redhat.com/containers/#/registry.access.redhat.com/${path}`;
      }
      return null;
    }

    case "azure":
    case "azurecr.io": {
      // Azure Container Registry - link to portal
      // Format: {registry}.azurecr.io/{repository}
      if (domain.includes("azurecr.io")) {
        const registryName = domain.split(".")[0];
        return `https://portal.azure.com/#view/Microsoft_Azure_ContainerRegistries/RepositoryBlade/registryName/${registryName}/repositoryName/${path}`;
      }
      return null;
    }

    case "aws":
    case "amazonaws.com": {
      // AWS ECR - link to console
      // Format: {account}.dkr.ecr.{region}.amazonaws.com/{repository}
      if (domain.includes("amazonaws.com")) {
        const domainParts = domain.split(".");
        const region = domainParts[3]; // Extract region
        return `https://${region}.console.aws.amazon.com/ecr/repositories/private/${path}`;
      }
      return null;
    }

    case "private":
      // For private registries, try to construct a basic URL
      if (domain) {
        return `https://${domain}`;
      }
      return null;

    default:
      return null;
  }
}

/**
 * Get a user-friendly display name for a registry source
 * @param {string} source - The source identifier
 * @returns {string} - Human-readable source name
 */
function getSourceDisplayName(source) {
  const sourceNames = {
    "docker-hub": "Docker Hub",
    github: "GitHub",
    gitlab: "GitLab",
    google: "Google",
    quay: "Quay.io",
    redhat: "Red Hat",
    azure: "Azure",
    aws: "AWS ECR",
    private: "Private Registry",
    local: "Local",
    unknown: "Unknown",
  };

  return sourceNames[source] || source;
}

module.exports = {
  generateRegistryLink,
  getSourceDisplayName,
};
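A short usage sketch for the registry-link helpers above; the repositories are illustrative:

const { generateRegistryLink, getSourceDisplayName } = require("./docker");

console.log(generateRegistryLink("nginx", "docker-hub"));
// -> https://hub.docker.com/_/nginx (official image, no namespace)
console.log(generateRegistryLink("gitlab.com/group/app", "gitlab"));
// -> https://gitlab.com/group/app/container_registry
console.log(getSourceDisplayName("github")); // -> "GitHub"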
@@ -84,21 +84,20 @@ function parse_expiration(expiration_string) {
 * Generate device fingerprint from request data
 */
function generate_device_fingerprint(req) {
- const components = [
  // Use the X-Device-ID header from frontend (unique per browser profile/localStorage)
-   req.get("user-agent") || "",
  const deviceId = req.get("x-device-id");
-   req.get("accept-language") || "",
-   req.get("accept-encoding") || "",
-   req.ip || "",
- ];

- // Create a simple hash of device characteristics
  if (deviceId) {
-   const fingerprint = crypto
    // Hash the device ID for consistent storage format
    return crypto
      .createHash("sha256")
-     .update(components.join("|"))
      .update(deviceId)
      .digest("hex")
-     .substring(0, 32); // Use first 32 chars for storage efficiency
      .substring(0, 32);
  }

- return fingerprint;
  // No device ID - return null (user needs to provide device ID for remember-me)
  return null;
}

/**
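The server now hashes an X-Device-ID header instead of request characteristics. A hypothetical client-side counterpart is sketched below; the storage key and endpoint are assumptions and do not come from this changeset:

// Browser-side sketch: persist a stable device ID and send it with auth requests
function getDeviceId() {
  let id = localStorage.getItem("patchmon_device_id"); // assumed storage key
  if (!id) {
    id = crypto.randomUUID();
    localStorage.setItem("patchmon_device_id", id);
  }
  return id;
}

fetch("/api/v1/auth/login", { // assumed endpoint
  method: "POST",
  headers: { "Content-Type": "application/json", "X-Device-ID": getDeviceId() },
  body: JSON.stringify({ username: "admin", password: "example-password" }),
});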
@@ -1,10 +1,13 @@
{
- "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json",
  "$schema": "https://biomejs.dev/schemas/2.3.0/schema.json",
  "vcs": {
    "enabled": true,
    "clientKind": "git",
    "useIgnoreFile": true
  },
  "files": {
    "includes": ["**", "!**/*.css"]
  },
  "formatter": {
    "enabled": true
  },
@@ -136,6 +136,24 @@ When you do this, updating to a new version requires manually updating the image
| `PM_DB_CONN_MAX_ATTEMPTS` | Maximum database connection attempts | `30` |
| `PM_DB_CONN_WAIT_INTERVAL` | Wait interval between connection attempts in seconds | `2` |

##### Database Connection Pool Configuration (Prisma)

| Variable | Description | Default |
| --------------------- | ---------------------------------------------------------- | ------- |
| `DB_CONNECTION_LIMIT` | Maximum number of database connections per instance | `30` |
| `DB_POOL_TIMEOUT` | Seconds to wait for an available connection before timeout | `20` |
| `DB_CONNECT_TIMEOUT` | Seconds to wait for initial database connection | `10` |
| `DB_IDLE_TIMEOUT` | Seconds before closing idle connections | `300` |
| `DB_MAX_LIFETIME` | Maximum lifetime of a connection in seconds | `1800` |

> [!TIP]
> The connection pool limit should be adjusted based on your deployment size:
> - **Small deployment (1-10 hosts)**: `DB_CONNECTION_LIMIT=15` is sufficient
> - **Medium deployment (10-50 hosts)**: `DB_CONNECTION_LIMIT=30` (default)
> - **Large deployment (50+ hosts)**: `DB_CONNECTION_LIMIT=50` or higher
>
> Each connection pool serves one backend instance. If you have concurrent operations (multiple users, background jobs, agent check-ins), increase the pool size accordingly.

##### Redis Configuration

| Variable | Description | Default |
@@ -46,8 +46,10 @@ COPY --chown=node:node backend/ ./backend/

WORKDIR /app/backend

-RUN npm ci --ignore-scripts &&\
-    npx prisma generate &&\
RUN npm cache clean --force &&\
    rm -rf node_modules ~/.npm /root/.npm &&\
    npm ci --ignore-scripts --legacy-peer-deps --no-audit --prefer-online --fetch-retries=3 --fetch-retry-mintimeout=20000 --fetch-retry-maxtimeout=120000 &&\
    PRISMA_CLI_BINARY_TYPE=binary npm run db:generate &&\
    npm prune --omit=dev &&\
    npm cache clean --force
@@ -50,6 +50,12 @@ services:
      SERVER_HOST: localhost
      SERVER_PORT: 3000
      CORS_ORIGIN: http://localhost:3000
      # Database Connection Pool Configuration (Prisma)
      DB_CONNECTION_LIMIT: 30
      DB_POOL_TIMEOUT: 20
      DB_CONNECT_TIMEOUT: 10
      DB_IDLE_TIMEOUT: 300
      DB_MAX_LIFETIME: 1800
      # Rate Limiting (times in milliseconds)
      RATE_LIMIT_WINDOW_MS: 900000
      RATE_LIMIT_MAX: 5000
@@ -56,6 +56,12 @@ services:
      SERVER_HOST: localhost
      SERVER_PORT: 3000
      CORS_ORIGIN: http://localhost:3000
      # Database Connection Pool Configuration (Prisma)
      DB_CONNECTION_LIMIT: 30
      DB_POOL_TIMEOUT: 20
      DB_CONNECT_TIMEOUT: 10
      DB_IDLE_TIMEOUT: 300
      DB_MAX_LIFETIME: 1800
      # Rate Limiting (times in milliseconds)
      RATE_LIMIT_WINDOW_MS: 900000
      RATE_LIMIT_MAX: 5000
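The pool variables above feed the Prisma connection pool. A sketch of how such settings are commonly appended to a PostgreSQL DATABASE_URL as Prisma query parameters follows; the exact wiring inside PatchMon is not shown in this hunk:

// Illustrative only: connection_limit, pool_timeout and connect_timeout are standard Prisma URL parameters
const databaseUrl =
  "postgresql://patchmon_user:password@localhost:5432/patchmon_db" +
  `?connection_limit=${process.env.DB_CONNECTION_LIMIT || 30}` +
  `&pool_timeout=${process.env.DB_POOL_TIMEOUT || 20}` +
  `&connect_timeout=${process.env.DB_CONNECT_TIMEOUT || 10}`;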
@@ -17,16 +17,21 @@ CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "3000"]
# Builder stage for production
FROM node:lts-alpine AS builder

-WORKDIR /app
WORKDIR /app/frontend

-COPY package*.json ./
-COPY frontend/package*.json ./frontend/
COPY frontend/package*.json ./

-RUN npm ci --ignore-scripts
RUN echo "=== Starting npm install ===" &&\
    npm cache clean --force &&\
    rm -rf node_modules ~/.npm /root/.npm &&\
    echo "=== npm install ===" &&\
    npm install --ignore-scripts --legacy-peer-deps --no-audit --prefer-online --fetch-retries=3 --fetch-retry-mintimeout=20000 --fetch-retry-maxtimeout=120000 &&\
    echo "=== npm install completed ===" &&\
    npm cache clean --force

-COPY frontend/ ./frontend/
COPY frontend/ ./

-RUN npm run build:frontend
RUN npm run build

# Production stage
FROM nginxinc/nginx-unprivileged:alpine
|||||||
@@ -1,5 +1,6 @@
|
|||||||
server {
|
server {
|
||||||
listen 3000;
|
listen 3000;
|
||||||
|
listen [::]:3000;
|
||||||
server_name localhost;
|
server_name localhost;
|
||||||
root /usr/share/nginx/html;
|
root /usr/share/nginx/html;
|
||||||
index index.html;
|
index index.html;
|
||||||
|
|||||||
@@ -6,5 +6,5 @@ VITE_API_URL=http://localhost:3001/api/v1
|
|||||||
|
|
||||||
# Application Metadata
|
# Application Metadata
|
||||||
VITE_APP_NAME=PatchMon
|
VITE_APP_NAME=PatchMon
|
||||||
VITE_APP_VERSION=1.3.0
|
VITE_APP_VERSION=1.3.1
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "patchmon-frontend",
|
"name": "patchmon-frontend",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "1.3.0",
|
"version": "1.3.2",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
|
|||||||
@@ -7,6 +7,8 @@ import ProtectedRoute from "./components/ProtectedRoute";
|
|||||||
import SettingsLayout from "./components/SettingsLayout";
|
import SettingsLayout from "./components/SettingsLayout";
|
||||||
import { isAuthPhase } from "./constants/authPhases";
|
import { isAuthPhase } from "./constants/authPhases";
|
||||||
import { AuthProvider, useAuth } from "./contexts/AuthContext";
|
import { AuthProvider, useAuth } from "./contexts/AuthContext";
|
||||||
|
import { ColorThemeProvider } from "./contexts/ColorThemeContext";
|
||||||
|
import { SettingsProvider } from "./contexts/SettingsContext";
|
||||||
import { ThemeProvider } from "./contexts/ThemeContext";
|
import { ThemeProvider } from "./contexts/ThemeContext";
|
||||||
import { UpdateNotificationProvider } from "./contexts/UpdateNotificationContext";
|
import { UpdateNotificationProvider } from "./contexts/UpdateNotificationContext";
|
||||||
|
|
||||||
@@ -27,6 +29,8 @@ const DockerContainerDetail = lazy(
|
|||||||
);
|
);
|
||||||
const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
|
const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
|
||||||
const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
|
const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
|
||||||
|
const DockerVolumeDetail = lazy(() => import("./pages/docker/VolumeDetail"));
|
||||||
|
const DockerNetworkDetail = lazy(() => import("./pages/docker/NetworkDetail"));
|
||||||
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
|
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
|
||||||
const Integrations = lazy(() => import("./pages/settings/Integrations"));
|
const Integrations = lazy(() => import("./pages/settings/Integrations"));
|
||||||
const Notifications = lazy(() => import("./pages/settings/Notifications"));
|
const Notifications = lazy(() => import("./pages/settings/Notifications"));
|
||||||
@@ -41,6 +45,7 @@ const SettingsServerConfig = lazy(
|
|||||||
() => import("./pages/settings/SettingsServerConfig"),
|
() => import("./pages/settings/SettingsServerConfig"),
|
||||||
);
|
);
|
||||||
const SettingsUsers = lazy(() => import("./pages/settings/SettingsUsers"));
|
const SettingsUsers = lazy(() => import("./pages/settings/SettingsUsers"));
|
||||||
|
const SettingsMetrics = lazy(() => import("./pages/settings/SettingsMetrics"));
|
||||||
|
|
||||||
// Loading fallback component
|
// Loading fallback component
|
||||||
const LoadingFallback = () => (
|
const LoadingFallback = () => (
|
||||||
@@ -192,6 +197,26 @@ function AppRoutes() {
|
|||||||
</ProtectedRoute>
|
</ProtectedRoute>
|
||||||
}
|
}
|
||||||
/>
|
/>
|
||||||
|
<Route
|
||||||
|
path="/docker/volumes/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_view_reports">
|
||||||
|
<Layout>
|
||||||
|
<DockerVolumeDetail />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="/docker/networks/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_view_reports">
|
||||||
|
<Layout>
|
||||||
|
<DockerNetworkDetail />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
<Route
|
<Route
|
||||||
path="/users"
|
path="/users"
|
||||||
element={
|
element={
|
||||||
@@ -388,6 +413,16 @@ function AppRoutes() {
|
|||||||
</ProtectedRoute>
|
</ProtectedRoute>
|
||||||
}
|
}
|
||||||
/>
|
/>
|
||||||
|
<Route
|
||||||
|
path="/settings/metrics"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_manage_settings">
|
||||||
|
<Layout>
|
||||||
|
<SettingsMetrics />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
<Route
|
<Route
|
||||||
path="/options"
|
path="/options"
|
||||||
element={
|
element={
|
||||||
@@ -415,15 +450,19 @@ function AppRoutes() {
|
|||||||
|
|
||||||
function App() {
|
function App() {
|
||||||
return (
|
return (
|
||||||
<ThemeProvider>
|
|
||||||
<AuthProvider>
|
<AuthProvider>
|
||||||
|
<ThemeProvider>
|
||||||
|
<SettingsProvider>
|
||||||
|
<ColorThemeProvider>
|
||||||
<UpdateNotificationProvider>
|
<UpdateNotificationProvider>
|
||||||
<LogoProvider>
|
<LogoProvider>
|
||||||
<AppRoutes />
|
<AppRoutes />
|
||||||
</LogoProvider>
|
</LogoProvider>
|
||||||
</UpdateNotificationProvider>
|
</UpdateNotificationProvider>
|
||||||
</AuthProvider>
|
</ColorThemeProvider>
|
||||||
|
</SettingsProvider>
|
||||||
</ThemeProvider>
|
</ThemeProvider>
|
||||||
|
</AuthProvider>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -26,9 +26,10 @@ import {
|
|||||||
Zap,
|
Zap,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { useCallback, useEffect, useRef, useState } from "react";
|
import { useCallback, useEffect, useRef, useState } from "react";
|
||||||
import { FaYoutube } from "react-icons/fa";
|
import { FaReddit, FaYoutube } from "react-icons/fa";
|
||||||
import { Link, useLocation, useNavigate } from "react-router-dom";
|
import { Link, useLocation, useNavigate } from "react-router-dom";
|
||||||
import { useAuth } from "../contexts/AuthContext";
|
import { useAuth } from "../contexts/AuthContext";
|
||||||
|
import { useColorTheme } from "../contexts/ColorThemeContext";
|
||||||
import { useUpdateNotification } from "../contexts/UpdateNotificationContext";
|
import { useUpdateNotification } from "../contexts/UpdateNotificationContext";
|
||||||
import { dashboardAPI, versionAPI } from "../utils/api";
|
import { dashboardAPI, versionAPI } from "../utils/api";
|
||||||
import DiscordIcon from "./DiscordIcon";
|
import DiscordIcon from "./DiscordIcon";
|
||||||
@@ -61,7 +62,9 @@ const Layout = ({ children }) => {
|
|||||||
canManageSettings,
|
canManageSettings,
|
||||||
} = useAuth();
|
} = useAuth();
|
||||||
const { updateAvailable } = useUpdateNotification();
|
const { updateAvailable } = useUpdateNotification();
|
||||||
|
const { themeConfig } = useColorTheme();
|
||||||
const userMenuRef = useRef(null);
|
const userMenuRef = useRef(null);
|
||||||
|
const bgCanvasRef = useRef(null);
|
||||||
|
|
||||||
// Fetch dashboard stats for the "Last updated" info
|
// Fetch dashboard stats for the "Last updated" info
|
||||||
const {
|
const {
|
||||||
@@ -233,27 +236,165 @@ const Layout = ({ children }) => {
|
|||||||
navigate("/hosts?action=add");
|
navigate("/hosts?action=add");
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Generate clean radial gradient background with subtle triangular accents for dark mode
|
||||||
|
useEffect(() => {
|
||||||
|
const generateBackground = () => {
|
||||||
|
if (
|
||||||
|
!bgCanvasRef.current ||
|
||||||
|
!themeConfig?.login ||
|
||||||
|
!document.documentElement.classList.contains("dark")
|
||||||
|
) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const canvas = bgCanvasRef.current;
|
||||||
|
canvas.width = window.innerWidth;
|
||||||
|
canvas.height = window.innerHeight;
|
||||||
|
const ctx = canvas.getContext("2d");
|
||||||
|
|
||||||
|
// Get theme colors - pick first color from each palette
|
||||||
|
const xColors = themeConfig.login.xColors || [
|
||||||
|
"#667eea",
|
||||||
|
"#764ba2",
|
||||||
|
"#f093fb",
|
||||||
|
"#4facfe",
|
||||||
|
];
|
||||||
|
const yColors = themeConfig.login.yColors || [
|
||||||
|
"#667eea",
|
||||||
|
"#764ba2",
|
||||||
|
"#f093fb",
|
||||||
|
"#4facfe",
|
||||||
|
];
|
||||||
|
|
||||||
|
// Use date for daily color rotation
|
||||||
|
const today = new Date();
|
||||||
|
const seed =
|
||||||
|
today.getFullYear() * 10000 + today.getMonth() * 100 + today.getDate();
|
||||||
|
const random = (s) => {
|
||||||
|
const x = Math.sin(s) * 10000;
|
||||||
|
return x - Math.floor(x);
|
||||||
|
};
|
||||||
|
|
||||||
|
const color1 = xColors[Math.floor(random(seed) * xColors.length)];
|
||||||
|
const color2 = yColors[Math.floor(random(seed + 1000) * yColors.length)];
|
||||||
|
|
||||||
|
// Create clean radial gradient from center to bottom-right corner
|
||||||
|
const gradient = ctx.createRadialGradient(
|
||||||
|
canvas.width * 0.3, // Center slightly left
|
||||||
|
canvas.height * 0.3, // Center slightly up
|
||||||
|
0,
|
||||||
|
canvas.width * 0.5, // Expand to cover screen
|
||||||
|
canvas.height * 0.5,
|
||||||
|
Math.max(canvas.width, canvas.height) * 1.2,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Subtle gradient with darker corners
|
||||||
|
gradient.addColorStop(0, color1);
|
||||||
|
gradient.addColorStop(0.6, color2);
|
||||||
|
gradient.addColorStop(1, "#0a0a0a"); // Very dark edges
|
||||||
|
|
||||||
|
ctx.fillStyle = gradient;
|
||||||
|
ctx.fillRect(0, 0, canvas.width, canvas.height);
|
||||||
|
|
||||||
|
// Add subtle triangular shapes as accents across entire background
|
||||||
|
const cellSize = 180;
|
||||||
|
const cols = Math.ceil(canvas.width / cellSize) + 1;
|
||||||
|
const rows = Math.ceil(canvas.height / cellSize) + 1;
|
||||||
|
|
||||||
|
for (let y = 0; y < rows; y++) {
|
||||||
|
for (let x = 0; x < cols; x++) {
|
||||||
|
const idx = y * cols + x;
|
||||||
|
// Draw more triangles (less sparse)
|
||||||
|
if (random(seed + idx + 5000) > 0.4) {
|
||||||
|
const baseX =
|
||||||
|
x * cellSize + random(seed + idx * 3) * cellSize * 0.8;
|
||||||
|
const baseY =
|
||||||
|
y * cellSize + random(seed + idx * 3 + 100) * cellSize * 0.8;
|
||||||
|
const size = 50 + random(seed + idx * 4) * 100;
|
||||||
|
|
||||||
|
ctx.beginPath();
|
||||||
|
ctx.moveTo(baseX, baseY);
|
||||||
|
ctx.lineTo(baseX + size, baseY);
|
||||||
|
ctx.lineTo(baseX + size / 2, baseY - size * 0.866);
|
||||||
|
ctx.closePath();
|
||||||
|
|
||||||
|
// More visible white with slightly higher opacity
|
||||||
|
ctx.fillStyle = `rgba(255, 255, 255, ${0.05 + random(seed + idx * 5) * 0.08})`;
|
||||||
|
ctx.fill();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
generateBackground();
|
||||||
|
|
||||||
|
// Regenerate on window resize or theme change
|
||||||
|
const handleResize = () => {
|
||||||
|
generateBackground();
|
||||||
|
};
|
||||||
|
|
||||||
|
window.addEventListener("resize", handleResize);
|
||||||
|
|
||||||
|
// Watch for dark mode changes
|
||||||
|
const observer = new MutationObserver((mutations) => {
|
||||||
|
mutations.forEach((mutation) => {
|
||||||
|
if (mutation.attributeName === "class") {
|
||||||
|
generateBackground();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
observer.observe(document.documentElement, {
|
||||||
|
attributes: true,
|
||||||
|
attributeFilter: ["class"],
|
||||||
|
});
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
window.removeEventListener("resize", handleResize);
|
||||||
|
observer.disconnect();
|
||||||
|
};
|
||||||
|
}, [themeConfig]);
|
||||||
|
|
||||||
// Fetch GitHub stars count
|
// Fetch GitHub stars count
|
||||||
const fetchGitHubStars = useCallback(async () => {
|
const fetchGitHubStars = useCallback(async () => {
|
||||||
// Skip if already fetched recently
|
// Try to load cached star count first
|
||||||
|
const cachedStars = localStorage.getItem("githubStarsCount");
|
||||||
|
if (cachedStars) {
|
||||||
|
setGithubStars(parseInt(cachedStars, 10));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip API call if fetched recently
|
||||||
const lastFetch = localStorage.getItem("githubStarsFetchTime");
|
const lastFetch = localStorage.getItem("githubStarsFetchTime");
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
if (lastFetch && now - parseInt(lastFetch, 15) < 600000) {
|
if (lastFetch && now - parseInt(lastFetch, 10) < 600000) {
|
||||||
// 15 minute cache
|
// 10 minute cache
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await fetch(
|
const response = await fetch(
|
||||||
"https://api.github.com/repos/9technologygroup/patchmon.net",
|
"https://api.github.com/repos/9technologygroup/patchmon.net",
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Accept: "application/vnd.github.v3+json",
|
||||||
|
},
|
||||||
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
setGithubStars(data.stargazers_count);
|
setGithubStars(data.stargazers_count);
|
||||||
|
localStorage.setItem(
|
||||||
|
"githubStarsCount",
|
||||||
|
data.stargazers_count.toString(),
|
||||||
|
);
|
||||||
localStorage.setItem("githubStarsFetchTime", now.toString());
|
localStorage.setItem("githubStarsFetchTime", now.toString());
|
||||||
|
} else if (response.status === 403 || response.status === 429) {
|
||||||
|
console.warn("GitHub API rate limit exceeded, using cached value");
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Failed to fetch GitHub stars:", error);
|
console.error("Failed to fetch GitHub stars:", error);
|
||||||
|
// Keep using cached value if available
|
||||||
}
|
}
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@@ -303,11 +444,76 @@ const Layout = ({ children }) => {
|
|||||||
fetchGitHubStars();
|
fetchGitHubStars();
|
||||||
}, [fetchGitHubStars]);
|
}, [fetchGitHubStars]);
|
||||||
|
|
||||||
|
// Set CSS custom properties for glassmorphism and theme colors in dark mode
|
||||||
|
useEffect(() => {
|
||||||
|
const updateThemeStyles = () => {
|
||||||
|
const isDark = document.documentElement.classList.contains("dark");
|
||||||
|
const root = document.documentElement;
|
||||||
|
|
||||||
|
if (isDark && themeConfig?.app) {
|
||||||
|
// Glass navigation bars - very light for pattern visibility
|
||||||
|
root.style.setProperty("--sidebar-bg", "rgba(0, 0, 0, 0.15)");
|
||||||
|
root.style.setProperty("--sidebar-blur", "blur(12px)");
|
||||||
|
root.style.setProperty("--topbar-bg", "rgba(0, 0, 0, 0.15)");
|
||||||
|
root.style.setProperty("--topbar-blur", "blur(12px)");
|
||||||
|
root.style.setProperty("--button-bg", "rgba(255, 255, 255, 0.15)");
|
||||||
|
root.style.setProperty("--button-blur", "blur(8px)");
|
||||||
|
|
||||||
|
// Theme-colored cards and buttons - darker to stand out
|
||||||
|
root.style.setProperty("--card-bg", themeConfig.app.cardBg);
|
||||||
|
root.style.setProperty("--card-border", themeConfig.app.cardBorder);
|
||||||
|
root.style.setProperty("--card-bg-hover", themeConfig.app.bgTertiary);
|
||||||
|
root.style.setProperty("--theme-button-bg", themeConfig.app.buttonBg);
|
||||||
|
root.style.setProperty(
|
||||||
|
"--theme-button-hover",
|
||||||
|
themeConfig.app.buttonHover,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// Light mode - standard colors
|
||||||
|
root.style.setProperty("--sidebar-bg", "white");
|
||||||
|
root.style.setProperty("--sidebar-blur", "none");
|
||||||
|
root.style.setProperty("--topbar-bg", "white");
|
||||||
|
root.style.setProperty("--topbar-blur", "none");
|
||||||
|
root.style.setProperty("--button-bg", "white");
|
||||||
|
root.style.setProperty("--button-blur", "none");
|
||||||
|
root.style.setProperty("--card-bg", "white");
|
||||||
|
root.style.setProperty("--card-border", "#e5e7eb");
|
||||||
|
root.style.setProperty("--card-bg-hover", "#f9fafb");
|
||||||
|
root.style.setProperty("--theme-button-bg", "#f3f4f6");
|
||||||
|
root.style.setProperty("--theme-button-hover", "#e5e7eb");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
updateThemeStyles();
|
||||||
|
|
||||||
|
// Watch for dark mode changes
|
||||||
|
const observer = new MutationObserver(() => {
|
||||||
|
updateThemeStyles();
|
||||||
|
});
|
||||||
|
|
||||||
|
observer.observe(document.documentElement, {
|
||||||
|
attributes: true,
|
||||||
|
attributeFilter: ["class"],
|
||||||
|
});
|
||||||
|
|
||||||
|
return () => observer.disconnect();
|
||||||
|
}, [themeConfig]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="min-h-screen bg-secondary-50">
|
<div className="min-h-screen bg-secondary-50 dark:bg-black relative overflow-hidden">
|
||||||
|
{/* Full-screen Trianglify Background (Dark Mode Only) */}
|
||||||
|
<canvas
|
||||||
|
ref={bgCanvasRef}
|
||||||
|
className="fixed inset-0 w-full h-full hidden dark:block"
|
||||||
|
style={{ zIndex: 0 }}
|
||||||
|
/>
|
||||||
|
<div
|
||||||
|
className="fixed inset-0 bg-gradient-to-br from-black/10 to-black/20 hidden dark:block pointer-events-none"
|
||||||
|
style={{ zIndex: 1 }}
|
||||||
|
/>
|
||||||
{/* Mobile sidebar */}
|
{/* Mobile sidebar */}
|
||||||
<div
|
<div
|
||||||
className={`fixed inset-0 z-50 lg:hidden ${sidebarOpen ? "block" : "hidden"}`}
|
className={`fixed inset-0 z-[60] lg:hidden ${sidebarOpen ? "block" : "hidden"}`}
|
||||||
>
|
>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -315,7 +521,14 @@ const Layout = ({ children }) => {
|
|||||||
onClick={() => setSidebarOpen(false)}
|
onClick={() => setSidebarOpen(false)}
|
||||||
aria-label="Close sidebar"
|
aria-label="Close sidebar"
|
||||||
/>
|
/>
|
||||||
<div className="relative flex w-full max-w-[280px] flex-col bg-white dark:bg-secondary-800 pb-4 pt-5 shadow-xl">
|
<div
|
||||||
|
className="relative flex w-full max-w-[280px] flex-col bg-white dark:border-r dark:border-white/10 pb-4 pt-5 shadow-xl"
|
||||||
|
style={{
|
||||||
|
backgroundColor: "var(--sidebar-bg, white)",
|
||||||
|
backdropFilter: "var(--sidebar-blur, none)",
|
||||||
|
WebkitBackdropFilter: "var(--sidebar-blur, none)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
<div className="absolute right-0 top-0 -mr-12 pt-2">
|
<div className="absolute right-0 top-0 -mr-12 pt-2">
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -534,17 +747,43 @@ const Layout = ({ children }) => {
|
|||||||
|
|
||||||
{/* Desktop sidebar */}
|
{/* Desktop sidebar */}
|
||||||
<div
|
<div
|
||||||
className={`hidden lg:fixed lg:inset-y-0 lg:z-50 lg:flex lg:flex-col transition-all duration-300 relative ${
|
className={`hidden lg:fixed lg:inset-y-0 z-[100] lg:flex lg:flex-col transition-all duration-300 relative ${
|
||||||
sidebarCollapsed ? "lg:w-16" : "lg:w-56"
|
sidebarCollapsed ? "lg:w-16" : "lg:w-56"
|
||||||
} bg-white dark:bg-secondary-800`}
|
} bg-white dark:bg-transparent`}
|
||||||
>
|
>
|
||||||
|
{/* Collapse/Expand button on border */}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => setSidebarCollapsed(!sidebarCollapsed)}
|
||||||
|
className="absolute top-5 -right-3 z-[200] flex items-center justify-center w-6 h-6 rounded-full bg-white border border-secondary-300 dark:border-white/20 shadow-md hover:bg-secondary-50 transition-colors"
|
||||||
|
style={{
|
||||||
|
backgroundColor: "var(--button-bg, white)",
|
||||||
|
backdropFilter: "var(--button-blur, none)",
|
||||||
|
WebkitBackdropFilter: "var(--button-blur, none)",
|
||||||
|
}}
|
||||||
|
title={sidebarCollapsed ? "Expand sidebar" : "Collapse sidebar"}
|
||||||
|
>
|
||||||
|
{sidebarCollapsed ? (
|
||||||
|
<ChevronRight className="h-4 w-4 text-secondary-700 dark:text-white" />
|
||||||
|
) : (
|
||||||
|
<ChevronLeft className="h-4 w-4 text-secondary-700 dark:text-white" />
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
|
||||||
<div
|
<div
|
||||||
className={`flex grow flex-col gap-y-5 overflow-y-auto border-r border-secondary-200 dark:border-secondary-600 bg-white dark:bg-secondary-800 ${
|
className={`flex grow flex-col gap-y-5 border-r border-secondary-200 dark:border-white/10 bg-white ${
|
||||||
sidebarCollapsed ? "px-2 shadow-lg" : "px-6"
|
sidebarCollapsed ? "px-2 shadow-lg" : "px-6"
|
||||||
}`}
|
}`}
|
||||||
|
style={{
|
||||||
|
backgroundColor: "var(--sidebar-bg, white)",
|
||||||
|
backdropFilter: "var(--sidebar-blur, none)",
|
||||||
|
WebkitBackdropFilter: "var(--sidebar-blur, none)",
|
||||||
|
overflowY: "auto",
|
||||||
|
overflowX: "visible",
|
||||||
|
}}
|
||||||
>
|
>
|
||||||
<div
|
<div
|
||||||
className={`flex h-16 shrink-0 items-center border-b border-secondary-200 dark:border-secondary-600 ${
|
className={`flex h-16 shrink-0 items-center border-b border-secondary-200 dark:border-white/10 ${
|
||||||
sidebarCollapsed ? "justify-center" : "justify-center"
|
sidebarCollapsed ? "justify-center" : "justify-center"
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
@@ -562,19 +801,6 @@ const Layout = ({ children }) => {
|
|||||||
</Link>
|
</Link>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
{/* Collapse/Expand button on border */}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => setSidebarCollapsed(!sidebarCollapsed)}
|
|
||||||
className="absolute top-5 -right-3 z-10 flex items-center justify-center w-6 h-6 rounded-full bg-white dark:bg-secondary-800 border border-secondary-300 dark:border-secondary-600 shadow-md hover:bg-secondary-50 dark:hover:bg-secondary-700 transition-colors"
|
|
||||||
title={sidebarCollapsed ? "Expand sidebar" : "Collapse sidebar"}
|
|
||||||
>
|
|
||||||
{sidebarCollapsed ? (
|
|
||||||
<ChevronRight className="h-4 w-4 text-secondary-700 dark:text-white" />
|
|
||||||
) : (
|
|
||||||
<ChevronLeft className="h-4 w-4 text-secondary-700 dark:text-white" />
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
<nav className="flex flex-1 flex-col">
|
<nav className="flex flex-1 flex-col">
|
||||||
<ul className="flex flex-1 flex-col gap-y-6">
|
<ul className="flex flex-1 flex-col gap-y-6">
|
||||||
{/* Show message for users with very limited permissions */}
|
{/* Show message for users with very limited permissions */}
|
||||||
@@ -930,12 +1156,19 @@ const Layout = ({ children }) => {
|
|||||||
|
|
||||||
{/* Main content */}
|
{/* Main content */}
|
||||||
<div
|
<div
|
||||||
className={`flex flex-col min-h-screen transition-all duration-300 ${
|
className={`flex flex-col min-h-screen transition-all duration-300 relative z-10 ${
|
||||||
sidebarCollapsed ? "lg:pl-16" : "lg:pl-56"
|
sidebarCollapsed ? "lg:pl-16" : "lg:pl-56"
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
{/* Top bar */}
|
{/* Top bar */}
|
||||||
<div className="sticky top-0 z-40 flex h-16 shrink-0 items-center gap-x-4 border-b border-secondary-200 dark:border-secondary-600 bg-white dark:bg-secondary-800 px-4 shadow-sm sm:gap-x-6 sm:px-6 lg:px-8">
|
<div
|
||||||
|
className="sticky top-0 z-[90] flex h-16 shrink-0 items-center gap-x-4 border-b border-secondary-200 dark:border-white/10 bg-white px-4 shadow-sm sm:gap-x-6 sm:px-6 lg:px-8"
|
||||||
|
style={{
|
||||||
|
backgroundColor: "var(--topbar-bg, white)",
|
||||||
|
backdropFilter: "var(--topbar-blur, none)",
|
||||||
|
WebkitBackdropFilter: "var(--topbar-blur, none)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
className="-m-2.5 p-2.5 text-secondary-700 dark:text-white lg:hidden"
|
className="-m-2.5 p-2.5 text-secondary-700 dark:text-white lg:hidden"
|
||||||
@@ -987,8 +1220,8 @@ const Layout = ({ children }) => {
|
|||||||
>
|
>
|
||||||
<Github className="h-5 w-5 flex-shrink-0" />
|
<Github className="h-5 w-5 flex-shrink-0" />
|
||||||
{githubStars !== null && (
|
{githubStars !== null && (
|
||||||
<div className="flex items-center gap-0.5">
|
<div className="flex items-center gap-1">
|
||||||
<Star className="h-3 w-3 fill-current text-yellow-500" />
|
<Star className="h-4 w-4 fill-current text-yellow-500" />
|
||||||
<span className="text-sm font-medium">{githubStars}</span>
|
<span className="text-sm font-medium">{githubStars}</span>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
@@ -1059,7 +1292,17 @@ const Layout = ({ children }) => {
|
|||||||
>
|
>
|
||||||
<FaYoutube className="h-5 w-5" />
|
<FaYoutube className="h-5 w-5" />
|
||||||
</a>
|
</a>
|
||||||
{/* 7) Web */}
|
{/* 8) Reddit */}
|
||||||
|
<a
|
||||||
|
href="https://www.reddit.com/r/patchmon"
|
||||||
|
target="_blank"
|
||||||
|
rel="noopener noreferrer"
|
||||||
|
className="flex items-center justify-center w-10 h-10 bg-gray-50 dark:bg-gray-800 text-secondary-600 dark:text-secondary-300 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors shadow-sm"
|
||||||
|
title="Reddit Community"
|
||||||
|
>
|
||||||
|
<FaReddit className="h-5 w-5" />
|
||||||
|
</a>
|
||||||
|
{/* 9) Web */}
|
||||||
<a
|
<a
|
||||||
href="https://patchmon.net"
|
href="https://patchmon.net"
|
||||||
target="_blank"
|
target="_blank"
|
||||||
@@ -1074,7 +1317,7 @@ const Layout = ({ children }) => {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<main className="flex-1 py-6 bg-secondary-50 dark:bg-secondary-800">
|
<main className="flex-1 py-6 bg-secondary-50 dark:bg-transparent">
|
||||||
<div className="px-4 sm:px-6 lg:px-8">{children}</div>
|
<div className="px-4 sm:px-6 lg:px-8">{children}</div>
|
||||||
</main>
|
</main>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,17 +1,8 @@
|
|||||||
import { useQuery } from "@tanstack/react-query";
|
|
||||||
import { useEffect } from "react";
|
import { useEffect } from "react";
|
||||||
import { isAuthReady } from "../constants/authPhases";
|
import { useSettings } from "../contexts/SettingsContext";
|
||||||
import { useAuth } from "../contexts/AuthContext";
|
|
||||||
import { settingsAPI } from "../utils/api";
|
|
||||||
|
|
||||||
const LogoProvider = ({ children }) => {
|
const LogoProvider = ({ children }) => {
|
||||||
const { authPhase, isAuthenticated } = useAuth();
|
const { settings } = useSettings();
|
||||||
|
|
||||||
const { data: settings } = useQuery({
|
|
||||||
queryKey: ["settings"],
|
|
||||||
queryFn: () => settingsAPI.get().then((res) => res.data),
|
|
||||||
enabled: isAuthReady(authPhase, isAuthenticated()),
|
|
||||||
});
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Use custom favicon or fallback to default
|
// Use custom favicon or fallback to default
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import {
|
import {
|
||||||
|
BarChart3,
|
||||||
Bell,
|
Bell,
|
||||||
ChevronLeft,
|
ChevronLeft,
|
||||||
ChevronRight,
|
ChevronRight,
|
||||||
@@ -141,6 +142,11 @@ const SettingsLayout = ({ children }) => {
|
|||||||
href: "/settings/server-version",
|
href: "/settings/server-version",
|
||||||
icon: Code,
|
icon: Code,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
name: "Metrics",
|
||||||
|
href: "/settings/metrics",
|
||||||
|
icon: BarChart3,
|
||||||
|
},
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -102,7 +102,9 @@ const BrandingTab = () => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-6">
|
<div className="space-y-8">
|
||||||
|
{/* Header */}
|
||||||
|
<div>
|
||||||
<div className="flex items-center mb-6">
|
<div className="flex items-center mb-6">
|
||||||
<Image className="h-6 w-6 text-primary-600 mr-3" />
|
<Image className="h-6 w-6 text-primary-600 mr-3" />
|
||||||
<h2 className="text-xl font-semibold text-secondary-900 dark:text-white">
|
<h2 className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
@@ -113,6 +115,15 @@ const BrandingTab = () => {
|
|||||||
Customize your PatchMon installation with custom logos and favicon.
|
Customize your PatchMon installation with custom logos and favicon.
|
||||||
These will be displayed throughout the application.
|
These will be displayed throughout the application.
|
||||||
</p>
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Logo Section Header */}
|
||||||
|
<div className="flex items-center mb-4">
|
||||||
|
<Image className="h-5 w-5 text-primary-600 mr-2" />
|
||||||
|
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
|
||||||
|
Logos
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="grid grid-cols-1 md:grid-cols-3 gap-6">
|
<div className="grid grid-cols-1 md:grid-cols-3 gap-6">
|
||||||
{/* Dark Logo */}
|
{/* Dark Logo */}
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ const UsersTab = () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Update user mutation
|
// Update user mutation
|
||||||
const _updateUserMutation = useMutation({
|
const updateUserMutation = useMutation({
|
||||||
mutationFn: ({ id, data }) => adminUsersAPI.update(id, data),
|
mutationFn: ({ id, data }) => adminUsersAPI.update(id, data),
|
||||||
onSuccess: () => {
|
onSuccess: () => {
|
||||||
queryClient.invalidateQueries(["users"]);
|
queryClient.invalidateQueries(["users"]);
|
||||||
|
|||||||
@@ -91,10 +91,29 @@ export const AuthProvider = ({ children }) => {
|
|||||||
|
|
||||||
const login = async (username, password) => {
|
const login = async (username, password) => {
|
||||||
try {
|
try {
|
||||||
|
// Get or generate device ID for TFA remember-me
|
||||||
|
let deviceId = localStorage.getItem("device_id");
|
||||||
|
if (!deviceId) {
|
||||||
|
if (typeof crypto !== "undefined" && crypto.randomUUID) {
|
||||||
|
deviceId = crypto.randomUUID();
|
||||||
|
} else {
|
||||||
|
deviceId = "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(
|
||||||
|
/[xy]/g,
|
||||||
|
(c) => {
|
||||||
|
const r = (Math.random() * 16) | 0;
|
||||||
|
const v = c === "x" ? r : (r & 0x3) | 0x8;
|
||||||
|
return v.toString(16);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
localStorage.setItem("device_id", deviceId);
|
||||||
|
}
|
||||||
|
|
||||||
const response = await fetch("/api/v1/auth/login", {
|
const response = await fetch("/api/v1/auth/login", {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
|
"X-Device-ID": deviceId,
|
||||||
},
|
},
|
||||||
body: JSON.stringify({ username, password }),
|
body: JSON.stringify({ username, password }),
|
||||||
});
|
});
|
||||||
@@ -119,6 +138,9 @@ export const AuthProvider = ({ children }) => {
|
|||||||
setPermissions(userPermissions);
|
setPermissions(userPermissions);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Note: User preferences will be automatically fetched by ColorThemeContext
|
||||||
|
// when the component mounts, so no need to invalidate here
|
||||||
|
|
||||||
return { success: true };
|
return { success: true };
|
||||||
} else {
|
} else {
|
||||||
// Handle HTTP error responses (like 500 CORS errors)
|
// Handle HTTP error responses (like 500 CORS errors)
|
||||||
@@ -205,8 +227,19 @@ export const AuthProvider = ({ children }) => {
|
|||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
|
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
|
// Validate that we received user data with expected fields
|
||||||
|
if (!data.user || !data.user.id) {
|
||||||
|
console.error("Invalid user data in response:", data);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: "Invalid response from server",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update both state and localStorage atomically
|
||||||
setUser(data.user);
|
setUser(data.user);
|
||||||
localStorage.setItem("user", JSON.stringify(data.user));
|
localStorage.setItem("user", JSON.stringify(data.user));
|
||||||
|
|
||||||
return { success: true, user: data.user };
|
return { success: true, user: data.user };
|
||||||
} else {
|
} else {
|
||||||
// Handle HTTP error responses (like 500 CORS errors)
|
// Handle HTTP error responses (like 500 CORS errors)
|
||||||
|
|||||||
251
frontend/src/contexts/ColorThemeContext.jsx
Normal file
251
frontend/src/contexts/ColorThemeContext.jsx
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
import { useQuery, useQueryClient } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
createContext,
|
||||||
|
useCallback,
|
||||||
|
useContext,
|
||||||
|
useEffect,
|
||||||
|
useMemo,
|
||||||
|
useRef,
|
||||||
|
useState,
|
||||||
|
} from "react";
|
||||||
|
import { userPreferencesAPI } from "../utils/api";
|
||||||
|
import { useAuth } from "./AuthContext";
|
||||||
|
|
||||||
|
const ColorThemeContext = createContext();
|
||||||
|
|
||||||
|
// Theme configurations matching the login backgrounds
|
||||||
|
export const THEME_PRESETS = {
|
||||||
|
default: {
|
||||||
|
name: "Normal Dark",
|
||||||
|
login: {
|
||||||
|
cellSize: 90,
|
||||||
|
variance: 0.85,
|
||||||
|
xColors: ["#0f172a", "#1e293b", "#334155", "#475569", "#64748b"],
|
||||||
|
yColors: ["#0f172a", "#1e293b", "#334155", "#475569", "#64748b"],
|
||||||
|
},
|
||||||
|
app: {
|
||||||
|
bgPrimary: "#1e293b",
|
||||||
|
bgSecondary: "#1e293b",
|
||||||
|
bgTertiary: "#334155",
|
||||||
|
borderColor: "#475569",
|
||||||
|
cardBg: "#1e293b",
|
||||||
|
cardBorder: "#334155",
|
||||||
|
buttonBg: "#334155",
|
||||||
|
buttonHover: "#475569",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
cyber_blue: {
|
||||||
|
name: "Cyber Blue",
|
||||||
|
login: {
|
||||||
|
cellSize: 90,
|
||||||
|
variance: 0.85,
|
||||||
|
xColors: ["#0a0820", "#1a1f3a", "#2d3561", "#4a5584", "#667eaf"],
|
||||||
|
yColors: ["#0a0820", "#1a1f3a", "#2d3561", "#4a5584", "#667eaf"],
|
||||||
|
},
|
||||||
|
app: {
|
||||||
|
bgPrimary: "#0a0820",
|
||||||
|
bgSecondary: "#1a1f3a",
|
||||||
|
bgTertiary: "#2d3561",
|
||||||
|
borderColor: "#4a5584",
|
||||||
|
cardBg: "#1a1f3a",
|
||||||
|
cardBorder: "#2d3561",
|
||||||
|
buttonBg: "#2d3561",
|
||||||
|
buttonHover: "#4a5584",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
neon_purple: {
|
||||||
|
name: "Neon Purple",
|
||||||
|
login: {
|
||||||
|
cellSize: 80,
|
||||||
|
variance: 0.9,
|
||||||
|
xColors: ["#0f0a1e", "#1e0f3e", "#4a0082", "#7209b7", "#b5179e"],
|
||||||
|
yColors: ["#0f0a1e", "#1e0f3e", "#4a0082", "#7209b7", "#b5179e"],
|
||||||
|
},
|
||||||
|
app: {
|
||||||
|
bgPrimary: "#0f0a1e",
|
||||||
|
bgSecondary: "#1e0f3e",
|
||||||
|
bgTertiary: "#4a0082",
|
||||||
|
borderColor: "#7209b7",
|
||||||
|
cardBg: "#1e0f3e",
|
||||||
|
cardBorder: "#4a0082",
|
||||||
|
buttonBg: "#4a0082",
|
||||||
|
buttonHover: "#7209b7",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
matrix_green: {
|
||||||
|
name: "Matrix Green",
|
||||||
|
login: {
|
||||||
|
cellSize: 70,
|
||||||
|
variance: 0.7,
|
||||||
|
xColors: ["#001a00", "#003300", "#004d00", "#006600", "#00b300"],
|
||||||
|
yColors: ["#001a00", "#003300", "#004d00", "#006600", "#00b300"],
|
||||||
|
},
|
||||||
|
app: {
|
||||||
|
bgPrimary: "#001a00",
|
||||||
|
bgSecondary: "#003300",
|
||||||
|
bgTertiary: "#004d00",
|
||||||
|
borderColor: "#006600",
|
||||||
|
cardBg: "#003300",
|
||||||
|
cardBorder: "#004d00",
|
||||||
|
buttonBg: "#004d00",
|
||||||
|
buttonHover: "#006600",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ocean_blue: {
|
||||||
|
name: "Ocean Blue",
|
||||||
|
login: {
|
||||||
|
cellSize: 85,
|
||||||
|
variance: 0.8,
|
||||||
|
xColors: ["#001845", "#023e7d", "#0077b6", "#0096c7", "#00b4d8"],
|
||||||
|
yColors: ["#001845", "#023e7d", "#0077b6", "#0096c7", "#00b4d8"],
|
||||||
|
},
|
||||||
|
app: {
|
||||||
|
bgPrimary: "#001845",
|
||||||
|
bgSecondary: "#023e7d",
|
||||||
|
bgTertiary: "#0077b6",
|
||||||
|
borderColor: "#0096c7",
|
||||||
|
cardBg: "#023e7d",
|
||||||
|
cardBorder: "#0077b6",
|
||||||
|
buttonBg: "#0077b6",
|
||||||
|
buttonHover: "#0096c7",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
sunset_gradient: {
|
||||||
|
name: "Sunset Gradient",
|
||||||
|
login: {
|
||||||
|
cellSize: 95,
|
||||||
|
variance: 0.75,
|
||||||
|
xColors: ["#1a0033", "#330066", "#4d0099", "#6600cc", "#9933ff"],
|
||||||
|
yColors: ["#1a0033", "#660033", "#990033", "#cc0066", "#ff0099"],
|
||||||
|
},
|
||||||
|
app: {
|
||||||
|
bgPrimary: "#1a0033",
|
||||||
|
bgSecondary: "#330066",
|
||||||
|
bgTertiary: "#4d0099",
|
||||||
|
borderColor: "#6600cc",
|
||||||
|
cardBg: "#330066",
|
||||||
|
cardBorder: "#4d0099",
|
||||||
|
buttonBg: "#4d0099",
|
||||||
|
buttonHover: "#6600cc",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export const ColorThemeProvider = ({ children }) => {
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
const lastThemeRef = useRef(null);
|
||||||
|
|
||||||
|
// Use reactive authentication state from AuthContext
|
||||||
|
// This ensures the query re-enables when user logs in
|
||||||
|
const { user } = useAuth();
|
||||||
|
const isAuthenticated = !!user;
|
||||||
|
|
||||||
|
// Source of truth: Database (via userPreferences query)
|
||||||
|
// localStorage is only used as a temporary cache until DB loads
|
||||||
|
// Only fetch if user is authenticated to avoid 401 errors on login page
|
||||||
|
const { data: userPreferences, isLoading: preferencesLoading } = useQuery({
|
||||||
|
queryKey: ["userPreferences"],
|
||||||
|
queryFn: () => userPreferencesAPI.get().then((res) => res.data),
|
||||||
|
enabled: isAuthenticated, // Only run query if user is authenticated
|
||||||
|
retry: 2,
|
||||||
|
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||||
|
refetchOnWindowFocus: true, // Refetch when user returns to tab
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get theme from database (source of truth), fallback to user object from login, then localStorage cache, then default
|
||||||
|
// Memoize to prevent recalculation on every render
|
||||||
|
const colorThemeValue = useMemo(() => {
|
||||||
|
return (
|
||||||
|
userPreferences?.color_theme ||
|
||||||
|
user?.color_theme ||
|
||||||
|
localStorage.getItem("colorTheme") ||
|
||||||
|
"cyber_blue"
|
||||||
|
);
|
||||||
|
}, [userPreferences?.color_theme, user?.color_theme]);
|
||||||
|
|
||||||
|
// Only update state if the theme value actually changed (prevent loops)
|
||||||
|
const [colorTheme, setColorTheme] = useState(() => colorThemeValue);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
// Only update if the value actually changed from what we last saw (prevent loops)
|
||||||
|
if (colorThemeValue !== lastThemeRef.current) {
|
||||||
|
setColorTheme(colorThemeValue);
|
||||||
|
lastThemeRef.current = colorThemeValue;
|
||||||
|
}
|
||||||
|
}, [colorThemeValue]);
|
||||||
|
|
||||||
|
const isLoading = preferencesLoading;
|
||||||
|
|
||||||
|
// Sync localStorage cache when DB data is available (for offline/performance)
|
||||||
|
useEffect(() => {
|
||||||
|
if (userPreferences?.color_theme) {
|
||||||
|
localStorage.setItem("colorTheme", userPreferences.color_theme);
|
||||||
|
}
|
||||||
|
}, [userPreferences?.color_theme]);
|
||||||
|
|
||||||
|
const updateColorTheme = useCallback(
|
||||||
|
async (theme) => {
|
||||||
|
// Store previous theme for potential revert
|
||||||
|
const previousTheme = colorTheme;
|
||||||
|
|
||||||
|
// Immediately update state for instant UI feedback
|
||||||
|
setColorTheme(theme);
|
||||||
|
lastThemeRef.current = theme;
|
||||||
|
|
||||||
|
// Also update localStorage cache
|
||||||
|
localStorage.setItem("colorTheme", theme);
|
||||||
|
|
||||||
|
// Save to backend (source of truth)
|
||||||
|
try {
|
||||||
|
await userPreferencesAPI.update({ color_theme: theme });
|
||||||
|
|
||||||
|
// Invalidate and refetch user preferences to ensure sync across tabs/browsers
|
||||||
|
await queryClient.invalidateQueries({ queryKey: ["userPreferences"] });
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Failed to save color theme preference:", error);
|
||||||
|
// Revert to previous theme if save failed
|
||||||
|
setColorTheme(previousTheme);
|
||||||
|
lastThemeRef.current = previousTheme;
|
||||||
|
localStorage.setItem("colorTheme", previousTheme);
|
||||||
|
|
||||||
|
// Invalidate to refresh from DB
|
||||||
|
await queryClient.invalidateQueries({ queryKey: ["userPreferences"] });
|
||||||
|
|
||||||
|
// Show error to user if possible (could add toast notification here)
|
||||||
|
throw error; // Re-throw so calling code can handle it
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[colorTheme, queryClient],
|
||||||
|
);
|
||||||
|
|
||||||
|
// Memoize themeConfig to prevent unnecessary re-renders
|
||||||
|
const themeConfig = useMemo(
|
||||||
|
() => THEME_PRESETS[colorTheme] || THEME_PRESETS.default,
|
||||||
|
[colorTheme],
|
||||||
|
);
|
||||||
|
|
||||||
|
// Memoize the context value to prevent unnecessary re-renders
|
||||||
|
const value = useMemo(
|
||||||
|
() => ({
|
||||||
|
colorTheme,
|
||||||
|
setColorTheme: updateColorTheme,
|
||||||
|
themeConfig,
|
||||||
|
isLoading,
|
||||||
|
}),
|
||||||
|
[colorTheme, themeConfig, isLoading, updateColorTheme],
|
||||||
|
);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ColorThemeContext.Provider value={value}>
|
||||||
|
{children}
|
||||||
|
</ColorThemeContext.Provider>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const useColorTheme = () => {
|
||||||
|
const context = useContext(ColorThemeContext);
|
||||||
|
if (!context) {
|
||||||
|
throw new Error("useColorTheme must be used within ColorThemeProvider");
|
||||||
|
}
|
||||||
|
return context;
|
||||||
|
};
|
||||||
45
frontend/src/contexts/SettingsContext.jsx
Normal file
45
frontend/src/contexts/SettingsContext.jsx
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import { createContext, useContext } from "react";
|
||||||
|
import { isAuthReady } from "../constants/authPhases";
|
||||||
|
import { settingsAPI } from "../utils/api";
|
||||||
|
import { useAuth } from "./AuthContext";
|
||||||
|
|
||||||
|
const SettingsContext = createContext();
|
||||||
|
|
||||||
|
export const useSettings = () => {
|
||||||
|
const context = useContext(SettingsContext);
|
||||||
|
if (!context) {
|
||||||
|
throw new Error("useSettings must be used within a SettingsProvider");
|
||||||
|
}
|
||||||
|
return context;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const SettingsProvider = ({ children }) => {
|
||||||
|
const { authPhase, isAuthenticated } = useAuth();
|
||||||
|
|
||||||
|
const {
|
||||||
|
data: settings,
|
||||||
|
isLoading,
|
||||||
|
error,
|
||||||
|
refetch,
|
||||||
|
} = useQuery({
|
||||||
|
queryKey: ["settings"],
|
||||||
|
queryFn: () => settingsAPI.get().then((res) => res.data),
|
||||||
|
staleTime: 5 * 60 * 1000, // Settings stay fresh for 5 minutes
|
||||||
|
refetchOnWindowFocus: false,
|
||||||
|
enabled: isAuthReady(authPhase, isAuthenticated()),
|
||||||
|
});
|
||||||
|
|
||||||
|
const value = {
|
||||||
|
settings,
|
||||||
|
isLoading,
|
||||||
|
error,
|
||||||
|
refetch,
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SettingsContext.Provider value={value}>
|
||||||
|
{children}
|
||||||
|
</SettingsContext.Provider>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -1,4 +1,7 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
import { createContext, useContext, useEffect, useState } from "react";
|
import { createContext, useContext, useEffect, useState } from "react";
|
||||||
|
import { userPreferencesAPI } from "../utils/api";
|
||||||
|
import { useAuth } from "./AuthContext";
|
||||||
|
|
||||||
const ThemeContext = createContext();
|
const ThemeContext = createContext();
|
||||||
|
|
||||||
@@ -12,7 +15,7 @@ export const useTheme = () => {
|
|||||||
|
|
||||||
export const ThemeProvider = ({ children }) => {
|
export const ThemeProvider = ({ children }) => {
|
||||||
const [theme, setTheme] = useState(() => {
|
const [theme, setTheme] = useState(() => {
|
||||||
// Check localStorage first, then system preference
|
// Check localStorage first for immediate render
|
||||||
const savedTheme = localStorage.getItem("theme");
|
const savedTheme = localStorage.getItem("theme");
|
||||||
if (savedTheme) {
|
if (savedTheme) {
|
||||||
return savedTheme;
|
return savedTheme;
|
||||||
@@ -24,6 +27,30 @@ export const ThemeProvider = ({ children }) => {
|
|||||||
return "light";
|
return "light";
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Use reactive authentication state from AuthContext
|
||||||
|
// This ensures the query re-enables when user logs in
|
||||||
|
const { user } = useAuth();
|
||||||
|
const isAuthenticated = !!user;
|
||||||
|
|
||||||
|
// Fetch user preferences from backend (only if authenticated)
|
||||||
|
const { data: userPreferences } = useQuery({
|
||||||
|
queryKey: ["userPreferences"],
|
||||||
|
queryFn: () => userPreferencesAPI.get().then((res) => res.data),
|
||||||
|
enabled: isAuthenticated, // Only run query if user is authenticated
|
||||||
|
retry: 1,
|
||||||
|
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||||
|
});
|
||||||
|
|
||||||
|
// Sync with user preferences from backend or user object from login
|
||||||
|
useEffect(() => {
|
||||||
|
const preferredTheme =
|
||||||
|
userPreferences?.theme_preference || user?.theme_preference;
|
||||||
|
if (preferredTheme) {
|
||||||
|
setTheme(preferredTheme);
|
||||||
|
localStorage.setItem("theme", preferredTheme);
|
||||||
|
}
|
||||||
|
}, [userPreferences, user?.theme_preference]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// Apply theme to document
|
// Apply theme to document
|
||||||
if (theme === "dark") {
|
if (theme === "dark") {
|
||||||
@@ -36,8 +63,17 @@ export const ThemeProvider = ({ children }) => {
|
|||||||
localStorage.setItem("theme", theme);
|
localStorage.setItem("theme", theme);
|
||||||
}, [theme]);
|
}, [theme]);
|
||||||
|
|
||||||
const toggleTheme = () => {
|
const toggleTheme = async () => {
|
||||||
setTheme((prevTheme) => (prevTheme === "light" ? "dark" : "light"));
|
const newTheme = theme === "light" ? "dark" : "light";
|
||||||
|
setTheme(newTheme);
|
||||||
|
|
||||||
|
// Save to backend
|
||||||
|
try {
|
||||||
|
await userPreferencesAPI.update({ theme_preference: newTheme });
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Failed to save theme preference:", error);
|
||||||
|
// Theme is already set locally, so user still sees the change
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const value = {
|
const value = {
|
||||||
|
|||||||
@@ -1,8 +1,5 @@
|
|||||||
import { useQuery } from "@tanstack/react-query";
|
|
||||||
import { createContext, useContext, useState } from "react";
|
import { createContext, useContext, useState } from "react";
|
||||||
import { isAuthReady } from "../constants/authPhases";
|
import { useSettings } from "./SettingsContext";
|
||||||
import { settingsAPI } from "../utils/api";
|
|
||||||
import { useAuth } from "./AuthContext";
|
|
||||||
|
|
||||||
const UpdateNotificationContext = createContext();
|
const UpdateNotificationContext = createContext();
|
||||||
|
|
||||||
@@ -18,17 +15,7 @@ export const useUpdateNotification = () => {
|
|||||||
|
|
||||||
export const UpdateNotificationProvider = ({ children }) => {
|
export const UpdateNotificationProvider = ({ children }) => {
|
||||||
const [dismissed, setDismissed] = useState(false);
|
const [dismissed, setDismissed] = useState(false);
|
||||||
const { authPhase, isAuthenticated } = useAuth();
|
const { settings, isLoading: settingsLoading } = useSettings();
|
||||||
|
|
||||||
// Ensure settings are loaded - but only after auth is fully ready
|
|
||||||
// This reads cached update info from backend (updated by scheduler)
|
|
||||||
const { data: settings, isLoading: settingsLoading } = useQuery({
|
|
||||||
queryKey: ["settings"],
|
|
||||||
queryFn: () => settingsAPI.get().then((res) => res.data),
|
|
||||||
staleTime: 5 * 60 * 1000, // Settings stay fresh for 5 minutes
|
|
||||||
refetchOnWindowFocus: false,
|
|
||||||
enabled: isAuthReady(authPhase, isAuthenticated()),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Read cached update information from settings (no GitHub API calls)
|
// Read cached update information from settings (no GitHub API calls)
|
||||||
// The backend scheduler updates this data periodically
|
// The backend scheduler updates this data periodically
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
body {
|
body {
|
||||||
@apply bg-secondary-50 dark:bg-secondary-800 text-secondary-900 dark:text-secondary-100 antialiased;
|
@apply bg-secondary-50 dark:bg-transparent text-secondary-900 dark:text-secondary-100 antialiased;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -39,19 +39,46 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.btn-outline {
|
.btn-outline {
|
||||||
@apply btn border-secondary-300 dark:border-secondary-600 text-secondary-700 dark:text-secondary-200 bg-white dark:bg-secondary-800 hover:bg-secondary-50 dark:hover:bg-secondary-700 focus:ring-secondary-500;
|
@apply btn border-secondary-300 text-secondary-700 bg-white hover:bg-secondary-50 focus:ring-secondary-500;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .btn-outline {
|
||||||
|
background-color: var(--theme-button-bg, #1e293b);
|
||||||
|
border-color: var(--card-border, #334155);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .btn-outline:hover {
|
||||||
|
background-color: var(--theme-button-hover, #334155);
|
||||||
}
|
}
|
||||||
|
|
||||||
.card {
|
.card {
|
||||||
@apply bg-white dark:bg-secondary-800 rounded-lg shadow-card dark:shadow-card-dark border border-secondary-200 dark:border-secondary-600;
|
@apply bg-white rounded-lg shadow-card border border-secondary-200;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .card {
|
||||||
|
background-color: var(--card-bg, #1e293b);
|
||||||
|
border-color: var(--card-border, #334155);
|
||||||
|
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3), 0 2px 4px -1px rgba(0, 0, 0, 0.2);
|
||||||
}
|
}
|
||||||
|
|
||||||
.card-hover {
|
.card-hover {
|
||||||
@apply card hover:shadow-card-hover transition-shadow duration-150;
|
@apply card transition-all duration-150;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .card-hover:hover {
|
||||||
|
background-color: var(--card-bg-hover, #334155);
|
||||||
|
box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.4), 0 4px 6px -2px rgba(0, 0, 0, 0.3);
|
||||||
}
|
}
|
||||||
|
|
||||||
.input {
|
.input {
|
||||||
@apply block w-full px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md shadow-sm focus:outline-none focus:ring-primary-500 focus:border-primary-500 sm:text-sm bg-white dark:bg-secondary-800 text-secondary-900 dark:text-secondary-100;
|
@apply block w-full px-3 py-2 border border-secondary-300 rounded-md shadow-sm focus:outline-none focus:ring-primary-500 focus:border-primary-500 sm:text-sm bg-white text-secondary-900;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .input {
|
||||||
|
background-color: var(--card-bg, #1e293b);
|
||||||
|
border-color: var(--card-border, #334155);
|
||||||
|
color: white;
|
||||||
}
|
}
|
||||||
|
|
||||||
.label {
|
.label {
|
||||||
@@ -84,6 +111,27 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
@layer utilities {
|
@layer utilities {
|
||||||
|
/* Theme-aware backgrounds for general elements */
|
||||||
|
.dark .bg-secondary-800 {
|
||||||
|
background-color: var(--card-bg, #1e293b) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .bg-secondary-700 {
|
||||||
|
background-color: var(--card-bg-hover, #334155) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .bg-secondary-900 {
|
||||||
|
background-color: var(--theme-button-bg, #1e293b) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .border-secondary-600 {
|
||||||
|
border-color: var(--card-border, #334155) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark .border-secondary-700 {
|
||||||
|
border-color: var(--theme-button-hover, #475569) !important;
|
||||||
|
}
|
||||||
|
|
||||||
.text-shadow {
|
.text-shadow {
|
||||||
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
|
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -169,6 +169,20 @@ const Automation = () => {
|
|||||||
year: "numeric",
|
year: "numeric",
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
if (schedule === "Daily at 4 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(4, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
if (schedule === "Every hour") {
|
if (schedule === "Every hour") {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const nextHour = new Date(now);
|
const nextHour = new Date(now);
|
||||||
@@ -209,6 +223,13 @@ const Automation = () => {
|
|||||||
tomorrow.setHours(3, 0, 0, 0);
|
tomorrow.setHours(3, 0, 0, 0);
|
||||||
return tomorrow.getTime();
|
return tomorrow.getTime();
|
||||||
}
|
}
|
||||||
|
if (schedule === "Daily at 4 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(4, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
if (schedule === "Every hour") {
|
if (schedule === "Every hour") {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const nextHour = new Date(now);
|
const nextHour = new Date(now);
|
||||||
@@ -269,6 +290,8 @@ const Automation = () => {
|
|||||||
endpoint = "/automation/trigger/orphaned-repo-cleanup";
|
endpoint = "/automation/trigger/orphaned-repo-cleanup";
|
||||||
} else if (jobType === "orphaned-packages") {
|
} else if (jobType === "orphaned-packages") {
|
||||||
endpoint = "/automation/trigger/orphaned-package-cleanup";
|
endpoint = "/automation/trigger/orphaned-package-cleanup";
|
||||||
|
} else if (jobType === "docker-inventory") {
|
||||||
|
endpoint = "/automation/trigger/docker-inventory-cleanup";
|
||||||
} else if (jobType === "agent-collection") {
|
} else if (jobType === "agent-collection") {
|
||||||
endpoint = "/automation/trigger/agent-collection";
|
endpoint = "/automation/trigger/agent-collection";
|
||||||
}
|
}
|
||||||
@@ -584,6 +607,10 @@ const Automation = () => {
|
|||||||
automation.queue.includes("orphaned-package")
|
automation.queue.includes("orphaned-package")
|
||||||
) {
|
) {
|
||||||
triggerManualJob("orphaned-packages");
|
triggerManualJob("orphaned-packages");
|
||||||
|
} else if (
|
||||||
|
automation.queue.includes("docker-inventory")
|
||||||
|
) {
|
||||||
|
triggerManualJob("docker-inventory");
|
||||||
} else if (
|
} else if (
|
||||||
automation.queue.includes("agent-commands")
|
automation.queue.includes("agent-commands")
|
||||||
) {
|
) {
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -12,6 +12,7 @@ import {
  Copy,
  Cpu,
  Database,
+ Download,
  Eye,
  EyeOff,
  HardDrive,
@@ -53,6 +54,8 @@ const HostDetail = () => {
  const [historyLimit] = useState(10);
  const [notes, setNotes] = useState("");
  const [notesMessage, setNotesMessage] = useState({ text: "", type: "" });
+ const [updateMessage, setUpdateMessage] = useState({ text: "", jobId: "" });
+ const [reportMessage, setReportMessage] = useState({ text: "", jobId: "" });

  const {
  data: host,
@@ -187,6 +190,57 @@ const HostDetail = () => {
  },
  });

+ // Force agent update mutation
+ const forceAgentUpdateMutation = useMutation({
+ mutationFn: () =>
+ adminHostsAPI.forceAgentUpdate(hostId).then((res) => res.data),
+ onSuccess: (data) => {
+ queryClient.invalidateQueries(["host", hostId]);
+ queryClient.invalidateQueries(["hosts"]);
+ // Show success message with job ID
+ if (data?.jobId) {
+ setUpdateMessage({
+ text: "Update queued successfully",
+ jobId: data.jobId,
+ });
+ // Clear message after 5 seconds
+ setTimeout(() => setUpdateMessage({ text: "", jobId: "" }), 5000);
+ }
+ },
+ onError: (error) => {
+ setUpdateMessage({
+ text: error.response?.data?.error || "Failed to queue update",
+ jobId: "",
+ });
+ setTimeout(() => setUpdateMessage({ text: "", jobId: "" }), 5000);
+ },
+ });
+
+ // Fetch report mutation
+ const fetchReportMutation = useMutation({
+ mutationFn: () => adminHostsAPI.fetchReport(hostId).then((res) => res.data),
+ onSuccess: (data) => {
+ queryClient.invalidateQueries(["host", hostId]);
+ queryClient.invalidateQueries(["hosts"]);
+ // Show success message with job ID
+ if (data?.jobId) {
+ setReportMessage({
+ text: "Report fetch queued successfully",
+ jobId: data.jobId,
+ });
+ // Clear message after 5 seconds
+ setTimeout(() => setReportMessage({ text: "", jobId: "" }), 5000);
+ }
+ },
+ onError: (error) => {
+ setReportMessage({
+ text: error.response?.data?.error || "Failed to fetch report",
+ jobId: "",
+ });
+ setTimeout(() => setReportMessage({ text: "", jobId: "" }), 5000);
+ },
+ });
+
  const updateFriendlyNameMutation = useMutation({
  mutationFn: (friendlyName) =>
  adminHostsAPI
@@ -399,20 +453,53 @@ const HostDetail = () => {
  </div>
  </div>
  <div className="flex items-center gap-2">
+ <div>
+ <button
+ type="button"
+ onClick={() => fetchReportMutation.mutate()}
+ disabled={fetchReportMutation.isPending || !wsStatus?.connected}
+ className="btn-outline flex items-center gap-2 text-sm"
+ title={
+ !wsStatus?.connected
+ ? "Agent is not connected"
+ : "Fetch package data from agent"
+ }
+ >
+ <Download
+ className={`h-4 w-4 ${
+ fetchReportMutation.isPending ? "animate-spin" : ""
+ }`}
+ />
+ Fetch Report
+ </button>
+ {reportMessage.text && (
+ <p className="text-xs mt-1.5 text-secondary-600 dark:text-secondary-400">
+ {reportMessage.text}
+ {reportMessage.jobId && (
+ <span className="ml-1 font-mono text-secondary-500">
+ (Job #{reportMessage.jobId})
+ </span>
+ )}
+ </p>
+ )}
+ </div>
  <button
  type="button"
  onClick={() => setShowCredentialsModal(true)}
- className="btn-outline flex items-center gap-2 text-sm"
+ className={`btn-outline flex items-center text-sm ${
+ host?.machine_id ? "justify-center p-2" : "gap-2"
+ }`}
+ title="View credentials"
  >
  <Key className="h-4 w-4" />
- Deploy Agent
+ {!host?.machine_id && <span>Deploy Agent</span>}
  </button>
  <button
  type="button"
  onClick={() => refetch()}
  disabled={isFetching}
  className="btn-outline flex items-center justify-center p-2 text-sm"
- title="Refresh host data"
+ title="Refresh dashboard"
  >
  <RefreshCw
  className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
@@ -703,6 +790,49 @@ const HostDetail = () => {
  />
  </button>
  </div>
+
+ <div>
+ <p className="text-xs text-secondary-500 dark:text-secondary-300 mb-1.5">
+ Force Agent Version Upgrade
+ </p>
+ <button
+ type="button"
+ onClick={() => forceAgentUpdateMutation.mutate()}
+ disabled={
+ forceAgentUpdateMutation.isPending ||
+ !wsStatus?.connected
+ }
+ title={
+ !wsStatus?.connected
+ ? "Agent is not connected"
+ : "Force agent to update now"
+ }
+ className="flex items-center gap-1.5 px-3 py-1.5 text-xs font-medium text-primary-600 dark:text-primary-400 bg-primary-50 dark:bg-primary-900/20 border border-primary-200 dark:border-primary-800 rounded-md hover:bg-primary-100 dark:hover:bg-primary-900/40 transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
+ >
+ <RefreshCw
+ className={`h-3 w-3 ${
+ forceAgentUpdateMutation.isPending
+ ? "animate-spin"
+ : ""
+ }`}
+ />
+ {forceAgentUpdateMutation.isPending
+ ? "Updating..."
+ : wsStatus?.connected
+ ? "Update Now"
+ : "Offline"}
+ </button>
+ {updateMessage.text && (
+ <p className="text-xs mt-1.5 text-secondary-600 dark:text-secondary-400">
+ {updateMessage.text}
+ {updateMessage.jobId && (
+ <span className="ml-1 font-mono text-secondary-500">
+ (Job #{updateMessage.jobId})
+ </span>
+ )}
+ </p>
+ )}
+ </div>
  </div>
  </div>
  )}
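Note: both new mutations above use the same "queue a job, show its ID, clear the message after 5 seconds" pattern. A hypothetical refactor sketch of that shared behavior (the hook name is illustrative and not part of the patch):

    // assumes: import { useState } from "react";
    // Hypothetical helper for the repeated transient-message pattern used by
    // forceAgentUpdateMutation and fetchReportMutation above.
    function useTransientMessage(timeoutMs = 5000) {
      const [message, setMessage] = useState({ text: "", jobId: "" });
      const show = (text, jobId = "") => {
        setMessage({ text, jobId });
        // Reset to an empty message after the timeout elapses.
        setTimeout(() => setMessage({ text: "", jobId: "" }), timeoutMs);
      };
      return [message, show];
    }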
@@ -470,9 +470,18 @@ const EditHostGroupModal = ({ group, onClose, onSubmit, isLoading }) => {

  // Delete Confirmation Modal
  const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
+ // Fetch hosts for this group
+ const { data: hostsData } = useQuery({
+ queryKey: ["hostGroupHosts", group?.id],
+ queryFn: () => hostGroupsAPI.getHosts(group.id).then((res) => res.data),
+ enabled: !!group && group._count?.hosts > 0,
+ });
+
+ const hosts = hostsData || [];
+
  return (
  <div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
- <div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
+ <div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md max-h-[90vh] overflow-y-auto">
  <div className="flex items-center gap-3 mb-4">
  <div className="w-10 h-10 bg-danger-100 rounded-full flex items-center justify-center">
  <AlertTriangle className="h-5 w-5 text-danger-600" />
@@ -494,12 +503,30 @@ const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
  </p>
  {group._count.hosts > 0 && (
  <div className="mt-3 p-3 bg-warning-50 border border-warning-200 rounded-md">
- <p className="text-sm text-warning-800">
+ <p className="text-sm text-warning-800 mb-2">
  <strong>Warning:</strong> This group contains{" "}
  {group._count.hosts} host
  {group._count.hosts !== 1 ? "s" : ""}. You must move or remove
  these hosts before deleting the group.
  </p>
+ {hosts.length > 0 && (
+ <div className="mt-2">
+ <p className="text-xs font-medium text-warning-900 mb-1">
+ Hosts in this group:
+ </p>
+ <div className="max-h-32 overflow-y-auto bg-warning-100 rounded p-2">
+ {hosts.map((host) => (
+ <div
+ key={host.id}
+ className="text-xs text-warning-900 flex items-center gap-1"
+ >
+ <Server className="h-3 w-3" />
+ {host.friendly_name || host.hostname}
+ </div>
+ ))}
+ </div>
+ </div>
+ )}
  </div>
  )}
  </div>
@@ -402,105 +402,71 @@ const Hosts = () => {
  const token = localStorage.getItem("token");
  if (!token) return;

+ // Fetch initial WebSocket status for all hosts
  // Fetch initial WebSocket status for all hosts
  const fetchInitialStatus = async () => {
- const statusPromises = hosts
+ const apiIds = hosts
  .filter((host) => host.api_id)
- .map(async (host) => {
+ .map((host) => host.api_id);
+
+ if (apiIds.length === 0) return;
+
  try {
- const response = await fetch(`/api/v1/ws/status/${host.api_id}`, {
+ const response = await fetch(
+ `/api/v1/ws/status?apiIds=${apiIds.join(",")}`,
+ {
  headers: {
  Authorization: `Bearer ${token}`,
  },
- });
+ },
+ );
  if (response.ok) {
- const data = await response.json();
- return { apiId: host.api_id, status: data.data };
+ const result = await response.json();
+ setWsStatusMap(result.data);
  }
  } catch (_error) {
  // Silently handle errors
  }
- return {
- apiId: host.api_id,
- status: { connected: false, secure: false },
- };
- });
-
- const results = await Promise.all(statusPromises);
- const initialStatusMap = {};
- results.forEach(({ apiId, status }) => {
- initialStatusMap[apiId] = status;
- });
-
- setWsStatusMap(initialStatusMap);
  };

  fetchInitialStatus();
  }, [hosts]);

- // Subscribe to WebSocket status changes for all hosts via SSE
+ // Subscribe to WebSocket status changes for all hosts via polling (lightweight alternative to SSE)
  useEffect(() => {
  if (!hosts || hosts.length === 0) return;

  const token = localStorage.getItem("token");
  if (!token) return;

- const eventSources = new Map();
- let isMounted = true;
-
- const connectHost = (apiId) => {
- if (!isMounted || eventSources.has(apiId)) return;
-
- try {
- const es = new EventSource(
- `/api/v1/ws/status/${apiId}/stream?token=${encodeURIComponent(token)}`,
- );
-
- es.onmessage = (event) => {
- try {
- const data = JSON.parse(event.data);
- if (isMounted) {
- setWsStatusMap((prev) => {
- const newMap = { ...prev, [apiId]: data };
- return newMap;
+ // Use polling instead of SSE to avoid connection pool issues
+ // Poll every 10 seconds instead of 19 persistent connections
+ const pollInterval = setInterval(() => {
+ const apiIds = hosts
+ .filter((host) => host.api_id)
+ .map((host) => host.api_id);
+
+ if (apiIds.length === 0) return;
+
+ fetch(`/api/v1/ws/status?apiIds=${apiIds.join(",")}`, {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ })
+ .then((response) => response.json())
+ .then((result) => {
+ if (result.success && result.data) {
+ setWsStatusMap(result.data);
+ }
+ })
+ .catch(() => {
+ // Silently handle errors
  });
- }
- } catch (_err) {
- // Silently handle parse errors
- }
- };
-
- es.onerror = (_error) => {
- console.log(`[SSE] Connection error for ${apiId}, retrying...`);
- es?.close();
- eventSources.delete(apiId);
- if (isMounted) {
- // Retry connection after 5 seconds with exponential backoff
- setTimeout(() => connectHost(apiId), 5000);
- }
- };
-
- eventSources.set(apiId, es);
- } catch (_err) {
- // Silently handle connection errors
- }
- };
-
- // Connect to all hosts
- for (const host of hosts) {
- if (host.api_id) {
- connectHost(host.api_id);
- } else {
- }
- }
+ }, 10000); // Poll every 10 seconds

  // Cleanup function
  return () => {
- isMounted = false;
- for (const es of eventSources.values()) {
- es.close();
- }
- eventSources.clear();
+ clearInterval(pollInterval);
  };
  }, [hosts]);
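Read without the diff markers, the replacement effect boils down to the sketch below: one batched request for every host's connection status on a fixed interval, instead of one persistent SSE stream per host. It is only a condensed restatement of the added lines above and assumes the same surrounding component state (`hosts`, `token`, `setWsStatusMap`) and the batched `/api/v1/ws/status?apiIds=...` endpoint introduced by this change.

    // One request for all hosts every 10 s instead of one SSE stream per host.
    useEffect(() => {
      const id = setInterval(async () => {
        const apiIds = hosts.filter((h) => h.api_id).map((h) => h.api_id);
        if (apiIds.length === 0) return;
        try {
          const res = await fetch(`/api/v1/ws/status?apiIds=${apiIds.join(",")}`, {
            headers: { Authorization: `Bearer ${token}` },
          });
          const result = await res.json();
          if (result.success && result.data) setWsStatusMap(result.data);
        } catch {
          // ignore transient polling errors
        }
      }, 10000);
      return () => clearInterval(id); // single cleanup instead of closing N streams
    }, [hosts]);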
@@ -565,12 +531,11 @@ const Hosts = () => {
  "with new data:",
  data.host,
  );
- // Ensure hostGroupId is set correctly
+ // Host already has host_group_memberships from backend
  const updatedHost = {
  ...data.host,
- hostGroupId: data.host.host_groups?.id || null,
  };
- console.log("Updated host with hostGroupId:", updatedHost);
+ console.log("Updated host in cache:", updatedHost);
  return updatedHost;
  }
  return host;
@@ -688,11 +653,15 @@ const Hosts = () => {
  host.os_type?.toLowerCase().includes(searchTerm.toLowerCase()) ||
  host.notes?.toLowerCase().includes(searchTerm.toLowerCase());

- // Group filter
+ // Group filter - handle multiple groups per host
+ const memberships = host.host_group_memberships || [];
  const matchesGroup =
  groupFilter === "all" ||
- (groupFilter === "ungrouped" && !host.host_groups) ||
- (groupFilter !== "ungrouped" && host.host_groups?.id === groupFilter);
+ (groupFilter === "ungrouped" && memberships.length === 0) ||
+ (groupFilter !== "ungrouped" &&
+ memberships.some(
+ (membership) => membership.host_groups?.id === groupFilter,
+ ));

  // Status filter
  const matchesStatus =
@@ -745,10 +714,30 @@ const Hosts = () => {
  aValue = a.ip?.toLowerCase() || "zzz_no_ip";
  bValue = b.ip?.toLowerCase() || "zzz_no_ip";
  break;
- case "group":
- aValue = a.host_groups?.name || "zzz_ungrouped";
- bValue = b.host_groups?.name || "zzz_ungrouped";
+ case "group": {
+ // Handle multiple groups per host - use first group alphabetically for sorting
+ const aGroups = a.host_group_memberships || [];
+ const bGroups = b.host_group_memberships || [];
+ if (aGroups.length === 0) {
+ aValue = "zzz_ungrouped";
+ } else {
+ const aGroupNames = aGroups
+ .map((m) => m.host_groups?.name || "")
+ .filter((name) => name)
+ .sort();
+ aValue = aGroupNames[0] || "zzz_ungrouped";
+ }
+ if (bGroups.length === 0) {
+ bValue = "zzz_ungrouped";
+ } else {
+ const bGroupNames = bGroups
+ .map((m) => m.host_groups?.name || "")
+ .filter((name) => name)
+ .sort();
+ bValue = bGroupNames[0] || "zzz_ungrouped";
+ }
  break;
+ }
  case "os":
  aValue = a.os_type?.toLowerCase() || "zzz_unknown";
  bValue = b.os_type?.toLowerCase() || "zzz_unknown";
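The `aGroups`/`bGroups` branches above apply the same "first group name alphabetically, ungrouped last" rule to both sides of the comparison. A hedged sketch of that rule factored into a single helper (the helper name is illustrative, not part of the patch):

    // Illustrative helper: sort key for a host with zero or more group memberships.
    const firstGroupName = (host) => {
      const names = (host.host_group_memberships || [])
        .map((m) => m.host_groups?.name || "")
        .filter(Boolean)
        .sort();
      return names[0] || "zzz_ungrouped"; // "zzz_" prefix keeps ungrouped hosts last
    };
    // Usage in the comparator: aValue = firstGroupName(a); bValue = firstGroupName(b);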
@@ -821,11 +810,29 @@ const Hosts = () => {

  const groups = {};
  filteredAndSortedHosts.forEach((host) => {
+ if (groupBy === "group") {
+ // Handle multiple groups per host
+ const memberships = host.host_group_memberships || [];
+ if (memberships.length === 0) {
+ // Host has no groups, add to "Ungrouped"
+ if (!groups.Ungrouped) {
+ groups.Ungrouped = [];
+ }
+ groups.Ungrouped.push(host);
+ } else {
+ // Host has one or more groups, add to each group
+ memberships.forEach((membership) => {
+ const groupName = membership.host_groups?.name || "Unknown";
+ if (!groups[groupName]) {
+ groups[groupName] = [];
+ }
+ groups[groupName].push(host);
+ });
+ }
+ } else {
+ // Other grouping types (status, os, etc.)
  let groupKey;
  switch (groupBy) {
- case "group":
- groupKey = host.host_groups?.name || "Ungrouped";
- break;
  case "status":
  groupKey =
  (host.effectiveStatus || host.status).charAt(0).toUpperCase() +
@@ -842,6 +849,7 @@ const Hosts = () => {
  groups[groupKey] = [];
  }
  groups[groupKey].push(host);
+ }
  });

  return groups;
@@ -1428,14 +1436,6 @@ const Hosts = () => {
  <AlertTriangle className="h-4 w-4" />
  Hide Stale
  </button>
- <button
- type="button"
- onClick={() => setShowAddModal(true)}
- className="btn-primary flex items-center gap-2"
- >
- <Plus className="h-4 w-4" />
- Add Host
- </button>
  </div>
  </div>
@@ -1,18 +1,25 @@
  import {
  AlertCircle,
  ArrowLeft,
+ BookOpen,
  Eye,
  EyeOff,
+ Github,
+ Globe,
  Lock,
  Mail,
- Smartphone,
+ Route,
+ Star,
  User,
  } from "lucide-react";

- import { useEffect, useId, useState } from "react";
+ import { useEffect, useId, useRef, useState } from "react";
+ import { FaReddit, FaYoutube } from "react-icons/fa";

  import { useNavigate } from "react-router-dom";
+ import DiscordIcon from "../components/DiscordIcon";
  import { useAuth } from "../contexts/AuthContext";
+ import { useColorTheme } from "../contexts/ColorThemeContext";
  import { authAPI, isCorsError } from "../utils/api";

  const Login = () => {
@@ -42,9 +49,108 @@ const Login = () => {
  const [requiresTfa, setRequiresTfa] = useState(false);
  const [tfaUsername, setTfaUsername] = useState("");
  const [signupEnabled, setSignupEnabled] = useState(false);
+ const [latestRelease, setLatestRelease] = useState(null);
+ const [githubStars, setGithubStars] = useState(null);
+ const canvasRef = useRef(null);
+ const { themeConfig } = useColorTheme();

  const navigate = useNavigate();
+
+ // Generate clean radial gradient background with subtle triangular accents
+ useEffect(() => {
+ const generateBackground = () => {
+ if (!canvasRef.current || !themeConfig?.login) return;
+
+ const canvas = canvasRef.current;
+ canvas.width = canvas.offsetWidth;
+ canvas.height = canvas.offsetHeight;
+ const ctx = canvas.getContext("2d");
+
+ // Get theme colors - pick first color from each palette
+ const xColors = themeConfig.login.xColors || [
+ "#667eea",
+ "#764ba2",
+ "#f093fb",
+ "#4facfe",
+ ];
+ const yColors = themeConfig.login.yColors || [
+ "#667eea",
+ "#764ba2",
+ "#f093fb",
+ "#4facfe",
+ ];
+
+ // Use date for daily color rotation
+ const today = new Date();
+ const seed =
+ today.getFullYear() * 10000 + today.getMonth() * 100 + today.getDate();
+ const random = (s) => {
+ const x = Math.sin(s) * 10000;
+ return x - Math.floor(x);
+ };
+
+ const color1 = xColors[Math.floor(random(seed) * xColors.length)];
+ const color2 = yColors[Math.floor(random(seed + 1000) * yColors.length)];
+
+ // Create clean radial gradient from center to bottom-right corner
+ const gradient = ctx.createRadialGradient(
+ canvas.width * 0.3, // Center slightly left
+ canvas.height * 0.3, // Center slightly up
+ 0,
+ canvas.width * 0.5, // Expand to cover screen
+ canvas.height * 0.5,
+ Math.max(canvas.width, canvas.height) * 1.2,
+ );
+
+ // Subtle gradient with darker corners
+ gradient.addColorStop(0, color1);
+ gradient.addColorStop(0.6, color2);
+ gradient.addColorStop(1, "#0a0a0a"); // Very dark edges
+
+ ctx.fillStyle = gradient;
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+ // Add subtle triangular shapes as accents across entire background
+ const cellSize = 180;
+ const cols = Math.ceil(canvas.width / cellSize) + 1;
+ const rows = Math.ceil(canvas.height / cellSize) + 1;
+
+ for (let y = 0; y < rows; y++) {
+ for (let x = 0; x < cols; x++) {
+ const idx = y * cols + x;
+ // Draw more triangles (less sparse)
+ if (random(seed + idx + 5000) > 0.4) {
+ const baseX =
+ x * cellSize + random(seed + idx * 3) * cellSize * 0.8;
+ const baseY =
+ y * cellSize + random(seed + idx * 3 + 100) * cellSize * 0.8;
+ const size = 50 + random(seed + idx * 4) * 100;
+
+ ctx.beginPath();
+ ctx.moveTo(baseX, baseY);
+ ctx.lineTo(baseX + size, baseY);
+ ctx.lineTo(baseX + size / 2, baseY - size * 0.866);
+ ctx.closePath();
+
+ // More visible white with slightly higher opacity
+ ctx.fillStyle = `rgba(255, 255, 255, ${0.05 + random(seed + idx * 5) * 0.08})`;
+ ctx.fill();
+ }
+ }
+ }
+ };
+
+ generateBackground();
+
+ // Regenerate on window resize
+ const handleResize = () => {
+ generateBackground();
+ };
+
+ window.addEventListener("resize", handleResize);
+ return () => window.removeEventListener("resize", handleResize);
+ }, [themeConfig]);
+
  // Check if signup is enabled
  useEffect(() => {
  const checkSignupEnabled = async () => {
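The background generator above relies on a deterministic "random" function seeded from the calendar date, so the gradient and triangle layout are stable across re-renders and identical for every visitor on a given day. Distilled from the added lines, the core of that trick is:

    // Deterministic "random" from an integer seed: same seed, same value.
    const random = (s) => {
      const x = Math.sin(s) * 10000;
      return x - Math.floor(x); // fractional part, in [0, 1)
    };

    // One seed per calendar day, so the palette rotates daily but never
    // changes while the page is open.
    const today = new Date();
    const seed =
      today.getFullYear() * 10000 + today.getMonth() * 100 + today.getDate();
    const color1 = xColors[Math.floor(random(seed) * xColors.length)];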
@@ -63,6 +169,100 @@ const Login = () => {
  checkSignupEnabled();
  }, []);

+ // Fetch latest release and stars from GitHub
+ useEffect(() => {
+ const fetchGitHubData = async () => {
+ try {
+ // Try to get cached data first
+ const cachedRelease = localStorage.getItem("githubLatestRelease");
+ const cachedStars = localStorage.getItem("githubStarsCount");
+ const cacheTime = localStorage.getItem("githubReleaseCacheTime");
+ const now = Date.now();
+
+ // Load cached data immediately
+ if (cachedRelease) {
+ setLatestRelease(JSON.parse(cachedRelease));
+ }
+ if (cachedStars) {
+ setGithubStars(parseInt(cachedStars, 10));
+ }
+
+ // Use cache if less than 1 hour old
+ if (cacheTime && now - parseInt(cacheTime, 10) < 3600000) {
+ return;
+ }
+
+ // Fetch repository info (includes star count)
+ const repoResponse = await fetch(
+ "https://api.github.com/repos/PatchMon/PatchMon",
+ {
+ headers: {
+ Accept: "application/vnd.github.v3+json",
+ },
+ },
+ );
+
+ if (repoResponse.ok) {
+ const repoData = await repoResponse.json();
+ setGithubStars(repoData.stargazers_count);
+ localStorage.setItem(
+ "githubStarsCount",
+ repoData.stargazers_count.toString(),
+ );
+ }
+
+ // Fetch latest release
+ const releaseResponse = await fetch(
+ "https://api.github.com/repos/PatchMon/PatchMon/releases/latest",
+ {
+ headers: {
+ Accept: "application/vnd.github.v3+json",
+ },
+ },
+ );
+
+ if (releaseResponse.ok) {
+ const data = await releaseResponse.json();
+ const releaseInfo = {
+ version: data.tag_name,
+ name: data.name,
+ publishedAt: new Date(data.published_at).toLocaleDateString(
+ "en-US",
+ {
+ year: "numeric",
+ month: "long",
+ day: "numeric",
+ },
+ ),
+ body: data.body?.split("\n").slice(0, 3).join("\n") || "", // First 3 lines
+ };
+
+ setLatestRelease(releaseInfo);
+ localStorage.setItem(
+ "githubLatestRelease",
+ JSON.stringify(releaseInfo),
+ );
+ }
+
+ localStorage.setItem("githubReleaseCacheTime", now.toString());
+ } catch (error) {
+ console.error("Failed to fetch GitHub data:", error);
+ // Set fallback data if nothing cached
+ const cachedRelease = localStorage.getItem("githubLatestRelease");
+ if (!cachedRelease) {
+ setLatestRelease({
+ version: "v1.3.0",
+ name: "Latest Release",
+ publishedAt: "Recently",
+ body: "Monitor and manage your Linux package updates",
+ });
+ }
+ }
+ };
+
+ fetchGitHubData();
+ }, []); // Run once on mount
+
  const handleSubmit = async (e) => {
  e.preventDefault();
  setIsLoading(true);
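The GitHub fetch above follows a simple "serve cache first, refresh only when older than one hour" pattern built on localStorage. A generic sketch of the same approach, written as a hypothetical helper (the function name and key scheme are illustrative, not part of the patch):

    // Illustrative cache-with-TTL wrapper following the same approach:
    // return cached data when fresh, otherwise refetch and update the cache.
    async function cachedFetchJSON(key, url, maxAgeMs = 3600000) {
      const cached = localStorage.getItem(key);
      const cachedAt = Number(localStorage.getItem(`${key}:time`) || 0);
      if (cached && Date.now() - cachedAt < maxAgeMs) return JSON.parse(cached);
      const res = await fetch(url, {
        headers: { Accept: "application/vnd.github.v3+json" },
      });
      if (!res.ok) return cached ? JSON.parse(cached) : null; // fall back to stale data
      const data = await res.json();
      localStorage.setItem(key, JSON.stringify(data));
      localStorage.setItem(`${key}:time`, Date.now().toString());
      return data;
    }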
@@ -208,7 +408,12 @@ const Login = () => {
  setTfaData({
  ...tfaData,
  [name]:
- type === "checkbox" ? checked : value.replace(/\D/g, "").slice(0, 6),
+ type === "checkbox"
+ ? checked
+ : value
+ .toUpperCase()
+ .replace(/[^A-Z0-9]/g, "")
+ .slice(0, 6),
  });
  // Clear error when user starts typing
  if (error) {
@@ -239,16 +444,227 @@ const Login = () => {
  };

  return (
- <div className="min-h-screen flex items-center justify-center bg-secondary-50 py-12 px-4 sm:px-6 lg:px-8">
- <div className="max-w-md w-full space-y-8">
+ <div className="min-h-screen relative flex">
+ {/* Full-screen Trianglify Background */}
+ <canvas ref={canvasRef} className="absolute inset-0 w-full h-full" />
+ <div className="absolute inset-0 bg-gradient-to-br from-black/40 to-black/60" />
+
+ {/* Left side - Info Panel (hidden on mobile) */}
+ <div className="hidden lg:flex lg:w-1/2 xl:w-3/5 relative z-10">
+ <div className="flex flex-col justify-between text-white p-12 h-full w-full">
+ <div className="flex-1 flex flex-col justify-center items-start max-w-xl mx-auto">
+ <div className="space-y-6">
  <div>
- <div className="mx-auto h-12 w-12 flex items-center justify-center rounded-full bg-primary-100">
- <Lock size={24} color="#2563eb" strokeWidth={2} />
+ <img
+ src="/assets/logo_dark.png"
+ alt="PatchMon"
+ className="h-16 mb-4"
+ />
+ <p className="text-sm text-blue-200 font-medium tracking-wide uppercase">
+ Linux Patch Monitoring
+ </p>
  </div>
- <h2 className="mt-6 text-center text-3xl font-extrabold text-secondary-900">
+
+ {latestRelease ? (
+ <div className="space-y-4 bg-black/20 backdrop-blur-sm rounded-lg p-6 border border-white/10">
+ <div className="flex items-center gap-3">
+ <div className="flex items-center gap-2">
+ <div className="w-2 h-2 bg-green-400 rounded-full animate-pulse" />
+ <span className="text-green-300 text-sm font-semibold">
+ Latest Release
+ </span>
+ </div>
+ <span className="text-2xl font-bold text-white">
+ {latestRelease.version}
+ </span>
+ </div>
+
+ {latestRelease.name && (
+ <h3 className="text-lg font-semibold text-white">
+ {latestRelease.name}
+ </h3>
+ )}
+
+ <div className="flex items-center gap-2 text-sm text-gray-300">
+ <svg
+ className="w-4 h-4"
+ fill="none"
+ stroke="currentColor"
+ viewBox="0 0 24 24"
+ aria-label="Release date"
+ >
+ <title>Release date</title>
+ <path
+ strokeLinecap="round"
+ strokeLinejoin="round"
+ strokeWidth={2}
+ d="M8 7V3m8 4V3m-9 8h10M5 21h14a2 2 0 002-2V7a2 2 0 00-2-2H5a2 2 0 00-2 2v12a2 2 0 002 2z"
+ />
+ </svg>
+ <span>Released {latestRelease.publishedAt}</span>
+ </div>
+
+ {latestRelease.body && (
+ <p className="text-sm text-gray-300 leading-relaxed line-clamp-3">
+ {latestRelease.body}
+ </p>
+ )}
+
+ <a
+ href="https://github.com/PatchMon/PatchMon/releases/latest"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="inline-flex items-center gap-2 text-sm text-blue-300 hover:text-blue-200 transition-colors font-medium"
+ >
+ View Release Notes
+ <svg
+ className="w-4 h-4"
+ fill="none"
+ stroke="currentColor"
+ viewBox="0 0 24 24"
+ aria-label="External link"
+ >
+ <title>External link</title>
+ <path
+ strokeLinecap="round"
+ strokeLinejoin="round"
+ strokeWidth={2}
+ d="M10 6H6a2 2 0 00-2 2v10a2 2 0 002 2h10a2 2 0 002-2v-4M14 4h6m0 0v6m0-6L10 14"
+ />
+ </svg>
+ </a>
+ </div>
+ ) : (
+ <div className="space-y-4 bg-black/20 backdrop-blur-sm rounded-lg p-6 border border-white/10">
+ <div className="animate-pulse space-y-3">
+ <div className="h-6 bg-white/20 rounded w-3/4" />
+ <div className="h-4 bg-white/20 rounded w-1/2" />
+ <div className="h-4 bg-white/20 rounded w-full" />
+ </div>
+ </div>
+ )}
+ </div>
+ </div>
+
+ {/* Social Links Footer */}
+ <div className="max-w-xl mx-auto w-full">
+ <div className="border-t border-white/10 pt-6">
+ <p className="text-sm text-gray-400 mb-4">Connect with us</p>
+ <div className="flex flex-wrap items-center gap-2">
+ {/* GitHub */}
+ <a
+ href="https://github.com/PatchMon/PatchMon"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center gap-1.5 px-3 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="GitHub Repository"
+ >
+ <Github className="h-5 w-5 text-white" />
+ {githubStars !== null && (
+ <div className="flex items-center gap-1">
+ <Star className="h-3.5 w-3.5 fill-current text-yellow-400" />
+ <span className="text-sm font-medium text-white">
+ {githubStars}
+ </span>
+ </div>
+ )}
+ </a>
+
+ {/* Roadmap */}
+ <a
+ href="https://github.com/orgs/PatchMon/projects/2/views/1"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="Roadmap"
+ >
+ <Route className="h-5 w-5 text-white" />
+ </a>
+
+ {/* Docs */}
+ <a
+ href="https://docs.patchmon.net"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="Documentation"
+ >
+ <BookOpen className="h-5 w-5 text-white" />
+ </a>
+
+ {/* Discord */}
+ <a
+ href="https://patchmon.net/discord"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="Discord Community"
+ >
+ <DiscordIcon className="h-5 w-5 text-white" />
+ </a>
+
+ {/* Email */}
+ <a
+ href="mailto:support@patchmon.net"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="Email Support"
+ >
+ <Mail className="h-5 w-5 text-white" />
+ </a>
+
+ {/* YouTube */}
+ <a
+ href="https://youtube.com/@patchmonTV"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="YouTube Channel"
+ >
+ <FaYoutube className="h-5 w-5 text-white" />
+ </a>
+
+ {/* Reddit */}
+ <a
+ href="https://www.reddit.com/r/patchmon"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="Reddit Community"
+ >
+ <FaReddit className="h-5 w-5 text-white" />
+ </a>
+
+ {/* Website */}
+ <a
+ href="https://patchmon.net"
+ target="_blank"
+ rel="noopener noreferrer"
+ className="flex items-center justify-center w-10 h-10 bg-white/10 hover:bg-white/20 backdrop-blur-sm rounded-lg transition-colors border border-white/10"
+ title="Visit patchmon.net"
+ >
+ <Globe className="h-5 w-5 text-white" />
+ </a>
+ </div>
+ </div>
+ </div>
+ </div>
+ </div>
+
+ {/* Right side - Login Form */}
+ <div className="flex-1 flex items-center justify-center py-12 px-4 sm:px-6 lg:px-8 relative z-10">
+ <div className="max-w-md w-full space-y-8 bg-white dark:bg-secondary-900 rounded-2xl shadow-2xl p-8 lg:p-10">
+ <div>
+ <div className="mx-auto h-16 w-16 flex items-center justify-center">
+ <img
+ src="/assets/favicon.svg"
+ alt="PatchMon Logo"
+ className="h-16 w-16"
+ />
+ </div>
+ <h2 className="mt-6 text-center text-3xl font-extrabold text-secondary-900 dark:text-secondary-100">
  {isSignupMode ? "Create PatchMon Account" : "Sign in to PatchMon"}
  </h2>
- <p className="mt-2 text-center text-sm text-secondary-600">
+ <p className="mt-2 text-center text-sm text-secondary-600 dark:text-secondary-400">
  Monitor and manage your Linux package updates
  </p>
  </div>
@@ -262,7 +678,7 @@ const Login = () => {
  <div>
  <label
  htmlFor={usernameId}
- className="block text-sm font-medium text-secondary-700"
+ className="block text-sm font-medium text-secondary-900 dark:text-secondary-100"
  >
  {isSignupMode ? "Username" : "Username or Email"}
  </label>
@@ -293,7 +709,7 @@ const Login = () => {
  <div>
  <label
  htmlFor={firstNameId}
- className="block text-sm font-medium text-secondary-700"
+ className="block text-sm font-medium text-secondary-900 dark:text-secondary-100"
  >
  First Name
  </label>
@@ -316,7 +732,7 @@ const Login = () => {
  <div>
  <label
  htmlFor={lastNameId}
- className="block text-sm font-medium text-secondary-700"
+ className="block text-sm font-medium text-secondary-900 dark:text-secondary-100"
  >
  Last Name
  </label>
@@ -340,7 +756,7 @@ const Login = () => {
  <div>
  <label
  htmlFor={emailId}
- className="block text-sm font-medium text-secondary-700"
+ className="block text-sm font-medium text-secondary-900 dark:text-secondary-100"
  >
  Email
  </label>
@@ -366,7 +782,7 @@ const Login = () => {
  <div>
  <label
  htmlFor={passwordId}
- className="block text-sm font-medium text-secondary-700"
+ className="block text-sm font-medium text-secondary-900 dark:text-secondary-100"
  >
  Password
  </label>
@@ -433,14 +849,14 @@ const Login = () => {

  {signupEnabled && (
  <div className="text-center">
- <p className="text-sm text-secondary-600">
+ <p className="text-sm text-secondary-700 dark:text-secondary-300">
  {isSignupMode
  ? "Already have an account?"
  : "Don't have an account?"}{" "}
  <button
  type="button"
  onClick={toggleMode}
- className="font-medium text-primary-600 hover:text-primary-500 focus:outline-none focus:underline"
+ className="font-medium text-primary-600 hover:text-primary-500 dark:text-primary-400 dark:hover:text-primary-300 focus:outline-none focus:underline"
  >
  {isSignupMode ? "Sign in" : "Sign up"}
  </button>
@@ -451,21 +867,26 @@ const Login = () => {
  ) : (
  <form className="mt-8 space-y-6" onSubmit={handleTfaSubmit}>
  <div className="text-center">
- <div className="mx-auto h-12 w-12 flex items-center justify-center rounded-full bg-blue-100">
- <Smartphone size={24} color="#2563eb" strokeWidth={2} />
+ <div className="mx-auto h-16 w-16 flex items-center justify-center">
+ <img
+ src="/assets/favicon.svg"
+ alt="PatchMon Logo"
+ className="h-16 w-16"
+ />
  </div>
- <h3 className="mt-4 text-lg font-medium text-secondary-900">
+ <h3 className="mt-4 text-lg font-medium text-secondary-900 dark:text-secondary-100">
  Two-Factor Authentication
  </h3>
- <p className="mt-2 text-sm text-secondary-600">
- Enter the 6-digit code from your authenticator app
+ <p className="mt-2 text-sm text-secondary-600 dark:text-secondary-400">
+ Enter the code from your authenticator app, or use a backup
+ code
  </p>
  </div>

  <div>
  <label
  htmlFor={tokenId}
- className="block text-sm font-medium text-secondary-700"
+ className="block text-sm font-medium text-secondary-900 dark:text-secondary-100"
  >
  Verification Code
  </label>
@@ -477,11 +898,15 @@ const Login = () => {
  required
  value={tfaData.token}
  onChange={handleTfaInputChange}
- className="appearance-none rounded-md relative block w-full px-3 py-2 border border-secondary-300 placeholder-secondary-500 text-secondary-900 focus:outline-none focus:ring-primary-500 focus:border-primary-500 focus:z-10 sm:text-sm text-center text-lg font-mono tracking-widest"
- placeholder="000000"
+ className="appearance-none rounded-md relative block w-full px-3 py-2 border border-secondary-300 placeholder-secondary-500 text-secondary-900 focus:outline-none focus:ring-primary-500 focus:border-primary-500 focus:z-10 sm:text-sm text-center text-lg font-mono tracking-widest uppercase"
+ placeholder="Enter code"
  maxLength="6"
+ pattern="[A-Z0-9]{6}"
  />
  </div>
+ <p className="mt-1 text-xs text-secondary-500 dark:text-secondary-400">
+ Enter a 6-digit TOTP code or a 6-character backup code
+ </p>
  </div>

  <div className="flex items-center">
@@ -495,7 +920,7 @@ const Login = () => {
  />
  <label
  htmlFor={rememberMeId}
- className="ml-2 block text-sm text-secondary-700"
+ className="ml-2 block text-sm text-secondary-900 dark:text-secondary-200"
  >
  Remember me on this computer (skip TFA for 30 days)
  </label>
@@ -531,22 +956,21 @@ const Login = () => {
  <button
  type="button"
  onClick={handleBackToLogin}
- className="group relative w-full flex justify-center py-2 px-4 border border-secondary-300 text-sm font-medium rounded-md text-secondary-700 bg-white hover:bg-secondary-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary-500 items-center gap-2"
+ className="group relative w-full flex justify-center py-2 px-4 border border-secondary-300 dark:border-secondary-600 text-sm font-medium rounded-md text-secondary-700 dark:text-secondary-200 bg-white dark:bg-secondary-800 hover:bg-secondary-50 dark:hover:bg-secondary-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary-500 items-center gap-2"
  >
- <ArrowLeft size={16} color="#475569" strokeWidth={2} />
+ <ArrowLeft
+ size={16}
+ className="text-secondary-700 dark:text-secondary-200"
+ strokeWidth={2}
+ />
  Back to Login
  </button>
  </div>

- <div className="text-center">
- <p className="text-sm text-secondary-600">
- Don't have access to your authenticator? Use a backup code.
- </p>
- </div>
  </form>
  )}
  </div>
  </div>
+ </div>
  );
  };
@@ -557,9 +557,18 @@ const EditHostGroupModal = ({ group, onClose, onSubmit, isLoading }) => {

  // Delete Confirmation Modal
  const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
+ // Fetch hosts for this group
+ const { data: hostsData } = useQuery({
+ queryKey: ["hostGroupHosts", group?.id],
+ queryFn: () => hostGroupsAPI.getHosts(group.id).then((res) => res.data),
+ enabled: !!group && group._count?.hosts > 0,
+ });
+
+ const hosts = hostsData || [];
+
  return (
  <div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
- <div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
+ <div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md max-h-[90vh] overflow-y-auto">
  <div className="flex items-center gap-3 mb-4">
  <div className="w-10 h-10 bg-danger-100 rounded-full flex items-center justify-center">
  <AlertTriangle className="h-5 w-5 text-danger-600" />
@@ -581,12 +590,30 @@ const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
  </p>
  {group._count.hosts > 0 && (
  <div className="mt-3 p-3 bg-warning-50 border border-warning-200 rounded-md">
- <p className="text-sm text-warning-800">
+ <p className="text-sm text-warning-800 mb-2">
  <strong>Warning:</strong> This group contains{" "}
  {group._count.hosts} host
  {group._count.hosts !== 1 ? "s" : ""}. You must move or remove
  these hosts before deleting the group.
  </p>
+ {hosts.length > 0 && (
+ <div className="mt-2">
+ <p className="text-xs font-medium text-warning-900 mb-1">
+ Hosts in this group:
+ </p>
+ <div className="max-h-32 overflow-y-auto bg-warning-100 rounded p-2">
+ {hosts.map((host) => (
+ <div
+ key={host.id}
+ className="text-xs text-warning-900 flex items-center gap-1"
+ >
+ <Server className="h-3 w-3" />
+ {host.friendly_name || host.hostname}
+ </div>
+ ))}
+ </div>
+ </div>
+ )}
  </div>
  )}
  </div>
@@ -539,7 +539,7 @@ const Packages = () => {
  <Package className="h-5 w-5 text-primary-600 mr-2" />
  <div>
  <p className="text-sm text-secondary-500 dark:text-white">
- Total Packages
+ Packages
  </p>
  <p className="text-xl font-semibold text-secondary-900 dark:text-white">
  {totalPackagesCount}
@@ -553,7 +553,7 @@ const Packages = () => {
  <Package className="h-5 w-5 text-blue-600 mr-2" />
  <div>
  <p className="text-sm text-secondary-500 dark:text-white">
- Total Installations
+ Installations
  </p>
  <p className="text-xl font-semibold text-secondary-900 dark:text-white">
  {totalInstallationsCount}
@@ -562,48 +562,73 @@ const Packages = () => {
  </div>
  </div>

- <div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
+ <button
+ type="button"
+ onClick={() => {
+ setUpdateStatusFilter("needs-updates");
+ setCategoryFilter("all");
+ setHostFilter("all");
+ setSearchTerm("");
+ }}
+ className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
+ title="Click to filter packages that need updates"
+ >
  <div className="flex items-center">
  <Package className="h-5 w-5 text-warning-600 mr-2" />
  <div>
  <p className="text-sm text-secondary-500 dark:text-white">
- Total Outdated Packages
+ Outdated Packages
  </p>
  <p className="text-xl font-semibold text-secondary-900 dark:text-white">
  {outdatedPackagesCount}
  </p>
  </div>
  </div>
- </div>
+ </button>

- <div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
- <div className="flex items-center">
- <Server className="h-5 w-5 text-warning-600 mr-2" />
- <div>
- <p className="text-sm text-secondary-500 dark:text-white">
- Hosts Pending Updates
- </p>
- <p className="text-xl font-semibold text-secondary-900 dark:text-white">
- {uniquePackageHostsCount}
- </p>
- </div>
- </div>
- </div>
-
- <div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
+ <button
+ type="button"
+ onClick={() => {
+ setUpdateStatusFilter("security-updates");
+ setCategoryFilter("all");
+ setHostFilter("all");
+ setSearchTerm("");
+ }}
+ className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
+ title="Click to filter packages with security updates"
+ >
  <div className="flex items-center">
  <Shield className="h-5 w-5 text-danger-600 mr-2" />
  <div>
  <p className="text-sm text-secondary-500 dark:text-white">
- Security Updates Across All Hosts
+ Security Packages
  </p>
  <p className="text-xl font-semibold text-secondary-900 dark:text-white">
  {securityUpdatesCount}
  </p>
  </div>
  </div>
+ </button>
+
+ <button
+ type="button"
+ onClick={() => navigate("/hosts?filter=needsUpdates")}
+ className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
+ title="Click to view hosts that need updates"
+ >
+ <div className="flex items-center">
+ <Server className="h-5 w-5 text-warning-600 mr-2" />
+ <div>
+ <p className="text-sm text-secondary-500 dark:text-white">
+ Outdated Hosts
+ </p>
+ <p className="text-xl font-semibold text-secondary-900 dark:text-white">
+ {uniquePackageHostsCount}
+ </p>
  </div>
  </div>
+ </button>
+ </div>

  {/* Packages List */}
  <div className="card flex-1 flex flex-col overflow-hidden min-h-0">
@@ -25,6 +25,7 @@ import {
  import { useEffect, useId, useState } from "react";

  import { useAuth } from "../contexts/AuthContext";
+ import { THEME_PRESETS, useColorTheme } from "../contexts/ColorThemeContext";
  import { useTheme } from "../contexts/ThemeContext";
  import { isCorsError, tfaAPI } from "../utils/api";

@@ -38,6 +39,7 @@ const Profile = () => {
  const confirmPasswordId = useId();
  const { user, updateProfile, changePassword } = useAuth();
  const { toggleTheme, isDark } = useTheme();
+ const { colorTheme, setColorTheme } = useColorTheme();
  const [activeTab, setActiveTab] = useState("profile");
  const [isLoading, setIsLoading] = useState(false);
  const [message, setMessage] = useState({ type: "", text: "" });
@@ -78,8 +80,10 @@ const Profile = () => {
  setIsLoading(true);
  setMessage({ type: "", text: "" });

+ console.log("Submitting profile data:", profileData);
  try {
  const result = await updateProfile(profileData);
+ console.log("Profile update result:", result);
  if (result.success) {
  setMessage({ type: "success", text: "Profile updated successfully!" });
  } else {
@@ -411,6 +415,68 @@ const Profile = () => {
  </button>
  </div>
  </div>
+
+ {/* Color Theme Settings */}
+ <div className="mt-6 pt-6 border-t border-secondary-200 dark:border-secondary-600">
+ <h4 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
+ Color Theme
+ </h4>
+ <p className="text-xs text-secondary-500 dark:text-secondary-400 mb-4">
+ Choose your preferred color scheme for the application
+ </p>
+
+ <div className="grid grid-cols-2 md:grid-cols-3 gap-4">
+ {Object.entries(THEME_PRESETS).map(([themeKey, theme]) => {
+ const isSelected = colorTheme === themeKey;
+ const gradientColors = theme.login.xColors;
+
+ return (
+ <button
+ key={themeKey}
+ type="button"
+ onClick={() => setColorTheme(themeKey)}
+ className={`relative p-4 rounded-lg border-2 transition-all ${
+ isSelected
+ ? "border-primary-500 ring-2 ring-primary-200 dark:ring-primary-800"
+ : "border-secondary-200 dark:border-secondary-600 hover:border-primary-300"
+ } cursor-pointer`}
+ >
+ {/* Theme Preview */}
+ <div
+ className="h-20 rounded-md mb-3 overflow-hidden"
+ style={{
+ background: `linear-gradient(135deg, ${gradientColors.join(", ")})`,
+ }}
+ />
+
+ {/* Theme Name */}
+ <div className="text-sm font-medium text-secondary-900 dark:text-white mb-1">
+ {theme.name}
+ </div>
+
+ {/* Selected Indicator */}
+ {isSelected && (
+ <div className="absolute top-2 right-2 bg-primary-500 text-white rounded-full p-1">
+ <svg
+ className="w-4 h-4"
+ fill="currentColor"
+ viewBox="0 0 20 20"
+ aria-label="Selected theme"
+ >
+ <title>Selected</title>
+ <path
+ fillRule="evenodd"
+ d="M16.707 5.293a1 1 0 010 1.414l-8 8a1 1 0 01-1.414 0l-4-4a1 1 0 011.414-1.414L8 12.586l7.293-7.293a1 1 0 011.414 0z"
+ clipRule="evenodd"
+ />
+ </svg>
+ </div>
+ )}
+ </button>
+ );
+ })}
+ </div>
+ </div>
  </div>

  <div className="flex justify-end">
@@ -564,6 +630,7 @@ const Profile = () => {
  // TFA Tab Component
  const TfaTab = () => {
  const verificationTokenId = useId();
+ const disablePasswordId = useId();
  const [setupStep, setSetupStep] = useState("status"); // 'status', 'setup', 'verify', 'backup-codes'
  const [verificationToken, setVerificationToken] = useState("");
  const [password, setPassword] = useState("");
|
||||||
|
|||||||
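The Profile changes above import THEME_PRESETS and useColorTheme from ../contexts/ColorThemeContext and read theme.name and theme.login.xColors for the preview swatches. That context file is outside this excerpt; the sketch below is a minimal assumed shape that would satisfy those usages (the preset values and the localStorage persistence are guesses, not taken from the diff).

import { createContext, useContext, useState } from "react";

// Assumed presets: each entry exposes a display name and gradient colours,
// matching what the Profile picker reads.
export const THEME_PRESETS = {
	default: { name: "Default", login: { xColors: ["#1d4ed8", "#9333ea"] } },
	sunset: { name: "Sunset", login: { xColors: ["#f97316", "#db2777"] } },
};

const ColorThemeContext = createContext(null);

export const ColorThemeProvider = ({ children }) => {
	const [colorTheme, setColorThemeState] = useState(
		() => localStorage.getItem("colorTheme") || "default",
	);

	const setColorTheme = (key) => {
		// Persist the choice so it survives reloads (storage key is an assumption).
		localStorage.setItem("colorTheme", key);
		setColorThemeState(key);
	};

	return (
		<ColorThemeContext.Provider value={{ colorTheme, setColorTheme }}>
			{children}
		</ColorThemeContext.Provider>
	);
};

export const useColorTheme = () => useContext(ColorThemeContext);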
frontend/src/pages/docker/NetworkDetail.jsx (new file, 483 lines)
@@ -0,0 +1,483 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
CheckCircle,
|
||||||
|
Container,
|
||||||
|
Globe,
|
||||||
|
Network,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
Tag,
|
||||||
|
XCircle,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api, { formatRelativeTime } from "../../utils/api";
|
||||||
|
|
||||||
|
const NetworkDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "network", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/networks/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const network = data?.network;
|
||||||
|
const host = data?.host;
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !network) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Network not found
|
||||||
|
</h3>
|
||||||
|
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
The network you're looking for doesn't exist or has been
|
||||||
|
removed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const BooleanBadge = ({ value, trueLabel = "Yes", falseLabel = "No" }) => {
|
||||||
|
return value ? (
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200">
|
||||||
|
<CheckCircle className="h-3 w-3 mr-1" />
|
||||||
|
{trueLabel}
|
||||||
|
</span>
|
||||||
|
) : (
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
|
||||||
|
<XCircle className="h-3 w-3 mr-1" />
|
||||||
|
{falseLabel}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Network className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{network.name}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Network ID: {network.network_id.substring(0, 12)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Network className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Driver
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{network.driver}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Globe className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Scope
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{network.scope}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{network.container_count || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Last Checked
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(network.last_checked)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Network Information Card */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Network Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<dl className="grid grid-cols-1 gap-x-4 gap-y-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Network ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{network.network_id}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Name
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{network.name}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Driver
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200">
|
||||||
|
{network.driver}
|
||||||
|
</span>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Scope
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-200">
|
||||||
|
{network.scope}
|
||||||
|
</span>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Containers Attached
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{network.container_count || 0}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
IPv6 Enabled
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<BooleanBadge value={network.ipv6_enabled} />
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Internal
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<BooleanBadge value={network.internal} />
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Attachable
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<BooleanBadge value={network.attachable} />
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Ingress
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<BooleanBadge value={network.ingress} />
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Config Only
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<BooleanBadge value={network.config_only} />
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{network.created_at && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(network.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Last Checked
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(network.last_checked)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* IPAM Configuration */}
|
||||||
|
{network.ipam && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
IPAM Configuration
|
||||||
|
</h3>
|
||||||
|
<p className="mt-1 text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
|
IP Address Management settings
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
{network.ipam.driver && (
|
||||||
|
<div className="mb-4">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400 mb-1">
|
||||||
|
Driver
|
||||||
|
</dt>
|
||||||
|
<dd>
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200">
|
||||||
|
{network.ipam.driver}
|
||||||
|
</span>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{network.ipam.config && network.ipam.config.length > 0 && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400 mb-3">
|
||||||
|
Subnet Configuration
|
||||||
|
</dt>
|
||||||
|
<div className="space-y-4">
|
||||||
|
{network.ipam.config.map((config, index) => (
|
||||||
|
<div
|
||||||
|
key={config.subnet || `config-${index}`}
|
||||||
|
className="bg-secondary-50 dark:bg-secondary-900/50 rounded-lg p-4"
|
||||||
|
>
|
||||||
|
<dl className="grid grid-cols-1 gap-x-4 gap-y-3 sm:grid-cols-2">
|
||||||
|
{config.subnet && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-xs font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Subnet
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono">
|
||||||
|
{config.subnet}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{config.gateway && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-xs font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Gateway
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono">
|
||||||
|
{config.gateway}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{config.ip_range && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-xs font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
IP Range
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono">
|
||||||
|
{config.ip_range}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{config.aux_addresses &&
|
||||||
|
Object.keys(config.aux_addresses).length > 0 && (
|
||||||
|
<div className="sm:col-span-2">
|
||||||
|
<dt className="text-xs font-medium text-secondary-500 dark:text-secondary-400 mb-2">
|
||||||
|
Auxiliary Addresses
|
||||||
|
</dt>
|
||||||
|
<dd className="space-y-1">
|
||||||
|
{Object.entries(config.aux_addresses).map(
|
||||||
|
([key, value]) => (
|
||||||
|
<div
|
||||||
|
key={key}
|
||||||
|
className="flex items-center text-sm"
|
||||||
|
>
|
||||||
|
<span className="text-secondary-500 dark:text-secondary-400 min-w-[120px]">
|
||||||
|
{key}:
|
||||||
|
</span>
|
||||||
|
<span className="text-secondary-900 dark:text-white font-mono">
|
||||||
|
{value}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
),
|
||||||
|
)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{network.ipam.options &&
|
||||||
|
Object.keys(network.ipam.options).length > 0 && (
|
||||||
|
<div className="mt-4">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400 mb-2">
|
||||||
|
IPAM Options
|
||||||
|
</dt>
|
||||||
|
<dd className="space-y-1">
|
||||||
|
{Object.entries(network.ipam.options).map(
|
||||||
|
([key, value]) => (
|
||||||
|
<div
|
||||||
|
key={key}
|
||||||
|
className="flex items-start py-2 border-b border-secondary-100 dark:border-secondary-700 last:border-0"
|
||||||
|
>
|
||||||
|
<span className="text-sm font-medium text-secondary-500 dark:text-secondary-400 min-w-[200px]">
|
||||||
|
{key}
|
||||||
|
</span>
|
||||||
|
<span className="text-sm text-secondary-900 dark:text-white break-all">
|
||||||
|
{value}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
),
|
||||||
|
)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Host Information */}
|
||||||
|
{host && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white flex items-center">
|
||||||
|
<Server className="h-5 w-5 mr-2" />
|
||||||
|
Host Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<dl className="grid grid-cols-1 gap-x-4 gap-y-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Hostname
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${host.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{host.hostname}
|
||||||
|
</Link>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Operating System
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.os_name} {host.os_version}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Labels */}
|
||||||
|
{network.labels && Object.keys(network.labels).length > 0 && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white flex items-center">
|
||||||
|
<Tag className="h-5 w-5 mr-2" />
|
||||||
|
Labels
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="space-y-2">
|
||||||
|
{Object.entries(network.labels).map(([key, value]) => (
|
||||||
|
<div
|
||||||
|
key={key}
|
||||||
|
className="flex items-start py-2 border-b border-secondary-100 dark:border-secondary-700 last:border-0"
|
||||||
|
>
|
||||||
|
<span className="text-sm font-medium text-secondary-500 dark:text-secondary-400 min-w-[200px]">
|
||||||
|
{key}
|
||||||
|
</span>
|
||||||
|
<span className="text-sm text-secondary-900 dark:text-white break-all">
|
||||||
|
{value}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default NetworkDetail;
|
||||||
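NetworkDetail above reads the network id with useParams() and fetches /docker/networks/${id}; VolumeDetail (next file) does the same for volumes. The route registration is not shown in this compare view, so the wiring below is only an assumed example of how those pages could be mounted to receive that id parameter.

import { Route, Routes } from "react-router-dom";
import NetworkDetail from "./pages/docker/NetworkDetail";
import VolumeDetail from "./pages/docker/VolumeDetail";

// Assumed frontend routes; the real app router may mount these differently.
const DockerDetailRoutes = () => (
	<Routes>
		<Route path="/docker/networks/:id" element={<NetworkDetail />} />
		<Route path="/docker/volumes/:id" element={<VolumeDetail />} />
	</Routes>
);

export default DockerDetailRoutes;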
frontend/src/pages/docker/VolumeDetail.jsx (new file, 359 lines)
@@ -0,0 +1,359 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
Database,
|
||||||
|
HardDrive,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
Tag,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api, { formatRelativeTime } from "../../utils/api";
|
||||||
|
|
||||||
|
const VolumeDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "volume", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/volumes/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const volume = data?.volume;
|
||||||
|
const host = data?.host;
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !volume) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Volume not found
|
||||||
|
</h3>
|
||||||
|
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
The volume you're looking for doesn't exist or has been removed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatBytes = (bytes) => {
|
||||||
|
if (bytes === null || bytes === undefined) return "N/A";
|
||||||
|
const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
|
||||||
|
if (bytes === 0) return "0 Bytes";
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||||
|
return `${Math.round((bytes / 1024 ** i) * 100) / 100} ${sizes[i]}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<HardDrive className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{volume.name}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Volume ID: {volume.volume_id}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<HardDrive className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Driver
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{volume.driver}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Database className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">Size</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{formatBytes(volume.size_bytes)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Server className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{volume.ref_count || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Last Checked
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(volume.last_checked)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Volume Information Card */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Volume Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<dl className="grid grid-cols-1 gap-x-4 gap-y-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Volume ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono">
|
||||||
|
{volume.volume_id}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Name
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{volume.name}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Driver
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200">
|
||||||
|
{volume.driver}
|
||||||
|
</span>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Scope
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-200">
|
||||||
|
{volume.scope}
|
||||||
|
</span>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Size
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatBytes(volume.size_bytes)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Containers Using
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{volume.ref_count || 0}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{volume.mountpoint && (
|
||||||
|
<div className="sm:col-span-2">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Mount Point
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{volume.mountpoint}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{volume.renderer && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Renderer
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{volume.renderer}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(volume.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Last Checked
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(volume.last_checked)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Host Information */}
|
||||||
|
{host && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white flex items-center">
|
||||||
|
<Server className="h-5 w-5 mr-2" />
|
||||||
|
Host Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<dl className="grid grid-cols-1 gap-x-4 gap-y-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Hostname
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${host.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{host.hostname}
|
||||||
|
</Link>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Operating System
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.os_name} {host.os_version}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</dl>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Labels */}
|
||||||
|
{volume.labels && Object.keys(volume.labels).length > 0 && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white flex items-center">
|
||||||
|
<Tag className="h-5 w-5 mr-2" />
|
||||||
|
Labels
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="space-y-2">
|
||||||
|
{Object.entries(volume.labels).map(([key, value]) => (
|
||||||
|
<div
|
||||||
|
key={key}
|
||||||
|
className="flex items-start py-2 border-b border-secondary-100 dark:border-secondary-700 last:border-0"
|
||||||
|
>
|
||||||
|
<span className="text-sm font-medium text-secondary-500 dark:text-secondary-400 min-w-[200px]">
|
||||||
|
{key}
|
||||||
|
</span>
|
||||||
|
<span className="text-sm text-secondary-900 dark:text-white break-all">
|
||||||
|
{value}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Options */}
|
||||||
|
{volume.options && Object.keys(volume.options).length > 0 && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Volume Options
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="space-y-2">
|
||||||
|
{Object.entries(volume.options).map(([key, value]) => (
|
||||||
|
<div
|
||||||
|
key={key}
|
||||||
|
className="flex items-start py-2 border-b border-secondary-100 dark:border-secondary-700 last:border-0"
|
||||||
|
>
|
||||||
|
<span className="text-sm font-medium text-secondary-500 dark:text-secondary-400 min-w-[200px]">
|
||||||
|
{key}
|
||||||
|
</span>
|
||||||
|
<span className="text-sm text-secondary-900 dark:text-white break-all">
|
||||||
|
{value}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default VolumeDetail;
|
||||||
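A quick trace of the formatBytes helper defined inside VolumeDetail above: it steps in 1024-byte units and rounds to two decimals. The copy below is standalone only so the calls can run outside the component; the logic is unchanged from the diff.

const formatBytes = (bytes) => {
	if (bytes === null || bytes === undefined) return "N/A";
	const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
	if (bytes === 0) return "0 Bytes";
	const i = Math.floor(Math.log(bytes) / Math.log(1024));
	return `${Math.round((bytes / 1024 ** i) * 100) / 100} ${sizes[i]}`;
};

console.log(formatBytes(0)); // "0 Bytes"
console.log(formatBytes(1536)); // "1.5 KB"
console.log(formatBytes(5 * 1024 ** 3)); // "5 GB"
console.log(formatBytes(undefined)); // "N/A"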
@@ -746,239 +746,126 @@ const Integrations = () => {
|
|||||||
</div>
|
</div>
|
||||||
<div>
|
<div>
|
||||||
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white">
|
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white">
|
||||||
Docker Container Monitoring
|
Docker Inventory Collection
|
||||||
</h3>
|
</h3>
|
||||||
<p className="text-sm text-secondary-600 dark:text-secondary-400">
|
<p className="text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
Monitor Docker containers and images for available updates
|
Docker monitoring is now built into the PatchMon Go agent
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Installation Instructions */}
|
{/* Info Message */}
|
||||||
<div className="bg-primary-50 dark:bg-primary-900/20 border border-primary-200 dark:border-primary-800 rounded-lg p-6">
|
<div className="bg-primary-50 dark:bg-primary-900/20 border border-primary-200 dark:border-primary-800 rounded-lg p-6">
|
||||||
<h3 className="text-lg font-semibold text-primary-900 dark:text-primary-200 mb-4">
|
<div className="flex items-start gap-3">
|
||||||
Agent Installation
|
<CheckCircle className="h-5 w-5 text-primary-600 dark:text-primary-400 flex-shrink-0 mt-0.5" />
|
||||||
</h3>
|
<div>
|
||||||
<ol className="list-decimal list-inside space-y-3 text-sm text-primary-800 dark:text-primary-300">
|
<h4 className="text-md font-semibold text-primary-900 dark:text-primary-200 mb-2">
|
||||||
|
Automatic Docker Discovery
|
||||||
|
</h4>
|
||||||
|
<p className="text-sm text-primary-800 dark:text-primary-300 mb-3">
|
||||||
|
The PatchMon Go agent automatically discovers Docker
|
||||||
|
when it's available on your host and collects
|
||||||
|
comprehensive inventory information:
|
||||||
|
</p>
|
||||||
|
<ul className="list-disc list-inside space-y-2 text-sm text-primary-800 dark:text-primary-300 ml-2">
|
||||||
<li>
|
<li>
|
||||||
Make sure you have the PatchMon credentials file set up on
|
<strong>Containers</strong> - Running and stopped
|
||||||
your host (
|
containers with status, images, ports, and labels
|
||||||
<code className="bg-primary-100 dark:bg-primary-900/40 px-1 py-0.5 rounded text-xs">
|
|
||||||
/etc/patchmon/credentials
|
|
||||||
</code>
|
|
||||||
)
|
|
||||||
</li>
|
</li>
|
||||||
<li>
|
<li>
|
||||||
SSH into your Docker host where you want to monitor
|
<strong>Images</strong> - All Docker images with
|
||||||
containers
|
repository, tags, sizes, and sources
|
||||||
</li>
|
</li>
|
||||||
<li>Run the installation command below</li>
|
|
||||||
<li>
|
<li>
|
||||||
The agent will automatically collect Docker container and
|
<strong>Volumes</strong> - Named and anonymous volumes
|
||||||
image information every 5 minutes
|
with drivers, mountpoints, and usage
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>Networks</strong> - Docker networks with
|
||||||
|
drivers, IPAM configuration, and connected containers
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<strong>Real-time Updates</strong> - Container status
|
||||||
|
changes are pushed instantly via WebSocket
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* How It Works */}
|
||||||
|
<div className="bg-white dark:bg-secondary-900 border border-secondary-200 dark:border-secondary-600 rounded-lg p-6">
|
||||||
|
<h4 className="text-md font-semibold text-secondary-900 dark:text-white mb-4">
|
||||||
|
How It Works
|
||||||
|
</h4>
|
||||||
|
<ol className="list-decimal list-inside space-y-3 text-sm text-secondary-700 dark:text-secondary-300">
|
||||||
|
<li>
|
||||||
|
Install the PatchMon Go agent on your host (see the Hosts
|
||||||
|
page for installation instructions)
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
The agent automatically detects if Docker is installed and
|
||||||
|
running on the host
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
During each collection cycle, the agent gathers Docker
|
||||||
|
inventory data and sends it to the PatchMon server
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
View your complete Docker inventory (containers, images,
|
||||||
|
volumes, networks) in the{" "}
|
||||||
|
<a
|
||||||
|
href="/docker"
|
||||||
|
className="text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300 underline"
|
||||||
|
>
|
||||||
|
Docker page
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
Container status changes are pushed to the server in
|
||||||
|
real-time via WebSocket connection
|
||||||
</li>
|
</li>
|
||||||
<li>View your Docker inventory in the Docker page</li>
|
|
||||||
</ol>
|
</ol>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Installation Command */}
|
{/* No Configuration Required */}
|
||||||
<div className="bg-white dark:bg-secondary-900 border border-secondary-200 dark:border-secondary-600 rounded-lg p-6">
|
<div className="bg-green-50 dark:bg-green-900/20 border border-green-200 dark:border-green-800 rounded-lg p-4">
|
||||||
<h4 className="text-md font-semibold text-secondary-900 dark:text-white mb-3">
|
|
||||||
Quick Installation (One-Line Command)
|
|
||||||
</h4>
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div>
|
|
||||||
<div className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
|
|
||||||
Download and install the Docker agent:
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value={`curl -o /usr/local/bin/patchmon-docker-agent.sh "${server_url}/api/v1/docker/agent" && chmod +x /usr/local/bin/patchmon-docker-agent.sh && echo "*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect" | crontab -`}
|
|
||||||
readOnly
|
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-secondary-50 dark:bg-secondary-900 text-secondary-900 dark:text-white font-mono text-xs"
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() =>
|
|
||||||
copy_to_clipboard(
|
|
||||||
`curl -o /usr/local/bin/patchmon-docker-agent.sh "${server_url}/api/v1/docker/agent" && chmod +x /usr/local/bin/patchmon-docker-agent.sh && echo "*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect" | crontab -`,
|
|
||||||
"docker-install",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
className="btn-primary flex items-center gap-1 px-3 py-2 whitespace-nowrap"
|
|
||||||
>
|
|
||||||
{copy_success["docker-install"] ? (
|
|
||||||
<>
|
|
||||||
<CheckCircle className="h-4 w-4" />
|
|
||||||
Copied
|
|
||||||
</>
|
|
||||||
) : (
|
|
||||||
<>
|
|
||||||
<Copy className="h-4 w-4" />
|
|
||||||
Copy
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
<p className="text-xs text-secondary-500 dark:text-secondary-400 mt-2">
|
|
||||||
💡 This will download the agent, make it executable, and
|
|
||||||
set up a cron job to run every 5 minutes
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Manual Installation Steps */}
|
|
||||||
<div className="bg-white dark:bg-secondary-900 border border-secondary-200 dark:border-secondary-600 rounded-lg p-6">
|
|
||||||
<h4 className="text-md font-semibold text-secondary-900 dark:text-white mb-3">
|
|
||||||
Manual Installation Steps
|
|
||||||
</h4>
|
|
||||||
<div className="space-y-4">
|
|
||||||
<div>
|
|
||||||
<p className="text-sm text-secondary-700 dark:text-secondary-300 mb-2">
|
|
||||||
<strong>Step 1:</strong> Download the agent
|
|
||||||
</p>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value={`curl -o /usr/local/bin/patchmon-docker-agent.sh "${server_url}/api/v1/docker/agent"`}
|
|
||||||
readOnly
|
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-secondary-50 dark:bg-secondary-900 text-secondary-900 dark:text-white font-mono text-xs"
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() =>
|
|
||||||
copy_to_clipboard(
|
|
||||||
`curl -o /usr/local/bin/patchmon-docker-agent.sh "${server_url}/api/v1/docker/agent"`,
|
|
||||||
"docker-download",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
className="btn-primary p-2"
|
|
||||||
>
|
|
||||||
{copy_success["docker-download"] ? (
|
|
||||||
<CheckCircle className="h-4 w-4" />
|
|
||||||
) : (
|
|
||||||
<Copy className="h-4 w-4" />
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<p className="text-sm text-secondary-700 dark:text-secondary-300 mb-2">
|
|
||||||
<strong>Step 2:</strong> Make it executable
|
|
||||||
</p>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value="chmod +x /usr/local/bin/patchmon-docker-agent.sh"
|
|
||||||
readOnly
|
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-secondary-50 dark:bg-secondary-900 text-secondary-900 dark:text-white font-mono text-xs"
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() =>
|
|
||||||
copy_to_clipboard(
|
|
||||||
"chmod +x /usr/local/bin/patchmon-docker-agent.sh",
|
|
||||||
"docker-chmod",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
className="btn-primary p-2"
|
|
||||||
>
|
|
||||||
{copy_success["docker-chmod"] ? (
|
|
||||||
<CheckCircle className="h-4 w-4" />
|
|
||||||
) : (
|
|
||||||
<Copy className="h-4 w-4" />
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<p className="text-sm text-secondary-700 dark:text-secondary-300 mb-2">
|
|
||||||
<strong>Step 3:</strong> Test the agent
|
|
||||||
</p>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value="/usr/local/bin/patchmon-docker-agent.sh collect"
|
|
||||||
readOnly
|
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-secondary-50 dark:bg-secondary-900 text-secondary-900 dark:text-white font-mono text-xs"
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() =>
|
|
||||||
copy_to_clipboard(
|
|
||||||
"/usr/local/bin/patchmon-docker-agent.sh collect",
|
|
||||||
"docker-test",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
className="btn-primary p-2"
|
|
||||||
>
|
|
||||||
{copy_success["docker-test"] ? (
|
|
||||||
<CheckCircle className="h-4 w-4" />
|
|
||||||
) : (
|
|
||||||
<Copy className="h-4 w-4" />
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<p className="text-sm text-secondary-700 dark:text-secondary-300 mb-2">
|
|
||||||
<strong>Step 4:</strong> Set up automatic collection
|
|
||||||
(every 5 minutes)
|
|
||||||
</p>
|
|
||||||
<div className="flex items-center gap-2">
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
value='echo "*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect" | crontab -'
|
|
||||||
readOnly
|
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-secondary-50 dark:bg-secondary-900 text-secondary-900 dark:text-white font-mono text-xs"
|
|
||||||
/>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() =>
|
|
||||||
copy_to_clipboard(
|
|
||||||
'echo "*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect" | crontab -',
|
|
||||||
"docker-cron",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
className="btn-primary p-2"
|
|
||||||
>
|
|
||||||
{copy_success["docker-cron"] ? (
|
|
||||||
<CheckCircle className="h-4 w-4" />
|
|
||||||
) : (
|
|
||||||
<Copy className="h-4 w-4" />
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Prerequisites */}
|
|
||||||
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
|
|
||||||
<div className="flex items-start gap-2">
|
<div className="flex items-start gap-2">
|
||||||
<AlertCircle className="h-5 w-5 text-yellow-600 dark:text-yellow-400 flex-shrink-0 mt-0.5" />
|
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0 mt-0.5" />
|
||||||
<div className="text-sm text-yellow-800 dark:text-yellow-200">
|
<div className="text-sm text-green-800 dark:text-green-200">
|
||||||
<p className="font-semibold mb-2">Prerequisites:</p>
|
<p className="font-semibold mb-1">
|
||||||
|
No Additional Configuration Required
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
Once the Go agent is installed and Docker is running on
|
||||||
|
your host, Docker inventory collection happens
|
||||||
|
automatically. No separate Docker agent or cron jobs
|
||||||
|
needed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Requirements */}
|
||||||
|
<div className="bg-blue-50 dark:bg-blue-900/20 border border-blue-200 dark:border-blue-800 rounded-lg p-4">
|
||||||
|
<div className="flex items-start gap-2">
|
||||||
|
<AlertCircle className="h-5 w-5 text-blue-600 dark:text-blue-400 flex-shrink-0 mt-0.5" />
|
||||||
|
<div className="text-sm text-blue-800 dark:text-blue-200">
|
||||||
|
<p className="font-semibold mb-2">Requirements:</p>
|
||||||
<ul className="list-disc list-inside space-y-1 ml-2">
|
<ul className="list-disc list-inside space-y-1 ml-2">
|
||||||
|
<li>PatchMon Go agent must be installed and running</li>
|
||||||
|
<li>Docker daemon must be installed and running</li>
|
||||||
<li>
|
<li>
|
||||||
Docker must be installed and running on the host
|
Agent must have access to the Docker socket (
|
||||||
</li>
|
<code className="bg-blue-100 dark:bg-blue-900/40 px-1 py-0.5 rounded text-xs">
|
||||||
<li>
|
/var/run/docker.sock
|
||||||
PatchMon credentials file must exist at{" "}
|
|
||||||
<code className="bg-yellow-100 dark:bg-yellow-900/40 px-1 py-0.5 rounded text-xs">
|
|
||||||
/etc/patchmon/credentials
|
|
||||||
</code>
|
</code>
|
||||||
|
)
|
||||||
</li>
|
</li>
|
||||||
<li>
|
<li>
|
||||||
The host must have network access to your PatchMon
|
Typically requires running the agent as root or with
|
||||||
server
|
Docker group permissions
|
||||||
</li>
|
</li>
|
||||||
<li>The agent must run as root (or with sudo)</li>
|
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
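The Integrations copy above says the Go agent collects containers, images, volumes and networks automatically. The object below is only a rough illustration of that inventory, inferred from the fields the new Docker pages read (driver, scope, size_bytes, ref_count, ipam, labels, and so on); the agent's actual wire format and field names are not shown in this diff.

// Illustrative shape only — field names are inferred, not authoritative.
const exampleDockerInventory = {
	containers: [
		{
			container_id: "a1b2c3d4e5f6",
			name: "web",
			image: "nginx:1.27",
			status: "running",
			labels: {},
		},
	],
	images: [
		{ repository: "nginx", tag: "1.27", size_bytes: 191000000, source: "docker-hub" },
	],
	volumes: [
		{
			volume_id: "data",
			name: "data",
			driver: "local",
			scope: "local",
			size_bytes: 52428800,
			ref_count: 1,
			mountpoint: "/var/lib/docker/volumes/data/_data",
		},
	],
	networks: [
		{
			network_id: "f00dbabe1234",
			name: "bridge",
			driver: "bridge",
			scope: "local",
			ipv6_enabled: false,
			internal: false,
			attachable: false,
			ipam: {
				driver: "default",
				config: [{ subnet: "172.17.0.0/16", gateway: "172.17.0.1" }],
			},
		},
	],
};

console.log(exampleDockerInventory.networks[0].ipam.config[0].subnet);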
@@ -215,8 +215,8 @@ const SettingsHostGroups = () => {
|
|||||||
title={`View hosts in ${group.name}`}
|
title={`View hosts in ${group.name}`}
|
||||||
>
|
>
|
||||||
<Server className="h-4 w-4 mr-2" />
|
<Server className="h-4 w-4 mr-2" />
|
||||||
{group._count.hosts} host
|
{group._count?.hosts || 0} host
|
||||||
{group._count.hosts !== 1 ? "s" : ""}
|
{group._count?.hosts !== 1 ? "s" : ""}
|
||||||
</button>
|
</button>
|
||||||
</td>
|
</td>
|
||||||
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
@@ -539,9 +539,18 @@ const EditHostGroupModal = ({ group, onClose, onSubmit, isLoading }) => {
|
|||||||
|
|
||||||
// Delete Confirmation Modal
|
// Delete Confirmation Modal
|
||||||
const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
|
const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
|
||||||
|
// Fetch hosts for this group
|
||||||
|
const { data: hostsData } = useQuery({
|
||||||
|
queryKey: ["hostGroupHosts", group?.id],
|
||||||
|
queryFn: () => hostGroupsAPI.getHosts(group.id).then((res) => res.data),
|
||||||
|
enabled: !!group && group._count?.hosts > 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hosts = hostsData || [];
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
|
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
|
||||||
<div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
|
<div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md max-h-[90vh] overflow-y-auto">
|
||||||
<div className="flex items-center gap-3 mb-4">
|
<div className="flex items-center gap-3 mb-4">
|
||||||
<div className="w-10 h-10 bg-danger-100 rounded-full flex items-center justify-center">
|
<div className="w-10 h-10 bg-danger-100 rounded-full flex items-center justify-center">
|
||||||
<AlertTriangle className="h-5 w-5 text-danger-600" />
|
<AlertTriangle className="h-5 w-5 text-danger-600" />
|
||||||
@@ -561,14 +570,32 @@ const DeleteHostGroupModal = ({ group, onClose, onConfirm, isLoading }) => {
|
|||||||
Are you sure you want to delete the host group{" "}
|
Are you sure you want to delete the host group{" "}
|
||||||
<span className="font-semibold">"{group.name}"</span>?
|
<span className="font-semibold">"{group.name}"</span>?
|
||||||
</p>
|
</p>
|
||||||
{group._count.hosts > 0 && (
|
{group._count?.hosts > 0 && (
|
||||||
<div className="mt-3 p-3 bg-blue-50 border border-blue-200 rounded-md">
|
<div className="mt-3 p-3 bg-blue-50 border border-blue-200 rounded-md">
|
||||||
<p className="text-sm text-blue-800">
|
<p className="text-sm text-blue-800 mb-2">
|
||||||
<strong>Note:</strong> This group contains {group._count.hosts}{" "}
|
<strong>Note:</strong> This group contains {group._count?.hosts}{" "}
|
||||||
host
|
host
|
||||||
{group._count.hosts !== 1 ? "s" : ""}. These hosts will be moved
|
{group._count?.hosts !== 1 ? "s" : ""}. These hosts will be
|
||||||
to "No group" after deletion.
|
moved to "No group" after deletion.
|
||||||
</p>
|
</p>
|
||||||
|
{hosts.length > 0 && (
|
||||||
|
<div className="mt-2">
|
||||||
|
<p className="text-xs font-medium text-blue-900 mb-1">
|
||||||
|
Hosts in this group:
|
||||||
|
</p>
|
||||||
|
<div className="max-h-32 overflow-y-auto bg-blue-100 rounded p-2">
|
||||||
|
{hosts.map((host) => (
|
||||||
|
<div
|
||||||
|
key={host.id}
|
||||||
|
className="text-xs text-blue-900 flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Server className="h-3 w-3" />
|
||||||
|
{host.friendly_name || host.hostname}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
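The DeleteHostGroupModal change above fetches the group's hosts only when the group actually has some, via react-query's enabled option. A minimal standalone version of that gated-query pattern (fetchHosts here is a placeholder for hostGroupsAPI.getHosts):

import { useQuery } from "@tanstack/react-query";

// Runs the query only for a real group that reports at least one host,
// mirroring `enabled: !!group && group._count?.hosts > 0` in the modal above.
const useGroupHosts = (group, fetchHosts) =>
	useQuery({
		queryKey: ["hostGroupHosts", group?.id],
		queryFn: () => fetchHosts(group.id),
		enabled: !!group && (group._count?.hosts ?? 0) > 0,
	});

export default useGroupHosts;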
frontend/src/pages/settings/SettingsMetrics.jsx (new file, 413 lines)
@@ -0,0 +1,413 @@
|
|||||||
|
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertCircle,
|
||||||
|
BarChart3,
|
||||||
|
BookOpen,
|
||||||
|
CheckCircle,
|
||||||
|
Eye,
|
||||||
|
EyeOff,
|
||||||
|
Globe,
|
||||||
|
Info,
|
||||||
|
RefreshCw,
|
||||||
|
Send,
|
||||||
|
Shield,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { useState } from "react";
|
||||||
|
import SettingsLayout from "../../components/SettingsLayout";
|
||||||
|
|
||||||
|
// API functions - will be added to utils/api.js
|
||||||
|
const metricsAPI = {
|
||||||
|
getSettings: () =>
|
||||||
|
fetch("/api/v1/metrics", {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${localStorage.getItem("token")}`,
|
||||||
|
},
|
||||||
|
}).then((res) => res.json()),
|
||||||
|
updateSettings: (data) =>
|
||||||
|
fetch("/api/v1/metrics", {
|
||||||
|
method: "PUT",
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
Authorization: `Bearer ${localStorage.getItem("token")}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(data),
|
||||||
|
}).then((res) => res.json()),
|
||||||
|
regenerateId: () =>
|
||||||
|
fetch("/api/v1/metrics/regenerate-id", {
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${localStorage.getItem("token")}`,
|
||||||
|
},
|
||||||
|
}).then((res) => res.json()),
|
||||||
|
sendNow: () =>
|
||||||
|
fetch("/api/v1/metrics/send-now", {
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${localStorage.getItem("token")}`,
|
||||||
|
},
|
||||||
|
}).then((res) => res.json()),
|
||||||
|
};
|
||||||
|
|
||||||
|
const SettingsMetrics = () => {
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
const [showFullId, setShowFullId] = useState(false);
|
||||||
|
|
||||||
|
// Fetch metrics settings
|
||||||
|
const {
|
||||||
|
data: metricsSettings,
|
||||||
|
isLoading,
|
||||||
|
error,
|
||||||
|
} = useQuery({
|
||||||
|
queryKey: ["metrics-settings"],
|
||||||
|
queryFn: () => metricsAPI.getSettings(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Toggle metrics mutation
|
||||||
|
const toggleMetricsMutation = useMutation({
|
||||||
|
mutationFn: (enabled) =>
|
||||||
|
metricsAPI.updateSettings({ metrics_enabled: enabled }),
|
||||||
|
onSuccess: () => {
|
||||||
|
queryClient.invalidateQueries(["metrics-settings"]);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Regenerate ID mutation
|
||||||
|
const regenerateIdMutation = useMutation({
|
||||||
|
mutationFn: () => metricsAPI.regenerateId(),
|
||||||
|
onSuccess: () => {
|
||||||
|
queryClient.invalidateQueries(["metrics-settings"]);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send now mutation
|
||||||
|
const sendNowMutation = useMutation({
|
||||||
|
mutationFn: () => metricsAPI.sendNow(),
|
||||||
|
onSuccess: () => {
|
||||||
|
queryClient.invalidateQueries(["metrics-settings"]);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<SettingsLayout>
|
||||||
|
<div className="flex items-center justify-center h-64">
|
||||||
|
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary-600"></div>
|
||||||
|
</div>
|
||||||
|
</SettingsLayout>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return (
|
||||||
|
<SettingsLayout>
|
||||||
|
<div className="bg-red-50 dark:bg-red-900 border border-red-200 dark:border-red-700 rounded-md p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertCircle className="h-5 w-5 text-red-400 dark:text-red-300" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Error loading metrics settings
|
||||||
|
</h3>
|
||||||
|
<p className="mt-1 text-sm text-red-700 dark:text-red-300">
|
||||||
|
{error.message || "Failed to load settings"}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</SettingsLayout>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const maskId = (id) => {
|
||||||
|
if (!id) return "";
|
||||||
|
if (showFullId) return id;
|
||||||
|
return `${id.substring(0, 8)}...${id.substring(id.length - 8)}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SettingsLayout>
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center mb-6">
|
||||||
|
<BarChart3 className="h-6 w-6 text-primary-600 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h2 className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
Anonymous Metrics & Telemetry
|
||||||
|
</h2>
|
||||||
|
<p className="text-sm text-secondary-600 dark:text-secondary-400 mt-1">
|
||||||
|
Help us understand PatchMon's global usage (100% anonymous)
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Privacy Information */}
|
||||||
|
<div className="bg-blue-50 dark:bg-blue-900/30 border border-blue-200 dark:border-blue-700 rounded-lg p-6">
|
||||||
|
<div className="flex">
|
||||||
|
<Shield className="h-6 w-6 text-blue-600 dark:text-blue-400 flex-shrink-0" />
|
||||||
|
<div className="ml-4 flex-1">
|
||||||
|
<h3 className="text-base font-semibold text-blue-900 dark:text-blue-100 mb-3">
|
||||||
|
Your Privacy Matters
|
||||||
|
</h3>
|
||||||
|
<div className="text-sm text-blue-800 dark:text-blue-200 space-y-2">
|
||||||
|
<p className="flex items-start">
|
||||||
|
<CheckCircle className="h-4 w-4 mr-2 mt-0.5 flex-shrink-0" />
|
||||||
|
<span>
|
||||||
|
<strong>We do NOT collect:</strong> IP addresses, hostnames,
|
||||||
|
system details, or any personally identifiable information
|
||||||
|
</span>
|
||||||
|
</p>
|
||||||
|
<p className="flex items-start">
|
||||||
|
<CheckCircle className="h-4 w-4 mr-2 mt-0.5 flex-shrink-0" />
|
||||||
|
<span>
|
||||||
|
<strong>We ONLY collect:</strong> An anonymous UUID (for
|
||||||
|
deduplication) and the number of hosts you're monitoring
|
||||||
|
</span>
|
||||||
|
</p>
|
||||||
|
<p className="flex items-start">
|
||||||
|
<CheckCircle className="h-4 w-4 mr-2 mt-0.5 flex-shrink-0" />
|
||||||
|
<span>
|
||||||
|
<strong>Purpose:</strong> Display a live counter on our
|
||||||
|
website showing global PatchMon adoption
|
||||||
|
</span>
|
||||||
|
</p>
|
||||||
|
<p className="flex items-start">
|
||||||
|
<Globe className="h-4 w-4 mr-2 mt-0.5 flex-shrink-0" />
|
||||||
|
<span>
|
||||||
|
<strong>Open Source:</strong> All code is public and
|
||||||
|
auditable on GitHub
|
||||||
|
</span>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* More Information Button */}
|
||||||
|
<div className="mt-4 pt-4 border-t border-blue-200 dark:border-blue-700">
|
||||||
|
<a
|
||||||
|
href="https://docs.patchmon.net/books/patchmon-application-documentation/page/metrics-collection-information"
|
||||||
|
target="_blank"
|
||||||
|
rel="noopener noreferrer"
|
||||||
|
className="inline-flex items-center px-4 py-2 text-sm font-medium text-blue-700 dark:text-blue-300 bg-blue-100 dark:bg-blue-900/50 rounded-md hover:bg-blue-200 dark:hover:bg-blue-900/70 transition-colors"
|
||||||
|
>
|
||||||
|
<BookOpen className="h-4 w-4 mr-2" />
|
||||||
|
More Information
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Metrics Toggle */}
|
||||||
|
<div className="bg-white dark:bg-secondary-800 rounded-lg border border-secondary-200 dark:border-secondary-700 p-6">
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex-1">
|
||||||
|
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-2">
|
||||||
|
Enable Anonymous Metrics
|
||||||
|
</h3>
|
||||||
|
<p className="text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Share anonymous usage statistics to help us showcase PatchMon's
|
||||||
|
global adoption. Data is sent automatically every 24 hours.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() =>
|
||||||
|
toggleMetricsMutation.mutate(!metricsSettings?.metrics_enabled)
|
||||||
|
}
|
||||||
|
disabled={toggleMetricsMutation.isPending}
|
||||||
|
className={`ml-4 relative inline-flex h-6 w-11 flex-shrink-0 cursor-pointer rounded-full border-2 border-transparent transition-colors duration-200 ease-in-out focus:outline-none focus:ring-2 focus:ring-primary-500 focus:ring-offset-2 ${
|
||||||
|
metricsSettings?.metrics_enabled
|
||||||
|
? "bg-primary-600"
|
||||||
|
: "bg-secondary-200 dark:bg-secondary-700"
|
||||||
|
} ${toggleMetricsMutation.isPending ? "opacity-50" : ""}`}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
className={`inline-block h-5 w-5 transform rounded-full bg-white shadow ring-0 transition duration-200 ease-in-out ${
|
||||||
|
metricsSettings?.metrics_enabled
|
||||||
|
? "translate-x-5"
|
||||||
|
: "translate-x-0"
|
||||||
|
}`}
|
||||||
|
/>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Status */}
|
||||||
|
<div className="mt-4 pt-4 border-t border-secondary-200 dark:border-secondary-700">
|
||||||
|
<div className="flex items-center text-sm">
|
||||||
|
{metricsSettings?.metrics_enabled ? (
|
||||||
|
<>
|
||||||
|
<CheckCircle className="h-4 w-4 text-green-500 mr-2" />
|
||||||
|
<span className="text-green-700 dark:text-green-400">
|
||||||
|
Metrics enabled - Thank you for supporting PatchMon!
|
||||||
|
</span>
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<EyeOff className="h-4 w-4 text-secondary-500 mr-2" />
|
||||||
|
<span className="text-secondary-600 dark:text-secondary-400">
|
||||||
|
Metrics disabled - No data is being sent
|
||||||
|
</span>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Anonymous ID Section */}
|
||||||
|
<div className="bg-white dark:bg-secondary-800 rounded-lg border border-secondary-200 dark:border-secondary-700 p-6">
|
||||||
|
<div className="flex items-start justify-between mb-4">
|
||||||
|
<div>
|
||||||
|
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-2">
|
||||||
|
Your Anonymous Instance ID
|
||||||
|
</h3>
|
||||||
|
<p className="text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
This UUID identifies your instance without revealing any
|
||||||
|
personal information
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="mt-4 space-y-4">
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<div className="flex-1 bg-secondary-50 dark:bg-secondary-700 rounded-md p-3 font-mono text-sm break-all">
|
||||||
|
{maskId(metricsSettings?.metrics_anonymous_id)}
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => setShowFullId(!showFullId)}
|
||||||
|
className="p-2 text-secondary-600 dark:text-secondary-400 hover:text-secondary-900 dark:hover:text-white"
|
||||||
|
title={showFullId ? "Hide ID" : "Show full ID"}
|
||||||
|
>
|
||||||
|
{showFullId ? (
|
||||||
|
<EyeOff className="h-5 w-5" />
|
||||||
|
) : (
|
||||||
|
<Eye className="h-5 w-5" />
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex gap-3">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => regenerateIdMutation.mutate()}
|
||||||
|
disabled={regenerateIdMutation.isPending}
|
||||||
|
className="inline-flex items-center px-4 py-2 border border-secondary-300 dark:border-secondary-600 text-sm font-medium rounded-md text-secondary-700 dark:text-secondary-200 bg-white dark:bg-secondary-700 hover:bg-secondary-50 dark:hover:bg-secondary-600 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary-500 disabled:opacity-50"
|
||||||
|
>
|
||||||
|
{regenerateIdMutation.isPending ? (
|
||||||
|
<>
|
||||||
|
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-secondary-700 dark:border-secondary-200 mr-2"></div>
|
||||||
|
Regenerating...
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<RefreshCw className="h-4 w-4 mr-2" />
|
||||||
|
Regenerate ID
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => sendNowMutation.mutate()}
|
||||||
|
disabled={
|
||||||
|
!metricsSettings?.metrics_enabled || sendNowMutation.isPending
|
||||||
|
}
|
||||||
|
className="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md text-white bg-primary-600 hover:bg-primary-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary-500 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||||
|
>
|
||||||
|
{sendNowMutation.isPending ? (
|
||||||
|
<>
|
||||||
|
<div className="animate-spin rounded-full h-4 w-4 border-b-2 border-white mr-2"></div>
|
||||||
|
Sending...
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<Send className="h-4 w-4 mr-2" />
|
||||||
|
Send Metrics Now
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{metricsSettings?.metrics_last_sent && (
|
||||||
|
<p className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
Last sent:{" "}
|
||||||
|
{new Date(metricsSettings.metrics_last_sent).toLocaleString()}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Success/Error Messages */}
|
||||||
|
{regenerateIdMutation.isSuccess && (
|
||||||
|
<div className="mt-4 bg-green-50 dark:bg-green-900/30 border border-green-200 dark:border-green-700 rounded-md p-3">
|
||||||
|
<div className="flex">
|
||||||
|
<CheckCircle className="h-4 w-4 text-green-400 dark:text-green-300 mt-0.5" />
|
||||||
|
<p className="ml-2 text-sm text-green-700 dark:text-green-300">
|
||||||
|
Anonymous ID regenerated successfully
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{sendNowMutation.isSuccess && (
|
||||||
|
<div className="mt-4 bg-green-50 dark:bg-green-900/30 border border-green-200 dark:border-green-700 rounded-md p-3">
|
||||||
|
<div className="flex">
|
||||||
|
<CheckCircle className="h-4 w-4 text-green-400 dark:text-green-300 mt-0.5" />
|
||||||
|
<div className="ml-2 text-sm text-green-700 dark:text-green-300">
|
||||||
|
<p className="font-medium">Metrics sent successfully!</p>
|
||||||
|
{sendNowMutation.data?.data && (
|
||||||
|
<p className="mt-1">
|
||||||
|
Sent: {sendNowMutation.data.data.hostCount} hosts, version{" "}
|
||||||
|
{sendNowMutation.data.data.version}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{sendNowMutation.isError && (
|
||||||
|
<div className="mt-4 bg-red-50 dark:bg-red-900/30 border border-red-200 dark:border-red-700 rounded-md p-3">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertCircle className="h-4 w-4 text-red-400 dark:text-red-300 mt-0.5" />
|
||||||
|
<div className="ml-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
{sendNowMutation.error?.message || "Failed to send metrics"}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Information Panel */}
|
||||||
|
<div className="bg-secondary-50 dark:bg-secondary-800/50 border border-secondary-200 dark:border-secondary-700 rounded-lg p-6">
|
||||||
|
<div className="flex">
|
||||||
|
<Info className="h-5 w-5 text-secondary-500 dark:text-secondary-400 flex-shrink-0 mt-0.5" />
|
||||||
|
<div className="ml-3 text-sm text-secondary-700 dark:text-secondary-300">
|
||||||
|
<h4 className="font-medium mb-2">How it works:</h4>
|
||||||
|
<ul className="space-y-1 list-disc list-inside">
|
||||||
|
<li>
|
||||||
|
Metrics are sent automatically every 24 hours when enabled
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
Only host count and version number are transmitted (no
|
||||||
|
sensitive data)
|
||||||
|
</li>
|
||||||
|
<li>The anonymous UUID prevents duplicate counting</li>
|
||||||
|
<li>You can regenerate your ID or opt-out at any time</li>
|
||||||
|
<li>
|
||||||
|
All collected data is displayed publicly on{" "}
|
||||||
|
<a
|
||||||
|
href="https://patchmon.net"
|
||||||
|
target="_blank"
|
||||||
|
rel="noopener noreferrer"
|
||||||
|
className="text-primary-600 dark:text-primary-400 hover:underline"
|
||||||
|
>
|
||||||
|
patchmon.net
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</SettingsLayout>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default SettingsMetrics;
|
||||||
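For reference, a minimal sketch (not part of the diff) of the settings object this page expects back from GET /api/v1/metrics. The field names below are simply the ones the component reads (metrics_enabled, metrics_anonymous_id, metrics_last_sent); the values are made up.

const exampleSettings = {
	metrics_enabled: true,
	metrics_anonymous_id: "3f8a1c2e-9b4d-4e71-a5c0-7d2f6b9e1a34",
	metrics_last_sent: "2025-01-01T00:00:00.000Z",
};

// With showFullId = false, maskId() above would render this ID as "3f8a1c2e...6b9e1a34"
console.log(
	`${exampleSettings.metrics_anonymous_id.substring(0, 8)}...${exampleSettings.metrics_anonymous_id.substring(exampleSettings.metrics_anonymous_id.length - 8)}`,
);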
@@ -5,7 +5,7 @@ const API_BASE_URL = import.meta.env.VITE_API_URL || "/api/v1";
// Create axios instance with default config
const api = axios.create({
	baseURL: API_BASE_URL,
-	timeout: 10000,
+	timeout: 10000, // 10 seconds
	headers: {
		"Content-Type": "application/json",
	},
@@ -19,6 +19,30 @@ api.interceptors.request.use(
		if (token) {
			config.headers.Authorization = `Bearer ${token}`;
		}
+
+		// Add device ID for TFA remember-me functionality
+		// This uniquely identifies the browser profile (normal vs incognito)
+		let deviceId = localStorage.getItem("device_id");
+		if (!deviceId) {
+			// Generate a unique device ID and store it
+			// Use crypto.randomUUID() if available, otherwise generate a UUID v4 manually
+			if (typeof crypto !== "undefined" && crypto.randomUUID) {
+				deviceId = crypto.randomUUID();
+			} else {
+				// Fallback: Generate UUID v4 manually
+				deviceId = "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(
+					/[xy]/g,
+					(c) => {
+						const r = (Math.random() * 16) | 0;
+						const v = c === "x" ? r : (r & 0x3) | 0x8;
+						return v.toString(16);
+					},
+				);
+			}
+			localStorage.setItem("device_id", deviceId);
+		}
+		config.headers["X-Device-ID"] = deviceId;
+
		return config;
	},
	(error) => {
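The fallback generator added above follows the standard UUID v4 template string. A standalone sketch of the same logic (extracted here only for illustration) shows the shape of the IDs it produces:

const uuidV4Fallback = () =>
	"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
		const r = (Math.random() * 16) | 0;
		const v = c === "x" ? r : (r & 0x3) | 0x8;
		return v.toString(16);
	});

const id = uuidV4Fallback();
// e.g. "9b2f4c1d-8e3a-4f60-a7b5-0c9d2e1f3a46": the version nibble is always 4
// and the variant nibble is always 8, 9, a or b
console.log(
	/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/.test(id),
); // true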
@@ -95,6 +119,8 @@ export const adminHostsAPI = {
		api.put("/hosts/bulk/groups", { hostIds, groupIds }),
	toggleAutoUpdate: (hostId, autoUpdate) =>
		api.patch(`/hosts/${hostId}/auto-update`, { auto_update: autoUpdate }),
+	forceAgentUpdate: (hostId) => api.post(`/hosts/${hostId}/force-agent-update`),
+	fetchReport: (hostId) => api.post(`/hosts/${hostId}/fetch-report`),
	updateFriendlyName: (hostId, friendlyName) =>
		api.patch(`/hosts/${hostId}/friendly-name`, {
			friendly_name: friendlyName,
@@ -143,6 +169,12 @@ export const settingsAPI = {
	getServerUrl: () => api.get("/settings/server-url"),
};
+
+// User Preferences API
+export const userPreferencesAPI = {
+	get: () => api.get("/user/preferences"),
+	update: (preferences) => api.patch("/user/preferences", preferences),
+};

// Agent File Management API
export const agentFileAPI = {
	getInfo: () => api.get("/hosts/agent/info"),
frontend/src/utils/docker.js
Normal file (171 lines)
@@ -0,0 +1,171 @@
/**
 * Docker-related utility functions for the frontend
 */

/**
 * Generate a registry link for a Docker image based on its repository and source
 * @param {string} repository - The full repository name (e.g., "ghcr.io/owner/repo")
 * @param {string} source - The detected source (github, gitlab, docker-hub, etc.)
 * @returns {string|null} - The URL to the registry page, or null if unknown
 */
export function generateRegistryLink(repository, source) {
	if (!repository) {
		return null;
	}

	// Parse the domain and path from the repository
	const parts = repository.split("/");
	let domain = "";
	let path = "";

	// Check if repository has a domain (contains a dot)
	if (parts[0].includes(".") || parts[0].includes(":")) {
		domain = parts[0];
		path = parts.slice(1).join("/");
	} else {
		// No domain means Docker Hub
		domain = "docker.io";
		path = repository;
	}

	switch (source) {
		case "docker-hub":
		case "docker.io": {
			// Docker Hub: https://hub.docker.com/r/{path} or https://hub.docker.com/_/{path} for official images
			// Official images are those without a namespace (e.g., "postgres" not "user/postgres")
			// or explicitly prefixed with "library/"
			if (path.startsWith("library/")) {
				const cleanPath = path.replace("library/", "");
				return `https://hub.docker.com/_/${cleanPath}`;
			}
			// Check if it's an official image (single part, no slash after removing library/)
			if (!path.includes("/")) {
				return `https://hub.docker.com/_/${path}`;
			}
			// Regular user/org image
			return `https://hub.docker.com/r/${path}`;
		}

		case "github":
		case "ghcr.io": {
			// GitHub Container Registry
			// Format: ghcr.io/{owner}/{package} or ghcr.io/{owner}/{repo}/{package}
			// URL format: https://github.com/{owner}/{repo}/pkgs/container/{package}
			if (domain === "ghcr.io" && path) {
				const pathParts = path.split("/");
				if (pathParts.length === 2) {
					// Simple case: ghcr.io/owner/package -> github.com/owner/owner/pkgs/container/package
					// OR: ghcr.io/owner/repo -> github.com/owner/repo/pkgs/container/{package}
					// Actually, for 2 parts it's owner/package, and repo is same as owner typically
					const owner = pathParts[0];
					const packageName = pathParts[1];
					return `https://github.com/${owner}/${owner}/pkgs/container/${packageName}`;
				} else if (pathParts.length >= 3) {
					// Extended case: ghcr.io/owner/repo/package -> github.com/owner/repo/pkgs/container/package
					const owner = pathParts[0];
					const repo = pathParts[1];
					const packageName = pathParts.slice(2).join("/");
					return `https://github.com/${owner}/${repo}/pkgs/container/${packageName}`;
				}
			}
			// Legacy GitHub Packages
			if (domain === "docker.pkg.github.com" && path) {
				const pathParts = path.split("/");
				if (pathParts.length >= 1) {
					return `https://github.com/${pathParts[0]}/packages`;
				}
			}
			return null;
		}

		case "gitlab":
		case "registry.gitlab.com": {
			// GitLab Container Registry
			if (path) {
				return `https://gitlab.com/${path}/container_registry`;
			}
			return null;
		}

		case "google":
		case "gcr.io": {
			// Google Container Registry
			if (domain.includes("gcr.io") || domain.includes("pkg.dev")) {
				return `https://console.cloud.google.com/gcr/images/${path}`;
			}
			return null;
		}

		case "quay":
		case "quay.io": {
			// Quay.io
			if (path) {
				return `https://quay.io/repository/${path}`;
			}
			return null;
		}

		case "redhat":
		case "registry.access.redhat.com": {
			// Red Hat
			if (path) {
				return `https://access.redhat.com/containers/#/registry.access.redhat.com/${path}`;
			}
			return null;
		}

		case "azure":
		case "azurecr.io": {
			// Azure Container Registry
			if (domain.includes("azurecr.io")) {
				const registryName = domain.split(".")[0];
				return `https://portal.azure.com/#view/Microsoft_Azure_ContainerRegistries/RepositoryBlade/registryName/${registryName}/repositoryName/${path}`;
			}
			return null;
		}

		case "aws":
		case "amazonaws.com": {
			// AWS ECR
			if (domain.includes("amazonaws.com")) {
				const domainParts = domain.split(".");
				const region = domainParts[3]; // Extract region
				return `https://${region}.console.aws.amazon.com/ecr/repositories/private/${path}`;
			}
			return null;
		}

		case "private":
			// For private registries, try to construct a basic URL
			if (domain) {
				return `https://${domain}`;
			}
			return null;

		default:
			return null;
	}
}

/**
 * Get a user-friendly display name for a registry source
 * @param {string} source - The source identifier
 * @returns {string} - Human-readable source name
 */
export function getSourceDisplayName(source) {
	const sourceNames = {
		"docker-hub": "Docker Hub",
		github: "GitHub",
		gitlab: "GitLab",
		google: "Google",
		quay: "Quay.io",
		redhat: "Red Hat",
		azure: "Azure",
		aws: "AWS ECR",
		private: "Private Registry",
		local: "Local",
		unknown: "Unknown",
	};

	return sourceNames[source] || source;
}
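A few illustrative calls against the helpers above; the image names are made-up examples, and the expected URLs follow directly from the switch logic in generateRegistryLink:

import { generateRegistryLink, getSourceDisplayName } from "./docker";

// Official Docker Hub image (no namespace)
generateRegistryLink("postgres", "docker-hub");
// -> "https://hub.docker.com/_/postgres"

// GitHub Container Registry, two-part path (owner/package)
generateRegistryLink("ghcr.io/acme/widget", "github");
// -> "https://github.com/acme/acme/pkgs/container/widget"

// Quay.io
generateRegistryLink("quay.io/acme/widget", "quay");
// -> "https://quay.io/repository/acme/widget"

getSourceDisplayName("docker-hub"); // -> "Docker Hub"
getSourceDisplayName("harbor"); // unknown sources fall through to the raw string, -> "harbor"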
@@ -1,3 +1,4 @@
+import { Agent as HttpAgent } from "node:http";
import react from "@vitejs/plugin-react";
import { defineConfig } from "vite";

@@ -14,6 +15,15 @@ export default defineConfig({
				target: `http://${process.env.BACKEND_HOST || "localhost"}:${process.env.BACKEND_PORT || "3001"}`,
				changeOrigin: true,
				secure: false,
+				// Configure HTTP agent to support more concurrent connections
+				// Fixes 1000ms timeout issue when using HTTP (not HTTPS) with multiple hosts
+				agent: new HttpAgent({
+					keepAlive: true,
+					maxSockets: 50, // Increase from default 6 to handle multiple hosts
+					maxFreeSockets: 10,
+					timeout: 60000,
+					keepAliveMsecs: 1000,
+				}),
				configure:
					process.env.VITE_ENABLE_LOGGING === "true"
						? (proxy, _options) => {
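A standalone sketch of the same keep-alive behaviour outside Vite, assuming the proxy's default backend of localhost:3001; the /api/v1/health path is only a placeholder. agent.sockets and agent.freeSockets are standard node:http Agent fields and give a quick way to confirm sockets are reused rather than re-opened per request.

import { get, Agent as HttpAgent } from "node:http";

// Same options as the proxy agent in the config above
const agent = new HttpAgent({
	keepAlive: true,
	maxSockets: 50,
	maxFreeSockets: 10,
	timeout: 60000,
	keepAliveMsecs: 1000,
});

// Placeholder request: with keepAlive, the finished socket moves into
// agent.freeSockets instead of being destroyed
get({ host: "localhost", port: 3001, path: "/api/v1/health", agent }, (res) => {
	res.resume();
	res.on("end", () => {
		console.log("idle keep-alive sockets per origin:", agent.freeSockets);
	});
});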
package-lock.json
generated (1956 lines)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
	"name": "patchmon",
-	"version": "1.3.0",
+	"version": "1.3.2",
	"description": "Linux Patch Monitoring System",
	"license": "AGPL-3.0",
	"private": true,
@@ -25,7 +25,7 @@
		"lint:fix": "biome check --write ."
	},
	"devDependencies": {
-		"@biomejs/biome": "2.2.4",
+		"@biomejs/biome": "^2.3.0",
		"concurrently": "^8.2.2",
		"lefthook": "^1.13.4"
	},
tools/diagnostics.sh
Executable file (715 lines)
@@ -0,0 +1,715 @@
#!/bin/bash
# PatchMon Diagnostics Collection Script
# Collects system information, logs, and configuration for troubleshooting
# Usage: sudo bash diagnostics.sh [instance-name]

# Note: Not using 'set -e' because we want to continue even if some commands fail
set -o pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Print functions
print_status() {
	echo -e "${GREEN}✅ $1${NC}"
}

print_info() {
	echo -e "${BLUE}ℹ️ $1${NC}"
}

print_error() {
	echo -e "${RED}❌ $1${NC}"
}

print_warning() {
	echo -e "${YELLOW}⚠️ $1${NC}"
}

print_success() {
	echo -e "${GREEN}🎉 $1${NC}"
}

# Check if running as root
if [[ $EUID -ne 0 ]]; then
	print_error "This script must be run as root"
	print_info "Please run: sudo bash $0"
	exit 1
fi

# Function to sanitize sensitive information
sanitize_sensitive() {
	local input="$1"
	# Replace passwords, secrets, and tokens with [REDACTED]
	echo "$input" | \
		sed -E 's/(PASSWORD|SECRET|TOKEN|KEY|PASS)=[^"]*$/\1=[REDACTED]/gi' | \
		sed -E 's/(PASSWORD|SECRET|TOKEN|KEY|PASS)="[^"]*"/\1="[REDACTED]"/gi' | \
		sed -E 's/(password|secret|token|key|pass)": *"[^"]*"/\1": "[REDACTED]"/gi' | \
		sed -E 's/(>)[a-zA-Z0-9+\/=]{20,}/\1[REDACTED]/g' | \
		sed -E 's|postgresql://([^:]+):([^@]+)@|postgresql://\1:[REDACTED]@|g' | \
		sed -E 's|mysql://([^:]+):([^@]+)@|mysql://\1:[REDACTED]@|g' | \
		sed -E 's|mongodb://([^:]+):([^@]+)@|mongodb://\1:[REDACTED]@|g'
}

# Function to detect PatchMon installations
detect_installations() {
	local installations=()

	if [ ! -d "/opt" ]; then
		print_error "/opt directory does not exist"
		return 1
	fi

	for dir in /opt/*/; do
		# Skip if no directories found
		[ -d "$dir" ] || continue

		local dirname=$(basename "$dir")

		# Skip backup directories
		if [[ "$dirname" =~ \.backup\. ]]; then
			continue
		fi

		# Check if it's a PatchMon installation
		if [ -f "$dir/backend/package.json" ]; then
			if grep -q "patchmon" "$dir/backend/package.json" 2>/dev/null; then
				installations+=("$dirname")
			fi
		fi
	done

	echo "${installations[@]}"
}

# Function to select installation
select_installation() {
	local installations=($(detect_installations))

	if [ ${#installations[@]} -eq 0 ]; then
		print_error "No PatchMon installations found in /opt" >&2
		exit 1
	fi

	if [ -n "$1" ]; then
		# Use provided instance name
		if [[ " ${installations[@]} " =~ " $1 " ]]; then
			echo "$1"
			return 0
		else
			print_error "Instance '$1' not found" >&2
			exit 1
		fi
	fi

	# Send status messages to stderr so they don't contaminate the return value
	print_info "Found ${#installations[@]} installation(s):" >&2
	echo "" >&2

	local i=1
	declare -A install_map
	for install in "${installations[@]}"; do
		# Get service status
		local status="unknown"
		if systemctl is-active --quiet "$install" 2>/dev/null; then
			status="${GREEN}running${NC}"
		elif systemctl is-enabled --quiet "$install" 2>/dev/null; then
			status="${RED}stopped${NC}"
		fi

		printf "%2d. %-30s (%b)\n" "$i" "$install" "$status" >&2
		install_map[$i]="$install"
		i=$((i + 1))
	done

	echo "" >&2

	# If only one installation, select it automatically
	if [ ${#installations[@]} -eq 1 ]; then
		print_info "Only one installation found, selecting automatically: ${installations[0]}" >&2
		echo "${installations[0]}"
		return 0
	fi

	# Multiple installations - prompt user
	printf "${BLUE}Select installation number [1]: ${NC}" >&2
	read -r selection </dev/tty

	selection=${selection:-1}

	if [[ "$selection" =~ ^[0-9]+$ ]] && [ -n "${install_map[$selection]}" ]; then
		echo "${install_map[$selection]}"
		return 0
	else
		print_error "Invalid selection" >&2
		exit 1
	fi
}

# Main script
main() {
	# Capture the directory where script is run from at the very start
	ORIGINAL_DIR=$(pwd)

	echo -e "${BLUE}====================================================${NC}"
	echo -e "${BLUE} PatchMon Diagnostics Collection${NC}"
	echo -e "${BLUE}====================================================${NC}"
	echo ""

	# Select instance
	instance_name=$(select_installation "$1")
	instance_dir="/opt/$instance_name"

	print_info "Selected instance: $instance_name"
	print_info "Directory: $instance_dir"
	echo ""

	# Create single diagnostics file in the original directory
	timestamp=$(date +%Y%m%d_%H%M%S)
	diag_file="${ORIGINAL_DIR}/patchmon_diagnostics_${instance_name}_${timestamp}.txt"

	print_info "Collecting diagnostics to: $diag_file"
	echo ""

	# Initialize the diagnostics file with header
	cat > "$diag_file" << EOF
===================================================
PatchMon Diagnostics Report
===================================================
Instance: $instance_name
Generated: $(date)
Hostname: $(hostname)
Generated from: ${ORIGINAL_DIR}
===================================================

EOF

	# ========================================
	# 1. System Information
	# ========================================
	print_info "Collecting system information..."

	cat >> "$diag_file" << EOF
=== System Information ===
OS: $(cat /etc/os-release 2>/dev/null | grep PRETTY_NAME | cut -d'"' -f2 || echo "Unknown")
Kernel: $(uname -r)
Uptime: $(uptime)

=== CPU Information ===
$(lscpu | grep -E "Model name|CPU\(s\)|Thread|Core" || echo "Not available")

=== Memory Information ===
$(free -h)

=== Disk Usage ===
$(df -h | grep -E "Filesystem|/dev/|/opt")

=== Network Interfaces ===
$(ip -br addr)

===================================================
EOF

	# ========================================
	# 2. PatchMon Instance Information
	# ========================================
	print_info "Collecting instance information..."

	cat >> "$diag_file" << EOF

=== PatchMon Instance Information ===

=== Directory Structure ===
$(ls -lah "$instance_dir" 2>/dev/null || echo "Cannot access directory")

=== Backend Package Info ===
$(cat "$instance_dir/backend/package.json" 2>/dev/null | grep -E "name|version" || echo "Not found")

=== Frontend Package Info ===
$(cat "$instance_dir/frontend/package.json" 2>/dev/null | grep -E "name|version" || echo "Not found")

=== Deployment Info ===
$(cat "$instance_dir/deployment-info.txt" 2>/dev/null || echo "No deployment-info.txt found")

===================================================
EOF

	# ========================================
	# 3. Environment Configuration (Sanitized)
	# ========================================
	print_info "Collecting environment configuration (sanitized)..."

	echo "" >> "$diag_file"
	echo "=== Backend Environment Configuration (Sanitized) ===" >> "$diag_file"
	if [ -f "$instance_dir/backend/.env" ]; then
		sanitize_sensitive "$(cat "$instance_dir/backend/.env")" >> "$diag_file"
	else
		echo "Backend .env file not found" >> "$diag_file"
	fi
	echo "" >> "$diag_file"

	# ========================================
	# 4. Service Status and Configuration
	# ========================================
	print_info "Collecting service information..."

	cat >> "$diag_file" << EOF

=== Service Status and Configuration ===

=== Service Status ===
$(systemctl status "$instance_name" 2>/dev/null || echo "Service not found")

=== Service File ===
$(cat "/etc/systemd/system/${instance_name}.service" 2>/dev/null || echo "Service file not found")

=== Service is-enabled ===
$(systemctl is-enabled "$instance_name" 2>/dev/null || echo "unknown")

=== Service is-active ===
$(systemctl is-active "$instance_name" 2>/dev/null || echo "unknown")

===================================================
EOF

	# ========================================
	# 5. Service Logs
	# ========================================
	print_info "Collecting service logs..."

	echo "" >> "$diag_file"
	echo "=== Service Logs (last 500 lines) ===" >> "$diag_file"
	journalctl -u "$instance_name" -n 500 --no-pager >> "$diag_file" 2>&1 || \
		echo "Could not retrieve service logs" >> "$diag_file"
	echo "" >> "$diag_file"

	# ========================================
	# 6. Nginx Configuration
	# ========================================
	print_info "Collecting nginx configuration..."

	cat >> "$diag_file" << EOF

=== Nginx Configuration ===

=== Nginx Status ===
$(systemctl status nginx 2>/dev/null | head -20 || echo "Nginx not found")

=== Site Configuration ===
$(cat "/etc/nginx/sites-available/$instance_name" 2>/dev/null || echo "Nginx config not found")

=== Nginx Error Log (last 100 lines) ===
$(tail -100 /var/log/nginx/error.log 2>/dev/null || echo "Error log not accessible")

=== Nginx Access Log (last 50 lines) ===
$(tail -50 /var/log/nginx/access.log 2>/dev/null || echo "Access log not accessible")

=== Nginx Test ===
$(nginx -t 2>&1 || echo "Nginx test failed")

===================================================
EOF

	# ========================================
	# 7. Database Connection Test
	# ========================================
	print_info "Testing database connection..."

	echo "" >> "$diag_file"
	echo "=== Database Information ===" >> "$diag_file"
	echo "" >> "$diag_file"

	if [ -f "$instance_dir/backend/.env" ]; then
		# Load .env
		set -a
		source "$instance_dir/backend/.env"
		set +a

		# Parse DATABASE_URL
		if [ -n "$DATABASE_URL" ]; then
			DB_USER=$(echo "$DATABASE_URL" | sed -n 's|postgresql://\([^:]*\):.*|\1|p')
			DB_PASS=$(echo "$DATABASE_URL" | sed -n 's|postgresql://[^:]*:\([^@]*\)@.*|\1|p')
			DB_HOST=$(echo "$DATABASE_URL" | sed -n 's|.*@\([^:]*\):.*|\1|p')
			DB_PORT=$(echo "$DATABASE_URL" | sed -n 's|.*:\([0-9]*\)/.*|\1|p')
			DB_NAME=$(echo "$DATABASE_URL" | sed -n 's|.*/\([^?]*\).*|\1|p')

			cat >> "$diag_file" << EOF
=== Database Connection Details ===
Host: $DB_HOST
Port: $DB_PORT
Database: $DB_NAME
User: $DB_USER

=== PostgreSQL Status ===
$(systemctl status postgresql 2>/dev/null | head -20 || echo "PostgreSQL status not available")

=== Connection Test ===
EOF

			if PGPASSWORD="$DB_PASS" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -c "SELECT version();" >> "$diag_file" 2>&1; then
				echo "✅ Database connection: SUCCESSFUL" >> "$diag_file"
			else
				echo "❌ Database connection: FAILED" >> "$diag_file"
			fi

			echo "" >> "$diag_file"
			echo "=== Database Size ===" >> "$diag_file"
			PGPASSWORD="$DB_PASS" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -c "
			SELECT
				pg_size_pretty(pg_database_size('$DB_NAME')) as database_size;
			" >> "$diag_file" 2>&1 || echo "Could not get database size" >> "$diag_file"

			echo "" >> "$diag_file"
			echo "=== Table Sizes ===" >> "$diag_file"
			PGPASSWORD="$DB_PASS" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -c "
			SELECT
				schemaname,
				tablename,
				pg_size_pretty(pg_total_relation_size(schemaname||'.'||tablename)) AS size
			FROM pg_tables
			WHERE schemaname = 'public'
			ORDER BY pg_total_relation_size(schemaname||'.'||tablename) DESC
			LIMIT 10;
			" >> "$diag_file" 2>&1 || echo "Could not get table sizes" >> "$diag_file"

			echo "" >> "$diag_file"
			echo "=== Migration Status ===" >> "$diag_file"
			cd "$instance_dir/backend"
			npx prisma migrate status >> "$diag_file" 2>&1 || echo "Could not get migration status" >> "$diag_file"

			echo "===================================================" >> "$diag_file"
		else
			echo "DATABASE_URL not found in .env" >> "$diag_file"
		fi
	else
		echo ".env file not found" >> "$diag_file"
	fi

	# ========================================
	# 8. Redis Connection Test
	# ========================================
	print_info "Testing Redis connection..."

	if [ -f "$instance_dir/backend/.env" ]; then
		# Load .env
		set -a
		source "$instance_dir/backend/.env"
		set +a

		cat >> "$diag_file" << EOF
===================================================
Redis Information
===================================================

=== Redis Connection Details ===
Host: ${REDIS_HOST:-localhost}
Port: ${REDIS_PORT:-6379}
User: ${REDIS_USER:-(none)}
Database: ${REDIS_DB:-0}

=== Redis Status ===
$(systemctl status redis-server 2>/dev/null | head -20 || echo "Redis status not available")

=== Connection Test ===
EOF

		# Test connection
		if [ -n "$REDIS_USER" ] && [ -n "$REDIS_PASSWORD" ]; then
			if redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" --user "$REDIS_USER" --pass "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" ping >> "$diag_file" 2>&1; then
				echo "✅ Redis connection (with user): SUCCESSFUL" >> "$diag_file"

				echo "" >> "$diag_file"
				echo "=== Redis INFO ===" >> "$diag_file"
				redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" --user "$REDIS_USER" --pass "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" INFO >> "$diag_file" 2>&1

				echo "" >> "$diag_file"
				echo "=== Redis Database Size ===" >> "$diag_file"
				redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" --user "$REDIS_USER" --pass "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" DBSIZE >> "$diag_file" 2>&1
			else
				echo "❌ Redis connection (with user): FAILED" >> "$diag_file"
			fi
		elif [ -n "$REDIS_PASSWORD" ]; then
			if redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -a "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" ping >> "$diag_file" 2>&1; then
				echo "✅ Redis connection (requirepass): SUCCESSFUL" >> "$diag_file"

				echo "" >> "$diag_file"
				echo "=== Redis INFO ===" >> "$diag_file"
				redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -a "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" INFO >> "$diag_file" 2>&1

				echo "" >> "$diag_file"
				echo "=== Redis Database Size ===" >> "$diag_file"
				redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -a "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" DBSIZE >> "$diag_file" 2>&1
			else
				echo "❌ Redis connection (requirepass): FAILED" >> "$diag_file"
			fi
		else
			if redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -n "${REDIS_DB:-0}" ping >> "$diag_file" 2>&1; then
				echo "✅ Redis connection (no auth): SUCCESSFUL" >> "$diag_file"
			else
				echo "❌ Redis connection: FAILED" >> "$diag_file"
			fi
		fi

		echo "" >> "$diag_file"
		echo "=== Redis ACL Users ===" >> "$diag_file"
		if [ -n "$REDIS_USER" ] && [ -n "$REDIS_PASSWORD" ]; then
			redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" --user "$REDIS_USER" --pass "$REDIS_PASSWORD" --no-auth-warning ACL LIST >> "$diag_file"
		elif [ -n "$REDIS_PASSWORD" ]; then
			redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -a "$REDIS_PASSWORD" --no-auth-warning ACL LIST >> "$diag_file"
		fi

		echo "===================================================" >> "$diag_file"
	else
		echo ".env file not found" >> "$diag_file"
	fi

	# ========================================
	# 9. Network and Port Information
	# ========================================
	print_info "Collecting network information..."

	# Get backend port from .env
	local backend_port=$(grep '^PORT=' "$instance_dir/backend/.env" 2>/dev/null | cut -d'=' -f2 | tr -d ' ' || echo "3000")

	cat >> "$diag_file" << EOF
===================================================
Network and Port Information
===================================================

=== Listening Ports ===
$(ss -tlnp | grep -E "LISTEN|nginx|node|postgres|redis" || netstat -tlnp | grep -E "LISTEN|nginx|node|postgres|redis" || echo "Could not get port information")

=== Active Connections ===
$(ss -tn state established | head -20 || echo "Could not get connection information")

=== Backend Port Connections (Port $backend_port) ===
Total connections to backend: $(ss -tn | grep ":$backend_port" | wc -l || echo "0")
$(ss -tn | grep ":$backend_port" | head -10 || echo "No connections found")

=== PostgreSQL Connections ===
EOF

	# Get PostgreSQL connection count
	if [ -n "$DB_PASS" ] && [ -n "$DB_USER" ] && [ -n "$DB_NAME" ]; then
		PGPASSWORD="$DB_PASS" psql -h "${DB_HOST:-localhost}" -U "$DB_USER" -d "$DB_NAME" -c "
		SELECT
			count(*) as total_connections,
			count(*) FILTER (WHERE state = 'active') as active_connections,
			count(*) FILTER (WHERE state = 'idle') as idle_connections
		FROM pg_stat_activity
		WHERE datname = '$DB_NAME';
		" >> "$diag_file" 2>&1 || echo "Could not get PostgreSQL connection stats" >> "$diag_file"

		echo "" >> "$diag_file"
		echo "=== PostgreSQL Connection Details ===" >> "$diag_file"
		PGPASSWORD="$DB_PASS" psql -h "${DB_HOST:-localhost}" -U "$DB_USER" -d "$DB_NAME" -c "
		SELECT
			pid,
			usename,
			application_name,
			client_addr,
			state,
			query_start,
			state_change
		FROM pg_stat_activity
		WHERE datname = '$DB_NAME'
		ORDER BY query_start DESC
		LIMIT 20;
		" >> "$diag_file" 2>&1 || echo "Could not get connection details" >> "$diag_file"
	else
		echo "Database credentials not available" >> "$diag_file"
	fi

	echo "" >> "$diag_file"
	echo "=== Redis Connections ===" >> "$diag_file"

	# Get Redis connection count
	if [ -n "$REDIS_USER" ] && [ -n "$REDIS_PASSWORD" ]; then
		redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" --user "$REDIS_USER" --pass "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" INFO clients >> "$diag_file" 2>&1 || echo "Could not get Redis connection info" >> "$diag_file"
	elif [ -n "$REDIS_PASSWORD" ]; then
		redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -a "$REDIS_PASSWORD" --no-auth-warning -n "${REDIS_DB:-0}" INFO clients >> "$diag_file" 2>&1 || echo "Could not get Redis connection info" >> "$diag_file"
	fi

	cat >> "$diag_file" << EOF

=== Firewall Status (UFW) ===
$(ufw status 2>/dev/null || echo "UFW not available")

=== Firewall Status (iptables) ===
$(iptables -L -n | head -50 2>/dev/null || echo "iptables not available")

===================================================
EOF

	# ========================================
	# 10. Process Information
	# ========================================
	print_info "Collecting process information..."

	cat >> "$diag_file" << EOF
===================================================
Process Information
===================================================

=== PatchMon Node Processes ===
$(ps aux | grep -E "node.*$instance_dir|PID" | grep -v grep || echo "No processes found")

=== Top Processes (CPU) ===
$(ps aux --sort=-%cpu | head -15)

=== Top Processes (Memory) ===
$(ps aux --sort=-%mem | head -15)

===================================================
EOF

	# ========================================
	# 11. SSL Certificate Information
	# ========================================
	print_info "Collecting SSL certificate information..."

	cat >> "$diag_file" << EOF
===================================================
SSL Certificate Information
===================================================

=== Certbot Certificates ===
$(certbot certificates 2>/dev/null || echo "Certbot not available or no certificates")

=== SSL Certificate Files ===
$(ls -lh /etc/letsencrypt/live/$instance_name/ 2>/dev/null || echo "No SSL certificates found for $instance_name")

===================================================
EOF

	# ========================================
	# 12. Recent System Logs
	# ========================================
	print_info "Collecting recent system logs..."

	journalctl -n 200 --no-pager >> "$diag_file" 2>&1 || \
		echo "Could not retrieve system logs" >> "$diag_file"

	# ========================================
	# 13. Installation Log (if exists)
	# ========================================
	print_info "Collecting installation log..."

	echo "" >> "$diag_file"
	echo "=== Installation Log (last 200 lines) ===" >> "$diag_file"
	if [ -f "$instance_dir/patchmon-install.log" ]; then
		tail -200 "$instance_dir/patchmon-install.log" >> "$diag_file" 2>&1
	else
		echo "No installation log found" >> "$diag_file"
	fi
	echo "" >> "$diag_file"

	# ========================================
	# 14. Node.js and npm Information
	# ========================================
	print_info "Collecting Node.js information..."

	cat >> "$diag_file" << EOF
===================================================
Node.js and npm Information
===================================================

=== Node.js Version ===
$(node --version 2>/dev/null || echo "Node.js not found")

=== npm Version ===
$(npm --version 2>/dev/null || echo "npm not found")

=== Backend Dependencies ===
$(cd "$instance_dir/backend" && npm list --depth=0 2>/dev/null || echo "Could not list backend dependencies")

===================================================
EOF

	# ========================================
	# Finalize diagnostics file
	# ========================================
	print_info "Finalizing diagnostics file..."

	echo "" >> "$diag_file"
	echo "====================================================" >> "$diag_file"
	echo "END OF DIAGNOSTICS REPORT" >> "$diag_file"
	echo "====================================================" >> "$diag_file"
	echo "" >> "$diag_file"
	echo "IMPORTANT: Sensitive Information" >> "$diag_file"
	echo "Passwords, secrets, and tokens have been sanitized" >> "$diag_file"
	echo "and replaced with [REDACTED]. However, please review" >> "$diag_file"
	echo "before sharing to ensure no sensitive data is included." >> "$diag_file"
	echo "====================================================" >> "$diag_file"

	print_status "Diagnostics file created: $diag_file"

	# ========================================
	# Display summary
	# ========================================
	echo ""
	echo -e "${GREEN}====================================================${NC}"
	echo -e "${GREEN} Diagnostics Collection Complete!${NC}"
	echo -e "${GREEN}====================================================${NC}"
	echo ""

	# Get service statuses and file size
	local service_status=$(systemctl is-active "$instance_name" 2>/dev/null || echo "unknown")
	local nginx_status=$(systemctl is-active nginx 2>/dev/null || echo "unknown")
	local postgres_status=$(systemctl is-active postgresql 2>/dev/null || echo "unknown")
	local redis_status=$(systemctl is-active redis-server 2>/dev/null || echo "unknown")
	local file_size=$(du -h "$diag_file" 2>/dev/null | cut -f1 || echo "unknown")
	local line_count=$(wc -l < "$diag_file" 2>/dev/null || echo "unknown")

	# Get connection counts for summary
	local backend_port=$(grep '^PORT=' "$instance_dir/backend/.env" 2>/dev/null | cut -d'=' -f2 | tr -d ' ' || echo "3000")
	local backend_conn_count=$(ss -tn 2>/dev/null | grep ":$backend_port" | wc -l || echo "0")

	local db_conn_count="N/A"
	if [ -n "$DB_PASS" ] && [ -n "$DB_USER" ] && [ -n "$DB_NAME" ]; then
		db_conn_count=$(PGPASSWORD="$DB_PASS" psql -h "${DB_HOST:-localhost}" -U "$DB_USER" -d "$DB_NAME" -t -A -c "SELECT count(*) FROM pg_stat_activity WHERE datname = '$DB_NAME';" 2>/dev/null || echo "N/A")
	fi

	local redis_conn_count="N/A"
	if [ -n "$REDIS_USER" ] && [ -n "$REDIS_PASSWORD" ]; then
		redis_conn_count=$(redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" --user "$REDIS_USER" --pass "$REDIS_PASSWORD" --no-auth-warning INFO clients 2>/dev/null | grep "connected_clients:" | cut -d':' -f2 | tr -d '\r' || echo "N/A")
	elif [ -n "$REDIS_PASSWORD" ]; then
		redis_conn_count=$(redis-cli -h "${REDIS_HOST:-localhost}" -p "${REDIS_PORT:-6379}" -a "$REDIS_PASSWORD" --no-auth-warning INFO clients 2>/dev/null | grep "connected_clients:" | cut -d':' -f2 | tr -d '\r' || echo "N/A")
	fi

	# Compact, copyable summary
	echo -e "${BLUE}═══════════════════════════════════════════════════${NC}"
	echo -e "${BLUE}DIAGNOSTICS SUMMARY (copy-paste friendly)${NC}"
	echo -e "${BLUE}═══════════════════════════════════════════════════${NC}"
	echo "Instance: $instance_name"
	echo "File: $diag_file"
	echo "Size: $file_size ($line_count lines)"
	echo "Generated: $(date '+%Y-%m-%d %H:%M:%S')"
	echo "---"
	echo "Service Status: $service_status"
	echo "Nginx Status: $nginx_status"
	echo "PostgreSQL: $postgres_status"
	echo "Redis: $redis_status"
	echo "---"
	echo "Backend Port: $backend_port (Active Connections: $backend_conn_count)"
	echo "Database Connections: $db_conn_count"
	echo "Redis Connections: $redis_conn_count"
	echo "---"
	echo "View: cat $(basename "$diag_file")"
	echo "Or: less $(basename "$diag_file")"
	echo "Share: Send $(basename "$diag_file") to support"
	echo -e "${BLUE}═══════════════════════════════════════════════════${NC}"
	echo ""
	print_warning "Review file before sharing - sensitive data has been sanitized"
	echo ""

	print_success "Done!"
}

# Run main function
main "$@"
286
tools/fix-migrations.sh
Executable file
286
tools/fix-migrations.sh
Executable file
@@ -0,0 +1,286 @@
#!/bin/bash
# PatchMon Migration Fixer
# Standalone script to detect and fix failed Prisma migrations
# Usage: sudo bash fix-migrations.sh [instance-name]

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Print functions
print_status() {
    echo -e "${GREEN}✅ $1${NC}"
}

print_info() {
    echo -e "${BLUE}ℹ️ $1${NC}"
}

print_error() {
    echo -e "${RED}❌ $1${NC}"
}

print_warning() {
    echo -e "${YELLOW}⚠️ $1${NC}"
}

# Check if running as root
if [[ $EUID -ne 0 ]]; then
    print_error "This script must be run as root"
    print_info "Please run: sudo bash $0"
    exit 1
fi

# Function to detect PatchMon installations
detect_installations() {
    local installations=()

    if [ -d "/opt" ]; then
        for dir in /opt/*/; do
            local dirname=$(basename "$dir")
            # Skip backup directories
            if [[ "$dirname" =~ \.backup\. ]]; then
                continue
            fi
            # Check if it's a PatchMon installation
            if [ -f "$dir/backend/package.json" ] && grep -q "patchmon" "$dir/backend/package.json" 2>/dev/null; then
                installations+=("$dirname")
            fi
        done
    fi

    echo "${installations[@]}"
}

# Function to select installation
select_installation() {
    local installations=($(detect_installations))

    if [ ${#installations[@]} -eq 0 ]; then
        print_error "No PatchMon installations found in /opt"
        exit 1
    fi

    if [ -n "$1" ]; then
        # Use provided instance name
        if [[ " ${installations[@]} " =~ " $1 " ]]; then
            echo "$1"
            return 0
        else
            print_error "Instance '$1' not found"
            exit 1
        fi
    fi

    print_info "Found ${#installations[@]} installation(s):"
    echo ""

    local i=1
    declare -A install_map
    for install in "${installations[@]}"; do
        printf "%2d. %s\n" "$i" "$install"
        install_map[$i]="$install"
        i=$((i + 1))
    done

    echo ""
    echo -n -e "${BLUE}Select installation number [1]: ${NC}"
    read -r selection

    selection=${selection:-1}

    if [[ "$selection" =~ ^[0-9]+$ ]] && [ -n "${install_map[$selection]}" ]; then
        echo "${install_map[$selection]}"
        return 0
    else
        print_error "Invalid selection"
        exit 1
    fi
}

# Function to check and fix failed migrations
fix_failed_migrations() {
    local db_name="$1"
    local db_user="$2"
    local db_pass="$3"
    local db_host="${4:-localhost}"

    print_info "Checking for failed migrations in database..."

    # Query for failed migrations
    local failed_migrations
    failed_migrations=$(PGPASSWORD="$db_pass" psql -h "$db_host" -U "$db_user" -d "$db_name" -t -A -c \
        "SELECT migration_name FROM _prisma_migrations WHERE finished_at IS NULL AND started_at IS NOT NULL;" 2>/dev/null || echo "")

    if [ -z "$failed_migrations" ]; then
        print_status "No failed migrations found"
        return 0
    fi

    print_warning "Found failed migration(s):"
    echo "$failed_migrations" | while read -r migration; do
        [ -n "$migration" ] && print_warning " - $migration"
    done
    echo ""

    print_info "What would you like to do?"
    echo " 1. Clean and retry (delete failed records and re-run migration)"
    echo " 2. Mark as completed (if schema changes are already applied)"
    echo " 3. Show migration details only"
    echo " 4. Cancel"
    echo ""
    echo -n -e "${BLUE}Select option [1]: ${NC}"
    read -r option

    option=${option:-1}

    case $option in
        1)
            print_info "Cleaning failed migrations and preparing for retry..."
            echo "$failed_migrations" | while read -r migration; do
                if [ -n "$migration" ]; then
                    print_info "Processing: $migration"

                    # Mark as rolled back
                    PGPASSWORD="$db_pass" psql -h "$db_host" -U "$db_user" -d "$db_name" -c \
                        "UPDATE _prisma_migrations SET rolled_back_at = NOW() WHERE migration_name = '$migration' AND finished_at IS NULL;" >/dev/null 2>&1

                    # Delete the failed record
                    PGPASSWORD="$db_pass" psql -h "$db_host" -U "$db_user" -d "$db_name" -c \
                        "DELETE FROM _prisma_migrations WHERE migration_name = '$migration' AND finished_at IS NULL;" >/dev/null 2>&1

                    print_status "Cleared: $migration"
                fi
            done
            print_status "Failed migrations cleared - ready to retry"
            return 0
            ;;
        2)
            print_info "Marking migrations as completed..."
            echo "$failed_migrations" | while read -r migration; do
                if [ -n "$migration" ]; then
                    print_info "Marking as complete: $migration"

                    PGPASSWORD="$db_pass" psql -h "$db_host" -U "$db_user" -d "$db_name" -c \
                        "UPDATE _prisma_migrations SET finished_at = NOW(), logs = 'Manually resolved by fix-migrations.sh' WHERE migration_name = '$migration' AND finished_at IS NULL;" >/dev/null 2>&1

                    print_status "Marked complete: $migration"
                fi
            done
            print_status "All migrations marked as completed"
            return 0
            ;;
        3)
            print_info "Migration details:"
            PGPASSWORD="$db_pass" psql -h "$db_host" -U "$db_user" -d "$db_name" -c \
                "SELECT migration_name, started_at, finished_at, rolled_back_at, logs FROM _prisma_migrations WHERE finished_at IS NULL AND started_at IS NOT NULL;"
            return 0
            ;;
        4)
            print_info "Cancelled"
            return 1
            ;;
        *)
            print_error "Invalid option"
            return 1
            ;;
    esac
}

# Main script
main() {
    echo -e "${BLUE}====================================================${NC}"
    echo -e "${BLUE} PatchMon Migration Fixer${NC}"
    echo -e "${BLUE}====================================================${NC}"
    echo ""

    # Select instance
    instance_name=$(select_installation "$1")
    instance_dir="/opt/$instance_name"

    print_info "Selected instance: $instance_name"
    print_info "Directory: $instance_dir"
    echo ""

    # Load .env to get database credentials
    if [ ! -f "$instance_dir/backend/.env" ]; then
        print_error "Cannot find .env file at $instance_dir/backend/.env"
        exit 1
    fi

    # Source .env
    set -a
    source "$instance_dir/backend/.env"
    set +a

    # Parse DATABASE_URL
    if [ -z "$DATABASE_URL" ]; then
        print_error "DATABASE_URL not found in .env file"
        exit 1
    fi

    DB_USER=$(echo "$DATABASE_URL" | sed -n 's|postgresql://\([^:]*\):.*|\1|p')
    DB_PASS=$(echo "$DATABASE_URL" | sed -n 's|postgresql://[^:]*:\([^@]*\)@.*|\1|p')
    DB_HOST=$(echo "$DATABASE_URL" | sed -n 's|.*@\([^:]*\):.*|\1|p')
    DB_NAME=$(echo "$DATABASE_URL" | sed -n 's|.*/\([^?]*\).*|\1|p')
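    # Illustrative example (placeholder values, not from a real install): given
    # DATABASE_URL="postgresql://patchmon_user:s3cret@localhost:5432/patchmon_db"
    # the four sed expressions above yield DB_USER=patchmon_user, DB_PASS=s3cret,
    # DB_HOST=localhost and DB_NAME=patchmon_db.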

    print_info "Database: $DB_NAME"
    print_info "User: $DB_USER"
    print_info "Host: $DB_HOST"
    echo ""

    # Test database connection
    print_info "Testing database connection..."
    if ! PGPASSWORD="$DB_PASS" psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -c "SELECT 1;" >/dev/null 2>&1; then
        print_error "Cannot connect to database"
        exit 1
    fi
    print_status "Database connection successful"
    echo ""

    # Check Prisma migration status
    print_info "Checking Prisma migration status..."
    cd "$instance_dir/backend"

    echo ""
    echo -e "${YELLOW}=== Prisma Migration Status ===${NC}"
    npx prisma migrate status 2>&1 || true
    echo -e "${YELLOW}==============================${NC}"
    echo ""

    # Check for failed migrations
    fix_failed_migrations "$DB_NAME" "$DB_USER" "$DB_PASS" "$DB_HOST"

    # Ask if user wants to run migrations now
    echo ""
    echo -n -e "${BLUE}Do you want to run 'npx prisma migrate deploy' now? [y/N]: ${NC}"
    read -r run_migrate

    if [[ "$run_migrate" =~ ^[Yy] ]]; then
        print_info "Running migrations..."
        cd "$instance_dir/backend"

        if npx prisma migrate deploy; then
            print_status "Migrations completed successfully!"
        else
            print_error "Migration failed"
            print_info "You may need to run this script again or investigate further"
            exit 1
        fi
    else
        print_info "Skipped migration deployment"
        print_info "Run manually: cd $instance_dir/backend && npx prisma migrate deploy"
    fi

    echo ""
    print_status "Done!"
}

# Run main function
main "$@"
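To inspect the migration state by hand before (or instead of) running fix-migrations.sh, the same check the script performs can be issued directly with psql; the credentials below are placeholders for the values parsed from backend/.env:

PGPASSWORD='your-password' psql -h localhost -U patchmon_user -d patchmon_db -c \
  "SELECT migration_name, started_at, finished_at, rolled_back_at FROM _prisma_migrations WHERE finished_at IS NULL AND started_at IS NOT NULL;"

A non-empty result corresponds to the "Found failed migration(s)" branch above.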
41	tools/fixconnlimit.sh	Normal file
@@ -0,0 +1,41 @@
#!/bin/bash

# Script to update hardcoded connection pool values in prisma.js
# Usage: ./fixconnlimit.sh [prisma.js path] [connection_limit] [pool_timeout] [connect_timeout] [idle_timeout] [max_lifetime]

set -e

FILE="${1:-backend/src/config/prisma.js}"

# Get values from arguments or use defaults
NEW_CONN_LIMIT="${2:-30}"
NEW_POOL_TIMEOUT="${3:-20}"
NEW_CONNECT_TIMEOUT="${4:-10}"
NEW_IDLE_TIMEOUT="${5:-300}"
NEW_MAX_LIFETIME="${6:-1800}"

if [ ! -f "$FILE" ]; then
    echo "Error: File not found: $FILE"
    exit 1
fi

# Create backup
BACKUP_FILE="${FILE}.backup.$(date +%Y%m%d_%H%M%S)"
cp "$FILE" "$BACKUP_FILE"
echo "Backup created: $BACKUP_FILE"

# Replace the hardcoded values
sed -i "s|url\.searchParams\.set(\"connection_limit\", \".*\");|url.searchParams.set(\"connection_limit\", \"$NEW_CONN_LIMIT\");|g" "$FILE"
sed -i "s|url\.searchParams\.set(\"pool_timeout\", \".*\");|url.searchParams.set(\"pool_timeout\", \"$NEW_POOL_TIMEOUT\");|g" "$FILE"
sed -i "s|url\.searchParams\.set(\"connect_timeout\", \".*\");|url.searchParams.set(\"connect_timeout\", \"$NEW_CONNECT_TIMEOUT\");|g" "$FILE"
sed -i "s|url\.searchParams\.set(\"idle_timeout\", \".*\");|url.searchParams.set(\"idle_timeout\", \"$NEW_IDLE_TIMEOUT\");|g" "$FILE"
sed -i "s|url\.searchParams\.set(\"max_lifetime\", \".*\");|url.searchParams.set(\"max_lifetime\", \"$NEW_MAX_LIFETIME\");|g" "$FILE"

echo "Updated values:"
echo " connection_limit: $NEW_CONN_LIMIT"
echo " pool_timeout: $NEW_POOL_TIMEOUT"
echo " connect_timeout: $NEW_CONNECT_TIMEOUT"
echo " idle_timeout: $NEW_IDLE_TIMEOUT"
echo " max_lifetime: $NEW_MAX_LIFETIME"
echo ""
echo "Changes applied to $FILE"
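A minimal example invocation, assuming an install under /opt/patchmon (the path and values below are illustrative):

sudo bash tools/fixconnlimit.sh /opt/patchmon/backend/src/config/prisma.js 50 30 10 300 1800

Note that the sed expressions only rewrite lines of the exact form url.searchParams.set("connection_limit", "..."); in the target file, so the script is effectively a no-op if prisma.js does not set its pool parameters that way.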
128	tools/fixconnstrings.sh	Normal file
@@ -0,0 +1,128 @@
#!/bin/bash

# Script to fix HTTP connection limit issue for hosts page
# This adds a bulk status endpoint and updates the frontend to use it

set -e

echo "🔧 Fixing HTTP connection limit issue..."

# Backup files first
echo "📦 Creating backups..."
cp backend/src/routes/wsRoutes.js backend/src/routes/wsRoutes.js.bak
cp frontend/src/pages/Hosts.jsx frontend/src/pages/Hosts.jsx.bak

# Add bulk status endpoint to wsRoutes.js
echo "➕ Adding bulk status endpoint to backend..."

cat > /tmp/ws_routes_addition.txt << 'EOF'
// Get WebSocket connection status for multiple hosts at once
router.get("/status", authenticateToken, async (req, res) => {
  try {
    const { apiIds } = req.query; // Comma-separated list of api_ids
    const idArray = apiIds ? apiIds.split(',').filter(id => id.trim()) : [];

    const statusMap = {};
    idArray.forEach(apiId => {
      statusMap[apiId] = getConnectionInfo(apiId);
    });

    res.json({
      success: true,
      data: statusMap,
    });
  } catch (error) {
    console.error("Error fetching bulk WebSocket status:", error);
    res.status(500).json({
      success: false,
      error: "Failed to fetch WebSocket status",
    });
  }
});
EOF

# Find the line number of the first router.get and insert after it
LINENUM=$(grep -n "router.get.*status.*apiId" backend/src/routes/wsRoutes.js | head -1 | cut -d: -f1)
sed -i "${LINENUM}r /tmp/ws_routes_addition.txt" backend/src/routes/wsRoutes.js

# Now update the frontend to use the bulk endpoint
echo "🔄 Updating frontend to use bulk endpoint..."

# Create a sed script to replace the fetchInitialStatus function
cat > /tmp/hosts_fix.sed << 'EOF'
/const fetchInitialStatus = async/,\}/c\
const fetchInitialStatus = async () => {\
  const apiIds = hosts\
    .filter((host) => host.api_id)\
    .map(host => host.api_id);\
\
  if (apiIds.length === 0) return;\
\
  try {\
    const response = await fetch(`/api/v1/ws/status?apiIds=${apiIds.join(',')}`, {\
      headers: {\
        Authorization: `Bearer ${token}`,\
      },\
    });\
    if (response.ok) {\
      const result = await response.json();\
      setWsStatusMap(result.data);\
    }\
  } catch (_error) {\
    // Silently handle errors\
  }\
};
EOF

# Apply the sed script (multiline replacement is tricky with sed, so we'll use a different approach)
echo "✨ Using awk for multi-line replacement..."

# Create a temporary awk script
cat > /tmp/update_hosts.awk << 'AWK_EOF'
BEGIN { in_function=0; brace_count=0 }
/store.fetchInitialStatus/ { printing=1 }
/const fetchInitialStatus = async/ {
  print "  // Fetch initial WebSocket status for all hosts";
  print "  const fetchInitialStatus = async () => {";
  print "    const apiIds = hosts";
  print "      .filter((host) => host.api_id)";
  print "      .map(host => host.api_id);";
  print "";
  print "    if (apiIds.length === 0) return;";
  print "";
  print "    try {";
  print "      const response = await fetch(`/api/v1/ws/status?apiIds=${apiIds.join(',')}`, {";
  print "        headers: {";
  print "          Authorization: `Bearer ${token}`,";
  print "        },";
  print "      });";
  print "      if (response.ok) {";
  print "        const result = await response.json();";
  print "        setWsStatusMap(result.data);";
  print "      }";
  print "    } catch (_error) {";
  print "      // Silently handle errors";
  print "    }";
  print "  };";
  skipping=1;
  next
}
skipping && /^\t\t\}/ { skipping=0; next }
skipping { next }
{ print }
AWK_EOF

awk -f /tmp/update_hosts.awk frontend/src/pages/Hosts.jsx.bak > frontend/src/pages/Hosts.jsx

# Clean up temp files
rm /tmp/ws_routes_addition.txt /tmp/hosts_fix.sed /tmp/update_hosts.awk

echo "✅ Done! Files have been modified."
echo ""
echo "📝 Changes made:"
echo " - backend/src/routes/wsRoutes.js: Added bulk status endpoint"
echo " - frontend/src/pages/Hosts.jsx: Updated to use bulk endpoint"
echo ""
echo "💾 Backups saved as:"
echo " - backend/src/routes/wsRoutes.js.bak"
echo " - frontend/src/pages/Hosts.jsx.bak"
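Once the patched backend has been restarted, the bulk endpoint can be exercised directly; the host, token and api_ids below are placeholders:

curl -s -H "Authorization: Bearer $TOKEN" \
  "https://patchmon.example.com/api/v1/ws/status?apiIds=api-id-1,api-id-2"

The handler responds with {"success":true,"data":{"api-id-1":{...},"api-id-2":{...}}}, where each value is whatever getConnectionInfo() returns for that agent, replacing the one-request-per-host pattern that exhausted the browser's HTTP connection limit on the hosts page.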