Mirror of https://github.com/9technologygroup/patchmon.net.git (synced 2025-11-13 02:17:05 +00:00)

Commit: Building Docker compatibility within the Agent
@@ -16,12 +16,28 @@ function getOptimizedDatabaseUrl() {
   // Parse the URL
   const url = new URL(originalUrl);

-  // Add connection pooling parameters for multiple instances
-  url.searchParams.set("connection_limit", "5"); // Reduced from default 10
-  url.searchParams.set("pool_timeout", "10"); // 10 seconds
-  url.searchParams.set("connect_timeout", "10"); // 10 seconds
-  url.searchParams.set("idle_timeout", "300"); // 5 minutes
-  url.searchParams.set("max_lifetime", "1800"); // 30 minutes
+  // Add connection pooling parameters - configurable via environment variables
+  const connectionLimit = process.env.DB_CONNECTION_LIMIT || "30";
+  const poolTimeout = process.env.DB_POOL_TIMEOUT || "20";
+  const connectTimeout = process.env.DB_CONNECT_TIMEOUT || "10";
+  const idleTimeout = process.env.DB_IDLE_TIMEOUT || "300";
+  const maxLifetime = process.env.DB_MAX_LIFETIME || "1800";
+
+  url.searchParams.set("connection_limit", connectionLimit);
+  url.searchParams.set("pool_timeout", poolTimeout);
+  url.searchParams.set("connect_timeout", connectTimeout);
+  url.searchParams.set("idle_timeout", idleTimeout);
+  url.searchParams.set("max_lifetime", maxLifetime);
+
+  // Log connection pool settings in development/debug mode
+  if (
+    process.env.ENABLE_LOGGING === "true" ||
+    process.env.LOG_LEVEL === "debug"
+  ) {
+    console.log(
+      `[Database Pool] connection_limit=${connectionLimit}, pool_timeout=${poolTimeout}s, connect_timeout=${connectTimeout}s`,
+    );
+  }

   return url.toString();
 }
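Editor's note: a minimal sketch of the effect, assuming a placeholder DATABASE_URL (the env var names above are the real knobs; host, credentials, and values here are made up):

// With DB_CONNECTION_LIMIT=50 set before startup:
const url = new URL("postgresql://patchmon:secret@db:5432/patchmon"); // placeholder URL
url.searchParams.set("connection_limit", process.env.DB_CONNECTION_LIMIT || "30");
url.searchParams.set("pool_timeout", process.env.DB_POOL_TIMEOUT || "20");
console.log(url.toString());
// → postgresql://patchmon:secret@db:5432/patchmon?connection_limit=50&pool_timeout=20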
@@ -218,6 +218,30 @@ router.post(
   },
 );

+// Trigger manual Docker inventory cleanup
+router.post(
+  "/trigger/docker-inventory-cleanup",
+  authenticateToken,
+  async (_req, res) => {
+    try {
+      const job = await queueManager.triggerDockerInventoryCleanup();
+      res.json({
+        success: true,
+        data: {
+          jobId: job.id,
+          message: "Docker inventory cleanup triggered successfully",
+        },
+      });
+    } catch (error) {
+      console.error("Error triggering Docker inventory cleanup:", error);
+      res.status(500).json({
+        success: false,
+        error: "Failed to trigger Docker inventory cleanup",
+      });
+    }
+  },
+);
+
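Editor's note: a hedged sketch of exercising the new trigger route; the /api/v1/automation prefix comes from the index.js mount further down, while the host, port, and token are placeholders:

const res = await fetch(
  "http://localhost:3001/api/v1/automation/trigger/docker-inventory-cleanup", // base URL assumed
  { method: "POST", headers: { Authorization: "Bearer <jwt>" } },
);
console.log(await res.json()); // { success: true, data: { jobId: "...", message: "..." } }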
 // Get queue health status
 router.get("/health", authenticateToken, async (_req, res) => {
   try {
@@ -274,6 +298,7 @@ router.get("/overview", authenticateToken, async (_req, res) => {
       queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
       queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
       queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
+      queueManager.getRecentJobs(QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP, 1),
       queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
     ]);

@@ -283,19 +308,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
         stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
         stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
         stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
-        stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed,
+        stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed +
+        stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].delayed,

       runningTasks:
         stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
         stats[QUEUE_NAMES.SESSION_CLEANUP].active +
         stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
-        stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active,
+        stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active +
+        stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].active,

       failedTasks:
         stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
         stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
         stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
-        stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed,
+        stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed +
+        stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].failed,

       totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
         return (
@@ -375,10 +403,11 @@ router.get("/overview", authenticateToken, async (_req, res) => {
         stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
       },
       {
-        name: "Collect Host Statistics",
-        queue: QUEUE_NAMES.AGENT_COMMANDS,
-        description: "Collects package statistics from connected agents only",
-        schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
+        name: "Docker Inventory Cleanup",
+        queue: QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP,
+        description:
+          "Removes Docker containers and images for non-existent hosts",
+        schedule: "Daily at 4 AM",
         lastRun: recentJobs[4][0]?.finishedOn
           ? new Date(recentJobs[4][0].finishedOn).toLocaleString()
           : "Never",
@@ -388,6 +417,22 @@ router.get("/overview", authenticateToken, async (_req, res) => {
           : recentJobs[4][0]
             ? "Success"
             : "Never run",
+        stats: stats[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP],
+      },
+      {
+        name: "Collect Host Statistics",
+        queue: QUEUE_NAMES.AGENT_COMMANDS,
+        description: "Collects package statistics from connected agents only",
+        schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
+        lastRun: recentJobs[5][0]?.finishedOn
+          ? new Date(recentJobs[5][0].finishedOn).toLocaleString()
+          : "Never",
+        lastRunTimestamp: recentJobs[5][0]?.finishedOn || 0,
+        status: recentJobs[5][0]?.failedReason
+          ? "Failed"
+          : recentJobs[5][0]
+            ? "Success"
+            : "Never run",
         stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
       },
     ].sort((a, b) => {

@@ -522,7 +522,8 @@ router.get("/updates", authenticateToken, async (req, res) => {
   }
 });

-// POST /api/v1/docker/collect - Collect Docker data from agent
+// POST /api/v1/docker/collect - Collect Docker data from agent (DEPRECATED - kept for backward compatibility)
+// New agents should use POST /api/v1/integrations/docker
 router.post("/collect", async (req, res) => {
   try {
     const { apiId, apiKey, containers, images, updates } = req.body;
@@ -745,6 +746,322 @@ router.post("/collect", async (req, res) => {
   }
 });

+// POST /api/v1/integrations/docker - New integration endpoint for Docker data collection
+router.post("/../integrations/docker", async (req, res) => {
+  try {
+    const apiId = req.headers["x-api-id"];
+    const apiKey = req.headers["x-api-key"];
+    const {
+      containers,
+      images,
+      updates,
+      daemon_info,
+      hostname,
+      machine_id,
+      agent_version,
+    } = req.body;
+
+    console.log(
+      `[Docker Integration] Received data from ${hostname || machine_id}`,
+    );
+
+    // Validate API credentials
+    const host = await prisma.hosts.findFirst({
+      where: { api_id: apiId, api_key: apiKey },
+    });
+
+    if (!host) {
+      console.warn("[Docker Integration] Invalid API credentials");
+      return res.status(401).json({ error: "Invalid API credentials" });
+    }
+
+    console.log(
+      `[Docker Integration] Processing for host: ${host.friendly_name}`,
+    );
+
+    const now = new Date();
+
+    // Helper function to validate and parse dates
+    const parseDate = (dateString) => {
+      if (!dateString) return now;
+      const date = new Date(dateString);
+      return Number.isNaN(date.getTime()) ? now : date;
+    };
+
+    let containersProcessed = 0;
+    let imagesProcessed = 0;
+    let updatesProcessed = 0;
+
+    // Process containers
+    if (containers && Array.isArray(containers)) {
+      console.log(
+        `[Docker Integration] Processing ${containers.length} containers`,
+      );
+      for (const containerData of containers) {
+        const containerId = uuidv4();
+
+        // Find or create image
+        let imageId = null;
+        if (containerData.image_repository && containerData.image_tag) {
+          const image = await prisma.docker_images.upsert({
+            where: {
+              repository_tag_image_id: {
+                repository: containerData.image_repository,
+                tag: containerData.image_tag,
+                image_id: containerData.image_id || "unknown",
+              },
+            },
+            update: {
+              last_checked: now,
+              updated_at: now,
+            },
+            create: {
+              id: uuidv4(),
+              repository: containerData.image_repository,
+              tag: containerData.image_tag,
+              image_id: containerData.image_id || "unknown",
+              source: containerData.image_source || "docker-hub",
+              created_at: parseDate(containerData.created_at),
+              updated_at: now,
+            },
+          });
+          imageId = image.id;
+        }
+
+        // Upsert container
+        await prisma.docker_containers.upsert({
+          where: {
+            host_id_container_id: {
+              host_id: host.id,
+              container_id: containerData.container_id,
+            },
+          },
+          update: {
+            name: containerData.name,
+            image_id: imageId,
+            image_name: containerData.image_name,
+            image_tag: containerData.image_tag || "latest",
+            status: containerData.status,
+            state: containerData.state || containerData.status,
+            ports: containerData.ports || null,
+            started_at: containerData.started_at
+              ? parseDate(containerData.started_at)
+              : null,
+            updated_at: now,
+            last_checked: now,
+          },
+          create: {
+            id: containerId,
+            host_id: host.id,
+            container_id: containerData.container_id,
+            name: containerData.name,
+            image_id: imageId,
+            image_name: containerData.image_name,
+            image_tag: containerData.image_tag || "latest",
+            status: containerData.status,
+            state: containerData.state || containerData.status,
+            ports: containerData.ports || null,
+            created_at: parseDate(containerData.created_at),
+            started_at: containerData.started_at
+              ? parseDate(containerData.started_at)
+              : null,
+            updated_at: now,
+          },
+        });
+        containersProcessed++;
+      }
+    }
+
+    // Process standalone images
+    if (images && Array.isArray(images)) {
+      console.log(`[Docker Integration] Processing ${images.length} images`);
+      for (const imageData of images) {
+        await prisma.docker_images.upsert({
+          where: {
+            repository_tag_image_id: {
+              repository: imageData.repository,
+              tag: imageData.tag,
+              image_id: imageData.image_id,
+            },
+          },
+          update: {
+            size_bytes: imageData.size_bytes
+              ? BigInt(imageData.size_bytes)
+              : null,
+            digest: imageData.digest || null,
+            last_checked: now,
+            updated_at: now,
+          },
+          create: {
+            id: uuidv4(),
+            repository: imageData.repository,
+            tag: imageData.tag,
+            image_id: imageData.image_id,
+            digest: imageData.digest,
+            size_bytes: imageData.size_bytes
+              ? BigInt(imageData.size_bytes)
+              : null,
+            source: imageData.source || "docker-hub",
+            created_at: parseDate(imageData.created_at),
+            updated_at: now,
+          },
+        });
+        imagesProcessed++;
+      }
+    }
+
+    // Process updates
+    if (updates && Array.isArray(updates)) {
+      console.log(`[Docker Integration] Processing ${updates.length} updates`);
+      for (const updateData of updates) {
+        // Find the image by repository and image_id
+        const image = await prisma.docker_images.findFirst({
+          where: {
+            repository: updateData.repository,
+            tag: updateData.current_tag,
+            image_id: updateData.image_id,
+          },
+        });
+
+        if (image) {
+          // Store digest info in changelog_url field as JSON
+          const digestInfo = JSON.stringify({
+            method: "digest_comparison",
+            current_digest: updateData.current_digest,
+            available_digest: updateData.available_digest,
+          });
+
+          // Upsert the update record
+          await prisma.docker_image_updates.upsert({
+            where: {
+              image_id_available_tag: {
+                image_id: image.id,
+                available_tag: updateData.available_tag,
+              },
+            },
+            update: {
+              updated_at: now,
+              changelog_url: digestInfo,
+              severity: "digest_changed",
+            },
+            create: {
+              id: uuidv4(),
+              image_id: image.id,
+              current_tag: updateData.current_tag,
+              available_tag: updateData.available_tag,
+              severity: "digest_changed",
+              changelog_url: digestInfo,
+              updated_at: now,
+            },
+          });
+          updatesProcessed++;
+        }
+      }
+    }
+
+    console.log(
+      `[Docker Integration] Successfully processed: ${containersProcessed} containers, ${imagesProcessed} images, ${updatesProcessed} updates`,
+    );
+
+    res.json({
+      message: "Docker data collected successfully",
+      containers_received: containersProcessed,
+      images_received: imagesProcessed,
+      updates_found: updatesProcessed,
+    });
+  } catch (error) {
+    console.error("[Docker Integration] Error collecting Docker data:", error);
+    console.error("[Docker Integration] Error stack:", error.stack);
+    res.status(500).json({
+      error: "Failed to collect Docker data",
+      message: error.message,
+      details: process.env.NODE_ENV === "development" ? error.stack : undefined,
+    });
+  }
+});
+
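Editor's note: a sketch of the agent-side call this handler expects; the field names come from the destructuring above, while the base URL and every value are illustrative:

await fetch("https://patchmon.example.com/api/v1/integrations/docker", { // server URL assumed
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "x-api-id": "<host api id>",
    "x-api-key": "<host api key>",
  },
  body: JSON.stringify({
    hostname: "web-01",
    machine_id: "<machine id>",
    agent_version: "<agent version>",
    containers: [
      {
        container_id: "abc123",
        name: "nginx",
        image_repository: "nginx",
        image_tag: "1.27",
        image_id: "sha256:<digest>",
        status: "running",
      },
    ],
    images: [],
    updates: [],
  }),
});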
+// DELETE /api/v1/docker/containers/:id - Delete a container
+router.delete("/containers/:id", authenticateToken, async (req, res) => {
+  try {
+    const { id } = req.params;
+
+    // Check if container exists
+    const container = await prisma.docker_containers.findUnique({
+      where: { id },
+    });
+
+    if (!container) {
+      return res.status(404).json({ error: "Container not found" });
+    }
+
+    // Delete the container
+    await prisma.docker_containers.delete({
+      where: { id },
+    });
+
+    console.log(`🗑️ Deleted container: ${container.name} (${id})`);
+
+    res.json({
+      success: true,
+      message: `Container ${container.name} deleted successfully`,
+    });
+  } catch (error) {
+    console.error("Error deleting container:", error);
+    res.status(500).json({ error: "Failed to delete container" });
+  }
+});
+
+// DELETE /api/v1/docker/images/:id - Delete an image
+router.delete("/images/:id", authenticateToken, async (req, res) => {
+  try {
+    const { id } = req.params;
+
+    // Check if image exists
+    const image = await prisma.docker_images.findUnique({
+      where: { id },
+      include: {
+        _count: {
+          select: {
+            docker_containers: true,
+          },
+        },
+      },
+    });
+
+    if (!image) {
+      return res.status(404).json({ error: "Image not found" });
+    }
+
+    // Check if image is in use by containers
+    if (image._count.docker_containers > 0) {
+      return res.status(400).json({
+        error: `Cannot delete image: ${image._count.docker_containers} container(s) are using this image`,
+        containersCount: image._count.docker_containers,
+      });
+    }
+
+    // Delete image updates first
+    await prisma.docker_image_updates.deleteMany({
+      where: { image_id: id },
+    });
+
+    // Delete the image
+    await prisma.docker_images.delete({
+      where: { id },
+    });
+
+    console.log(`🗑️ Deleted image: ${image.repository}:${image.tag} (${id})`);
+
+    res.json({
+      success: true,
+      message: `Image ${image.repository}:${image.tag} deleted successfully`,
+    });
+  } catch (error) {
+    console.error("Error deleting image:", error);
+    res.status(500).json({ error: "Failed to delete image" });
+  }
+});
+
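Editor's note: the matching dashboard-side delete, sketched with a placeholder ID and token (it removes only the database row, not the running container):

await fetch(`http://localhost:3001/api/v1/docker/containers/${containerId}`, { // containerId assumed
  method: "DELETE",
  headers: { Authorization: "Bearer <jwt>" }, // route is authenticateToken-protected
});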
 // GET /api/v1/docker/agent - Serve the Docker agent installation script
 router.get("/agent", async (_req, res) => {
   try {

@@ -356,6 +356,29 @@ router.post(
       });
     } catch (error) {
       console.error("Host creation error:", error);
+
+      // Check if error is related to connection pool exhaustion
+      if (
+        error.message &&
+        (error.message.includes("connection pool") ||
+          error.message.includes("Timed out fetching") ||
+          error.message.includes("pool timeout"))
+      ) {
+        console.error("⚠️ DATABASE CONNECTION POOL EXHAUSTED!");
+        console.error(
+          "⚠️ Current limit: DB_CONNECTION_LIMIT=" +
+            (process.env.DB_CONNECTION_LIMIT || "30"),
+        );
+        console.error(
+          "⚠️ Pool timeout: DB_POOL_TIMEOUT=" +
+            (process.env.DB_POOL_TIMEOUT || "20") +
+            "s",
+        );
+        console.error(
+          "⚠️ Suggestion: Increase DB_CONNECTION_LIMIT in your .env file",
+        );
+      }
+
       res.status(500).json({ error: "Failed to create host" });
     }
   },
@@ -786,19 +809,41 @@ router.get("/info", validateApiCredentials, async (req, res) => {
 // Ping endpoint for health checks (now uses API credentials)
 router.post("/ping", validateApiCredentials, async (req, res) => {
   try {
-    // Update last update timestamp
+    const now = new Date();
+    const lastUpdate = req.hostRecord.last_update;
+
+    // Detect if this is an agent startup (first ping or after long absence)
+    const timeSinceLastUpdate = lastUpdate ? now - lastUpdate : null;
+    const isStartup =
+      !timeSinceLastUpdate || timeSinceLastUpdate > 5 * 60 * 1000; // 5 minutes
+
+    // Log agent startup
+    if (isStartup) {
+      console.log(
+        `🚀 Agent startup detected: ${req.hostRecord.friendly_name} (${req.hostRecord.hostname || req.hostRecord.api_id})`,
+      );
+
+      // Check if status was previously offline
+      if (req.hostRecord.status === "offline") {
+        console.log(`✅ Agent back online: ${req.hostRecord.friendly_name}`);
+      }
+    }
+
+    // Update last update timestamp and set status to active
     await prisma.hosts.update({
       where: { id: req.hostRecord.id },
       data: {
-        last_update: new Date(),
-        updated_at: new Date(),
+        last_update: now,
+        updated_at: now,
+        status: "active",
       },
     });

     const response = {
       message: "Ping successful",
-      timestamp: new Date().toISOString(),
+      timestamp: now.toISOString(),
       friendlyName: req.hostRecord.friendly_name,
+      agentStartup: isStartup,
     };

     // Check if this is a crontab update trigger

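Editor's note: a successful ping now returns a payload along these lines (values illustrative):

// { message: "Ping successful", timestamp: "2025-11-13T02:17:05.000Z",
//   friendlyName: "web-01", agentStartup: true }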
backend/src/routes/integrationRoutes.js (new file, 242 lines)
@@ -0,0 +1,242 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { v4: uuidv4 } = require("uuid");

const prisma = getPrismaClient();
const router = express.Router();

// POST /api/v1/integrations/docker - Docker data collection endpoint
router.post("/docker", async (req, res) => {
  try {
    const apiId = req.headers["x-api-id"];
    const apiKey = req.headers["x-api-key"];
    const {
      containers,
      images,
      updates,
      daemon_info,
      hostname,
      machine_id,
      agent_version,
    } = req.body;

    console.log(
      `[Docker Integration] Received data from ${hostname || machine_id}`,
    );

    // Validate API credentials
    const host = await prisma.hosts.findFirst({
      where: { api_id: apiId, api_key: apiKey },
    });

    if (!host) {
      console.warn("[Docker Integration] Invalid API credentials");
      return res.status(401).json({ error: "Invalid API credentials" });
    }

    console.log(
      `[Docker Integration] Processing for host: ${host.friendly_name}`,
    );

    const now = new Date();

    // Helper function to validate and parse dates
    const parseDate = (dateString) => {
      if (!dateString) return now;
      const date = new Date(dateString);
      return Number.isNaN(date.getTime()) ? now : date;
    };

    let containersProcessed = 0;
    let imagesProcessed = 0;
    let updatesProcessed = 0;

    // Process containers
    if (containers && Array.isArray(containers)) {
      console.log(
        `[Docker Integration] Processing ${containers.length} containers`,
      );
      for (const containerData of containers) {
        const containerId = uuidv4();

        // Find or create image
        let imageId = null;
        if (containerData.image_repository && containerData.image_tag) {
          const image = await prisma.docker_images.upsert({
            where: {
              repository_tag_image_id: {
                repository: containerData.image_repository,
                tag: containerData.image_tag,
                image_id: containerData.image_id || "unknown",
              },
            },
            update: {
              last_checked: now,
              updated_at: now,
            },
            create: {
              id: uuidv4(),
              repository: containerData.image_repository,
              tag: containerData.image_tag,
              image_id: containerData.image_id || "unknown",
              source: containerData.image_source || "docker-hub",
              created_at: parseDate(containerData.created_at),
              updated_at: now,
            },
          });
          imageId = image.id;
        }

        // Upsert container
        await prisma.docker_containers.upsert({
          where: {
            host_id_container_id: {
              host_id: host.id,
              container_id: containerData.container_id,
            },
          },
          update: {
            name: containerData.name,
            image_id: imageId,
            image_name: containerData.image_name,
            image_tag: containerData.image_tag || "latest",
            status: containerData.status,
            state: containerData.state || containerData.status,
            ports: containerData.ports || null,
            started_at: containerData.started_at
              ? parseDate(containerData.started_at)
              : null,
            updated_at: now,
            last_checked: now,
          },
          create: {
            id: containerId,
            host_id: host.id,
            container_id: containerData.container_id,
            name: containerData.name,
            image_id: imageId,
            image_name: containerData.image_name,
            image_tag: containerData.image_tag || "latest",
            status: containerData.status,
            state: containerData.state || containerData.status,
            ports: containerData.ports || null,
            created_at: parseDate(containerData.created_at),
            started_at: containerData.started_at
              ? parseDate(containerData.started_at)
              : null,
            updated_at: now,
          },
        });
        containersProcessed++;
      }
    }

    // Process standalone images
    if (images && Array.isArray(images)) {
      console.log(`[Docker Integration] Processing ${images.length} images`);
      for (const imageData of images) {
        await prisma.docker_images.upsert({
          where: {
            repository_tag_image_id: {
              repository: imageData.repository,
              tag: imageData.tag,
              image_id: imageData.image_id,
            },
          },
          update: {
            size_bytes: imageData.size_bytes
              ? BigInt(imageData.size_bytes)
              : null,
            digest: imageData.digest || null,
            last_checked: now,
            updated_at: now,
          },
          create: {
            id: uuidv4(),
            repository: imageData.repository,
            tag: imageData.tag,
            image_id: imageData.image_id,
            digest: imageData.digest,
            size_bytes: imageData.size_bytes
              ? BigInt(imageData.size_bytes)
              : null,
            source: imageData.source || "docker-hub",
            created_at: parseDate(imageData.created_at),
            updated_at: now,
          },
        });
        imagesProcessed++;
      }
    }

    // Process updates
    if (updates && Array.isArray(updates)) {
      console.log(`[Docker Integration] Processing ${updates.length} updates`);
      for (const updateData of updates) {
        // Find the image by repository and image_id
        const image = await prisma.docker_images.findFirst({
          where: {
            repository: updateData.repository,
            tag: updateData.current_tag,
            image_id: updateData.image_id,
          },
        });

        if (image) {
          // Store digest info in changelog_url field as JSON
          const digestInfo = JSON.stringify({
            method: "digest_comparison",
            current_digest: updateData.current_digest,
            available_digest: updateData.available_digest,
          });

          // Upsert the update record
          await prisma.docker_image_updates.upsert({
            where: {
              image_id_available_tag: {
                image_id: image.id,
                available_tag: updateData.available_tag,
              },
            },
            update: {
              updated_at: now,
              changelog_url: digestInfo,
              severity: "digest_changed",
            },
            create: {
              id: uuidv4(),
              image_id: image.id,
              current_tag: updateData.current_tag,
              available_tag: updateData.available_tag,
              severity: "digest_changed",
              changelog_url: digestInfo,
              updated_at: now,
            },
          });
          updatesProcessed++;
        }
      }
    }

    console.log(
      `[Docker Integration] Successfully processed: ${containersProcessed} containers, ${imagesProcessed} images, ${updatesProcessed} updates`,
    );

    res.json({
      message: "Docker data collected successfully",
      containers_received: containersProcessed,
      images_received: imagesProcessed,
      updates_found: updatesProcessed,
    });
  } catch (error) {
    console.error("[Docker Integration] Error collecting Docker data:", error);
    console.error("[Docker Integration] Error stack:", error.stack);
    res.status(500).json({
      error: "Failed to collect Docker data",
      message: error.message,
      details: process.env.NODE_ENV === "development" ? error.stack : undefined,
    });
  }
});

module.exports = router;
@@ -14,13 +14,16 @@ const router = express.Router();
 function getCurrentVersion() {
   try {
     const packageJson = require("../../package.json");
-    return packageJson?.version || "1.3.0";
+    if (!packageJson?.version) {
+      throw new Error("Version not found in package.json");
+    }
+    return packageJson.version;
   } catch (packageError) {
-    console.warn(
-      "Could not read version from package.json, using fallback:",
+    console.error(
+      "Could not read version from package.json:",
       packageError.message,
     );
-    return "1.3.0";
+    return "unknown";
   }
 }

@@ -66,6 +66,7 @@ const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
 const gethomepageRoutes = require("./routes/gethomepageRoutes");
 const automationRoutes = require("./routes/automationRoutes");
 const dockerRoutes = require("./routes/dockerRoutes");
+const integrationRoutes = require("./routes/integrationRoutes");
 const wsRoutes = require("./routes/wsRoutes");
 const agentVersionRoutes = require("./routes/agentVersionRoutes");
 const { initSettings } = require("./services/settingsService");
@@ -471,6 +472,7 @@ app.use(
 app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
 app.use(`/api/${apiVersion}/automation`, automationRoutes);
 app.use(`/api/${apiVersion}/docker`, dockerRoutes);
+app.use(`/api/${apiVersion}/integrations`, integrationRoutes);
 app.use(`/api/${apiVersion}/ws`, wsRoutes);
 app.use(`/api/${apiVersion}/agent`, agentVersionRoutes);

@@ -428,26 +428,29 @@ class AgentVersionService {
   async getVersionInfo() {
     let hasUpdate = false;
     let updateStatus = "unknown";
-    let effectiveLatestVersion = this.currentVersion; // Always use local version if available

-    // If we have a local version, use it as the latest regardless of GitHub
-    if (this.currentVersion) {
-      effectiveLatestVersion = this.currentVersion;
+    // Latest version should ALWAYS come from GitHub, not from local binaries
+    // currentVersion = what's installed locally
+    // latestVersion = what's available on GitHub
+    if (this.latestVersion) {
+      console.log(`📦 Latest version from GitHub: ${this.latestVersion}`);
+    } else {
       console.log(
-        `🔄 Using local agent version ${this.currentVersion} as latest`,
-      );
-    } else if (this.latestVersion) {
-      // Fallback to GitHub version only if no local version
-      effectiveLatestVersion = this.latestVersion;
-      console.log(
-        `🔄 No local version found, using GitHub version ${this.latestVersion}`,
+        `⚠️ No GitHub release version available (API may be unavailable)`,
       );
     }

-    if (this.currentVersion && effectiveLatestVersion) {
+    if (this.currentVersion) {
+      console.log(`💾 Current local agent version: ${this.currentVersion}`);
+    } else {
+      console.log(`⚠️ No local agent binary found`);
+    }
+
+    // Determine update status by comparing current vs latest (from GitHub)
+    if (this.currentVersion && this.latestVersion) {
       const comparison = compareVersions(
         this.currentVersion,
-        effectiveLatestVersion,
+        this.latestVersion,
       );
       if (comparison < 0) {
         hasUpdate = true;

@@ -459,25 +462,25 @@
         hasUpdate = false;
         updateStatus = "up-to-date";
       }
-    } else if (effectiveLatestVersion && !this.currentVersion) {
+    } else if (this.latestVersion && !this.currentVersion) {
       hasUpdate = true;
       updateStatus = "no-agent";
-    } else if (this.currentVersion && !effectiveLatestVersion) {
+    } else if (this.currentVersion && !this.latestVersion) {
       // We have a current version but no latest version (GitHub API unavailable)
       hasUpdate = false;
       updateStatus = "github-unavailable";
-    } else if (!this.currentVersion && !effectiveLatestVersion) {
+    } else if (!this.currentVersion && !this.latestVersion) {
       updateStatus = "no-data";
     }

     return {
       currentVersion: this.currentVersion,
-      latestVersion: effectiveLatestVersion,
+      latestVersion: this.latestVersion, // Always return GitHub version, not local
       hasUpdate: hasUpdate,
       updateStatus: updateStatus,
       lastChecked: this.lastChecked,
       supportedArchitectures: this.supportedArchitectures,
-      status: effectiveLatestVersion ? "ready" : "no-releases",
+      status: this.latestVersion ? "ready" : "no-releases",
     };
   }

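Editor's note: compareVersions is assumed to be a standard semver-style comparator (negative when the first argument is older):

// compareVersions("1.3.0", "1.3.1") < 0   → hasUpdate = true
// compareVersions("1.3.1", "1.3.1") === 0 → updateStatus = "up-to-date"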
@@ -99,8 +99,22 @@ function init(server, prismaClient) {
     // Notify subscribers of connection
     notifyConnectionChange(apiId, true);

-    ws.on("message", () => {
-      // Currently we don't need to handle agent->server messages
+    ws.on("message", async (data) => {
+      // Handle incoming messages from agent (e.g., Docker status updates)
+      try {
+        const message = JSON.parse(data.toString());
+
+        if (message.type === "docker_status") {
+          // Handle Docker container status events
+          await handleDockerStatusEvent(apiId, message);
+        }
+        // Add more message types here as needed
+      } catch (err) {
+        console.error(
+          `[agent-ws] error parsing message from ${apiId}:`,
+          err,
+        );
+      }
     });

     ws.on("close", () => {

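Editor's note: for context, a sketch of the frame an agent would push over this socket; the shape mirrors handleDockerStatusEvent below, and the event value is an assumption:

ws.send(
  JSON.stringify({
    type: "docker_status",
    event: "die", // assumed Docker event name
    container_id: "abc123",
    name: "nginx",
    status: "exited",
    timestamp: Date.now(),
  }),
);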
@@ -255,6 +269,62 @@ function subscribeToConnectionChanges(apiId, callback) {
   };
 }

+// Handle Docker container status events from agent
+async function handleDockerStatusEvent(apiId, message) {
+  try {
+    const { event, container_id, name, status, timestamp } = message;
+
+    console.log(
+      `[Docker Event] ${apiId}: Container ${name} (${container_id}) - ${status}`,
+    );
+
+    // Find the host
+    const host = await prisma.hosts.findUnique({
+      where: { api_id: apiId },
+    });
+
+    if (!host) {
+      console.error(`[Docker Event] Host not found for api_id: ${apiId}`);
+      return;
+    }
+
+    // Update container status in database
+    const container = await prisma.docker_containers.findUnique({
+      where: {
+        host_id_container_id: {
+          host_id: host.id,
+          container_id: container_id,
+        },
+      },
+    });
+
+    if (container) {
+      await prisma.docker_containers.update({
+        where: { id: container.id },
+        data: {
+          status: status,
+          state: status,
+          updated_at: new Date(timestamp || Date.now()),
+          last_checked: new Date(),
+        },
+      });
+
+      console.log(
+        `[Docker Event] Updated container ${name} status to ${status}`,
+      );
+    } else {
+      console.log(
+        `[Docker Event] Container ${name} not found in database (may be new)`,
+      );
+    }
+
+    // TODO: Broadcast to connected dashboard clients via SSE or WebSocket
+    // This would notify the frontend UI in real-time
+  } catch (error) {
+    console.error(`[Docker Event] Error handling Docker status event:`, error);
+  }
+}
+
 module.exports = {
   init,
   broadcastSettingsUpdate,
backend/src/services/automation/dockerInventoryCleanup.js (new file, 164 lines)
@@ -0,0 +1,164 @@
const { prisma } = require("./shared/prisma");

/**
 * Docker Inventory Cleanup Automation
 * Removes Docker containers and images for hosts that no longer exist
 */
class DockerInventoryCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "docker-inventory-cleanup";
  }

  /**
   * Process Docker inventory cleanup job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🧹 Starting Docker inventory cleanup...");

    try {
      // Step 1: Find and delete orphaned containers (containers for non-existent hosts)
      const orphanedContainers = await prisma.docker_containers.findMany({
        where: {
          host_id: {
            // Find containers where the host doesn't exist
            notIn: await prisma.hosts
              .findMany({ select: { id: true } })
              .then((hosts) => hosts.map((h) => h.id)),
          },
        },
      });

      let deletedContainersCount = 0;
      const deletedContainers = [];

      for (const container of orphanedContainers) {
        try {
          await prisma.docker_containers.delete({
            where: { id: container.id },
          });
          deletedContainersCount++;
          deletedContainers.push({
            id: container.id,
            container_id: container.container_id,
            name: container.name,
            image_name: container.image_name,
            host_id: container.host_id,
          });
          console.log(
            `🗑️ Deleted orphaned container: ${container.name} (host_id: ${container.host_id})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete container ${container.id}:`,
            deleteError.message,
          );
        }
      }

      // Step 2: Find and delete orphaned images (images with no containers using them)
      const orphanedImages = await prisma.docker_images.findMany({
        where: {
          docker_containers: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              docker_containers: true,
              docker_image_updates: true,
            },
          },
        },
      });

      let deletedImagesCount = 0;
      const deletedImages = [];

      for (const image of orphanedImages) {
        try {
          // First delete any image updates associated with this image
          if (image._count.docker_image_updates > 0) {
            await prisma.docker_image_updates.deleteMany({
              where: { image_id: image.id },
            });
          }

          // Then delete the image itself
          await prisma.docker_images.delete({
            where: { id: image.id },
          });
          deletedImagesCount++;
          deletedImages.push({
            id: image.id,
            repository: image.repository,
            tag: image.tag,
            image_id: image.image_id,
          });
          console.log(
            `🗑️ Deleted orphaned image: ${image.repository}:${image.tag}`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete image ${image.id}:`,
            deleteError.message,
          );
        }
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Docker inventory cleanup completed in ${executionTime}ms - Deleted ${deletedContainersCount} containers and ${deletedImagesCount} images`,
      );

      return {
        success: true,
        deletedContainersCount,
        deletedImagesCount,
        deletedContainers,
        deletedImages,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Docker inventory cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring Docker inventory cleanup (daily at 4 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "docker-inventory-cleanup",
      {},
      {
        repeat: { cron: "0 4 * * *" }, // Daily at 4 AM
        jobId: "docker-inventory-cleanup-recurring",
      },
    );
    console.log("✅ Docker inventory cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual Docker inventory cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "docker-inventory-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual Docker inventory cleanup triggered");
    return job;
  }
}

module.exports = DockerInventoryCleanup;
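Editor's note: a minimal consumption sketch, assuming a queueManager whose queues map is keyed by queue name (matching the queue manager hunks below):

const DockerInventoryCleanup = require("./automation/dockerInventoryCleanup");
const cleanup = new DockerInventoryCleanup(queueManager); // queueManager assumed in scope
await cleanup.schedule(); // recurring, daily at 4 AM
const job = await cleanup.triggerManual(); // one-off run, as used by the automation route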
@@ -52,17 +52,24 @@ class GitHubUpdateCheck {
     }

     // Read version from package.json
-    let currentVersion = "1.3.0"; // fallback
+    let currentVersion = null;
     try {
       const packageJson = require("../../../package.json");
       if (packageJson?.version) {
         currentVersion = packageJson.version;
       }
     } catch (packageError) {
-      console.warn(
+      console.error(
         "Could not read version from package.json:",
         packageError.message,
       );
+      throw new Error(
+        "Could not determine current version from package.json",
+      );
     }

+    if (!currentVersion) {
+      throw new Error("Version not found in package.json");
+    }
+
     const isUpdateAvailable =

@@ -8,6 +8,7 @@ const GitHubUpdateCheck = require("./githubUpdateCheck");
 const SessionCleanup = require("./sessionCleanup");
 const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
 const OrphanedPackageCleanup = require("./orphanedPackageCleanup");
+const DockerInventoryCleanup = require("./dockerInventoryCleanup");

 // Queue names
 const QUEUE_NAMES = {
@@ -15,6 +16,7 @@ const QUEUE_NAMES = {
   SESSION_CLEANUP: "session-cleanup",
   ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
   ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup",
+  DOCKER_INVENTORY_CLEANUP: "docker-inventory-cleanup",
   AGENT_COMMANDS: "agent-commands",
 };

@@ -91,6 +93,8 @@ class QueueManager {
       new OrphanedRepoCleanup(this);
     this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] =
       new OrphanedPackageCleanup(this);
+    this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP] =
+      new DockerInventoryCleanup(this);

     console.log("✅ All automation classes initialized");
   }
@@ -149,6 +153,15 @@ class QueueManager {
       workerOptions,
     );

+    // Docker Inventory Cleanup Worker
+    this.workers[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP] = new Worker(
+      QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP,
+      this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].process.bind(
+        this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP],
+      ),
+      workerOptions,
+    );
+
     // Agent Commands Worker
     this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
       QUEUE_NAMES.AGENT_COMMANDS,
@@ -205,6 +218,7 @@ class QueueManager {
     await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
     await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
     await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule();
+    await this.automations[QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP].schedule();
   }

   /**
@@ -228,6 +242,12 @@ class QueueManager {
     ].triggerManual();
   }

+  async triggerDockerInventoryCleanup() {
+    return this.automations[
+      QUEUE_NAMES.DOCKER_INVENTORY_CLEANUP
+    ].triggerManual();
+  }
+
   /**
    * Get queue statistics
    */

@@ -33,7 +33,8 @@ async function checkPublicRepo(owner, repo) {
   try {
     const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

-    let currentVersion = "1.3.0"; // fallback
+    // Get current version for User-Agent (or use generic if unavailable)
+    let currentVersion = "unknown";
     try {
       const packageJson = require("../../../package.json");
       if (packageJson?.version) {
@@ -41,7 +42,7 @@ async function checkPublicRepo(owner, repo) {
       }
     } catch (packageError) {
       console.warn(
-        "Could not read version from package.json for User-Agent, using fallback:",
+        "Could not read version from package.json for User-Agent:",
         packageError.message,
       );
     }