Building the start of the Automation page and implementing the BullMQ module

Muhammad Ibrahim
2025-10-10 12:24:23 +01:00
parent cfc91243eb
commit 0ad1a96871
16 changed files with 2154 additions and 749 deletions

echoHello.js

@@ -0,0 +1,67 @@
/**
* Echo Hello Automation
* Simple test automation task
*/
class EchoHello {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "echo-hello";
}
/**
* Process echo hello job
*/
async process(job) {
const startTime = Date.now();
console.log("👋 Starting echo hello task...");
try {
// Simple echo task
const message = job.data.message || "Hello from BullMQ!";
const timestamp = new Date().toISOString();
// Simulate some work
await new Promise((resolve) => setTimeout(resolve, 100));
const executionTime = Date.now() - startTime;
console.log(`✅ Echo hello completed in ${executionTime}ms: ${message}`);
return {
success: true,
message,
timestamp,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Echo hello failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Echo hello is manual only - no scheduling
*/
async schedule() {
console.log(" Echo hello is manual only - no scheduling needed");
return null;
}
/**
* Trigger manual echo hello
*/
async triggerManual(message = "Hello from BullMQ!") {
const job = await this.queueManager.queues[this.queueName].add(
"echo-hello-manual",
{ message },
{ priority: 1 },
);
console.log("✅ Manual echo hello triggered");
return job;
}
}
module.exports = EchoHello;
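
A minimal usage sketch for the class above (not part of this diff): jobs are enqueued through the queue manager defined later in this commit, and its workers must be running for the job to be processed. The require path and wrapper function are assumptions.

// Hypothetical caller: trigger the echo automation through the queue manager.
const { queueManager } = require("./services/automation"); // assumed path to the index module below

async function runEchoTest() {
  await queueManager.initialize(); // sets up queues, workers, and listeners
  const job = await queueManager.triggerEchoHello("Hello from a manual trigger");
  console.log(`Echo job ${job.id} queued`);
}

runEchoTest().catch(console.error);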

githubUpdateCheck.js

@@ -0,0 +1,153 @@
const { prisma } = require("./shared/prisma");
const { compareVersions, checkPublicRepo } = require("./shared/utils");
/**
* GitHub Update Check Automation
* Checks for new releases on GitHub using HTTPS API
*/
class GitHubUpdateCheck {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "github-update-check";
}
/**
* Process GitHub update check job
*/
async process(job) {
const startTime = Date.now();
console.log("🔍 Starting GitHub update check...");
try {
// Get settings
const settings = await prisma.settings.findFirst();
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
let owner, repo;
// Parse GitHub repository URL (supports both HTTPS and SSH formats)
if (repoUrl.includes("git@github.com:")) {
const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
if (match) {
[, owner, repo] = match;
}
} else if (repoUrl.includes("github.com/")) {
const match = repoUrl.match(
/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
);
if (match) {
[, owner, repo] = match;
}
}
if (!owner || !repo) {
throw new Error("Could not parse GitHub repository URL");
}
// Always use HTTPS GitHub API (simpler and more reliable)
const latestVersion = await checkPublicRepo(owner, repo);
if (!latestVersion) {
throw new Error("Could not determine latest version");
}
// Read version from package.json
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {
currentVersion = packageJson.version;
}
} catch (packageError) {
console.warn(
"Could not read version from package.json:",
packageError.message,
);
}
const isUpdateAvailable =
compareVersions(latestVersion, currentVersion) > 0;
// Update settings with check results (guard against a missing settings row,
// since repoUrl above already falls back to a default when none exists)
if (settings) {
await prisma.settings.update({
where: { id: settings.id },
data: {
last_update_check: new Date(),
update_available: isUpdateAvailable,
latest_version: latestVersion,
},
});
}
const executionTime = Date.now() - startTime;
console.log(
`✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
);
return {
success: true,
currentVersion,
latestVersion,
isUpdateAvailable,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ GitHub update check failed after ${executionTime}ms:`,
error.message,
);
// Update last check time even on error
try {
const settings = await prisma.settings.findFirst();
if (settings) {
await prisma.settings.update({
where: { id: settings.id },
data: {
last_update_check: new Date(),
update_available: false,
},
});
}
} catch (updateError) {
console.error(
"❌ Error updating last check time:",
updateError.message,
);
}
throw error;
}
}
/**
* Schedule recurring GitHub update check (daily at midnight)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"github-update-check",
{},
{
repeat: { pattern: "0 0 * * *" }, // Daily at midnight (BullMQ uses "pattern" for cron expressions)
jobId: "github-update-check-recurring",
},
);
console.log("✅ GitHub update check scheduled");
return job;
}
/**
* Trigger manual GitHub update check
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"github-update-check-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual GitHub update check triggered");
return job;
}
}
module.exports = GitHubUpdateCheck;
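
The trickiest part of this job is the repository-URL parsing; a standalone sketch of how the two regexes above behave on the default repository in both remote formats (illustrative only, not part of the commit):

// Both remote formats resolve to the same owner/repo pair.
const ssh = "git@github.com:patchMon/patchmon.git".match(
  /git@github\.com:([^/]+)\/([^/]+)\.git/,
);
console.log(ssh[1], ssh[2]); // patchMon patchmon

const https = "https://github.com/patchMon/patchmon".match(
  /github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
);
console.log(https[1], https[2]); // patchMon patchmon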

index.js

@@ -0,0 +1,283 @@
const { Queue, Worker } = require("bullmq");
const { redis, redisConnection } = require("./shared/redis");
const { prisma } = require("./shared/prisma");
// Import automation classes
const GitHubUpdateCheck = require("./githubUpdateCheck");
const SessionCleanup = require("./sessionCleanup");
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
const EchoHello = require("./echoHello");
// Queue names
const QUEUE_NAMES = {
GITHUB_UPDATE_CHECK: "github-update-check",
SESSION_CLEANUP: "session-cleanup",
SYSTEM_MAINTENANCE: "system-maintenance",
ECHO_HELLO: "echo-hello",
ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
};
/**
* Main Queue Manager
* Manages all BullMQ queues and workers
*/
class QueueManager {
constructor() {
this.queues = {};
this.workers = {};
this.automations = {};
this.isInitialized = false;
}
/**
* Initialize all queues, workers, and automations
*/
async initialize() {
try {
console.log("✅ Redis connection successful");
// Initialize queues
await this.initializeQueues();
// Initialize automation classes
await this.initializeAutomations();
// Initialize workers
await this.initializeWorkers();
// Setup event listeners
this.setupEventListeners();
this.isInitialized = true;
console.log("✅ Queue manager initialized successfully");
} catch (error) {
console.error("❌ Failed to initialize queue manager:", error.message);
throw error;
}
}
/**
* Initialize all queues
*/
async initializeQueues() {
for (const [key, queueName] of Object.entries(QUEUE_NAMES)) {
this.queues[queueName] = new Queue(queueName, {
connection: redisConnection,
defaultJobOptions: {
removeOnComplete: 50, // Keep last 50 completed jobs
removeOnFail: 20, // Keep last 20 failed jobs
attempts: 3, // Retry failed jobs 3 times
backoff: {
type: "exponential",
delay: 2000,
},
},
});
console.log(`✅ Queue '${queueName}' initialized`);
}
}
/**
* Initialize automation classes
*/
async initializeAutomations() {
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
this,
);
this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
new OrphanedRepoCleanup(this);
this.automations[QUEUE_NAMES.ECHO_HELLO] = new EchoHello(this);
console.log("✅ All automation classes initialized");
}
/**
* Initialize all workers
*/
async initializeWorkers() {
// GitHub Update Check Worker
this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
QUEUE_NAMES.GITHUB_UPDATE_CHECK,
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Session Cleanup Worker
this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
QUEUE_NAMES.SESSION_CLEANUP,
this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.SESSION_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Orphaned Repo Cleanup Worker
this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Echo Hello Worker
this.workers[QUEUE_NAMES.ECHO_HELLO] = new Worker(
QUEUE_NAMES.ECHO_HELLO,
this.automations[QUEUE_NAMES.ECHO_HELLO].process.bind(
this.automations[QUEUE_NAMES.ECHO_HELLO],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Add error handling for all workers
Object.values(this.workers).forEach((worker) => {
worker.on("error", (error) => {
console.error("Worker error:", error);
});
});
console.log("✅ All workers initialized");
}
/**
* Setup event listeners for all queues
*/
setupEventListeners() {
for (const queueName of Object.values(QUEUE_NAMES)) {
const queue = this.queues[queueName];
const worker = this.workers[queueName];
queue.on("error", (error) => {
console.error(`❌ Queue '${queueName}' experienced an error:`, error);
});
// In BullMQ, 'completed' and 'failed' are emitted by the Worker (or a
// QueueEvents instance), not by the Queue itself.
worker.on("failed", (job, err) => {
console.error(
`❌ Job '${job?.id}' in queue '${queueName}' failed:`,
err,
);
});
worker.on("completed", (job) => {
console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
});
}
console.log("✅ Queue events initialized");
}
/**
* Schedule all recurring jobs
*/
async scheduleAllJobs() {
await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ECHO_HELLO].schedule();
}
/**
* Manual job triggers
*/
async triggerGitHubUpdateCheck() {
return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
}
async triggerSessionCleanup() {
return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
}
async triggerOrphanedRepoCleanup() {
return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
}
async triggerEchoHello(message = "Hello from BullMQ!") {
return this.automations[QUEUE_NAMES.ECHO_HELLO].triggerManual(message);
}
/**
* Get queue statistics
*/
async getQueueStats(queueName) {
const queue = this.queues[queueName];
if (!queue) {
throw new Error(`Queue ${queueName} not found`);
}
const [waiting, active, completed, failed, delayed] = await Promise.all([
queue.getWaiting(),
queue.getActive(),
queue.getCompleted(),
queue.getFailed(),
queue.getDelayed(),
]);
return {
waiting: waiting.length,
active: active.length,
completed: completed.length,
failed: failed.length,
delayed: delayed.length,
};
}
/**
* Get all queue statistics
*/
async getAllQueueStats() {
const stats = {};
for (const queueName of Object.values(QUEUE_NAMES)) {
stats[queueName] = await this.getQueueStats(queueName);
}
return stats;
}
/**
* Get recent jobs for a queue
*/
async getRecentJobs(queueName, limit = 10) {
const queue = this.queues[queueName];
if (!queue) {
throw new Error(`Queue ${queueName} not found`);
}
const [completed, failed] = await Promise.all([
queue.getCompleted(0, limit - 1),
queue.getFailed(0, limit - 1),
]);
return [...completed, ...failed]
.sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
.slice(0, limit);
}
/**
* Graceful shutdown
*/
async shutdown() {
console.log("🛑 Shutting down queue manager...");
for (const queueName of Object.keys(this.queues)) {
await this.queues[queueName].close();
await this.workers[queueName].close();
}
await redis.quit();
console.log("✅ Queue manager shutdown complete");
}
}
const queueManager = new QueueManager();
module.exports = { queueManager, QUEUE_NAMES };
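
The file exports a singleton, so a server entry point only has to initialize it once and register the recurring jobs; a sketch of that wiring (the calling file and require path are assumptions, not shown in this diff):

const { queueManager } = require("./services/automation"); // assumed path

async function startAutomation() {
  await queueManager.initialize();      // connect queues, workers, listeners
  await queueManager.scheduleAllJobs(); // register the recurring jobs
}

startAutomation().catch((error) => {
  console.error("Failed to start automation:", error.message);
  process.exit(1);
});

// Close workers, queues, and the Redis connection on shutdown signals.
process.on("SIGTERM", async () => {
  await queueManager.shutdown();
  process.exit(0);
});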

orphanedRepoCleanup.js

@@ -0,0 +1,114 @@
const { prisma } = require("./shared/prisma");
/**
* Orphaned Repository Cleanup Automation
* Removes repositories with no associated hosts
*/
class OrphanedRepoCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "orphaned-repo-cleanup";
}
/**
* Process orphaned repository cleanup job
*/
async process(job) {
const startTime = Date.now();
console.log("🧹 Starting orphaned repository cleanup...");
try {
// Find repositories with 0 hosts
const orphanedRepos = await prisma.repositories.findMany({
where: {
host_repositories: {
none: {},
},
},
include: {
_count: {
select: {
host_repositories: true,
},
},
},
});
let deletedCount = 0;
const deletedRepos = [];
// Delete orphaned repositories
for (const repo of orphanedRepos) {
try {
await prisma.repositories.delete({
where: { id: repo.id },
});
deletedCount++;
deletedRepos.push({
id: repo.id,
name: repo.name,
url: repo.url,
});
console.log(
`🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
);
} catch (deleteError) {
console.error(
`❌ Failed to delete repository ${repo.id}:`,
deleteError.message,
);
}
}
const executionTime = Date.now() - startTime;
console.log(
`✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
);
return {
success: true,
deletedCount,
deletedRepos,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Schedule recurring orphaned repository cleanup (daily at 2 AM)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-repo-cleanup",
{},
{
repeat: { pattern: "0 2 * * *" }, // Daily at 2 AM (BullMQ uses "pattern" for cron expressions)
jobId: "orphaned-repo-cleanup-recurring",
},
);
console.log("✅ Orphaned repository cleanup scheduled");
return job;
}
/**
* Trigger manual orphaned repository cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-repo-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual orphaned repository cleanup triggered");
return job;
}
}
module.exports = OrphanedRepoCleanup;
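
For the Automation page this commit starts building, a handler might combine the manual trigger with the queue-manager statistics helpers; a rough sketch (names and paths are assumptions, and it presumes queueManager.initialize() already ran at startup):

const { queueManager, QUEUE_NAMES } = require("./services/automation"); // assumed path

async function runOrphanedRepoCleanup() {
  await queueManager.triggerOrphanedRepoCleanup();
  // Inspect the queue afterwards using the helpers defined in the queue manager.
  const stats = await queueManager.getQueueStats(QUEUE_NAMES.ORPHANED_REPO_CLEANUP);
  const recent = await queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 5);
  return { stats, recentJobIds: recent.map((job) => job.id) };
}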

sessionCleanup.js

@@ -0,0 +1,78 @@
const { prisma } = require("./shared/prisma");
const { cleanup_expired_sessions } = require("../../utils/session_manager");
/**
* Session Cleanup Automation
* Cleans up expired user sessions
*/
class SessionCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "session-cleanup";
}
/**
* Process session cleanup job
*/
async process(job) {
const startTime = Date.now();
console.log("🧹 Starting session cleanup...");
try {
const result = await prisma.user_sessions.deleteMany({
where: {
OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
},
});
const executionTime = Date.now() - startTime;
console.log(
`✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
);
return {
success: true,
sessionsCleaned: result.count,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Session cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Schedule recurring session cleanup (every hour)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"session-cleanup",
{},
{
repeat: { pattern: "0 * * * *" }, // Every hour (BullMQ uses "pattern" for cron expressions)
jobId: "session-cleanup-recurring",
},
);
console.log("✅ Session cleanup scheduled");
return job;
}
/**
* Trigger manual session cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"session-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual session cleanup triggered");
return job;
}
}
module.exports = SessionCleanup;

shared/prisma.js

@@ -0,0 +1,5 @@
const { PrismaClient } = require("@prisma/client");
const prisma = new PrismaClient();
module.exports = { prisma };

shared/redis.js

@@ -0,0 +1,16 @@
const IORedis = require("ioredis");
// Redis connection configuration
const redisConnection = {
host: process.env.REDIS_HOST || "localhost",
port: parseInt(process.env.REDIS_PORT) || 6379,
password: process.env.REDIS_PASSWORD || undefined,
db: parseInt(process.env.REDIS_DB) || 0,
retryDelayOnFailover: 100,
maxRetriesPerRequest: null, // BullMQ requires this to be null
};
// Create Redis connection
const redis = new IORedis(redisConnection);
module.exports = { redis, redisConnection };
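
The connection is configured entirely from REDIS_HOST, REDIS_PORT, REDIS_PASSWORD, and REDIS_DB, falling back to a local default instance. A quick connectivity sketch using the exported client (assumes a reachable Redis, e.g. a local default install; not part of the commit):

// Ping the shared ioredis client to confirm the configuration resolves correctly.
const { redis } = require("./shared/redis"); // path as used by the automation modules

redis
  .ping()
  .then((reply) => console.log("Redis reachable:", reply)) // expected: "PONG"
  .catch((error) => console.error("Redis unreachable:", error.message));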

shared/utils.js

@@ -0,0 +1,82 @@
// Common utilities for automation jobs
/**
* Compare two semantic versions
* @param {string} version1 - First version
* @param {string} version2 - Second version
* @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
*/
function compareVersions(version1, version2) {
const v1parts = version1.split(".").map(Number);
const v2parts = version2.split(".").map(Number);
const maxLength = Math.max(v1parts.length, v2parts.length);
for (let i = 0; i < maxLength; i++) {
const v1part = v1parts[i] || 0;
const v2part = v2parts[i] || 0;
if (v1part > v2part) return 1;
if (v1part < v2part) return -1;
}
return 0;
}
/**
* Check public GitHub repository for latest release
* @param {string} owner - Repository owner
* @param {string} repo - Repository name
* @returns {Promise<string|null>} - Latest version or null
*/
async function checkPublicRepo(owner, repo) {
try {
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {
currentVersion = packageJson.version;
}
} catch (packageError) {
console.warn(
"Could not read version from package.json for User-Agent, using fallback:",
packageError.message,
);
}
const response = await fetch(httpsRepoUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${currentVersion}`,
},
});
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
return null;
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
const releaseData = await response.json();
return releaseData.tag_name.replace("v", "");
} catch (error) {
console.error("GitHub API error:", error.message);
throw error;
}
}
module.exports = {
compareVersions,
checkPublicRepo,
};
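
Note that checkPublicRepo relies on the global fetch, so Node 18 or newer is assumed. A quick illustration of compareVersions semantics (standalone, not part of the commit):

const { compareVersions } = require("./shared/utils");

console.log(compareVersions("1.3.0", "1.2.7"));  //  1: first version is newer
console.log(compareVersions("1.2.7", "1.2.7"));  //  0: versions are equal
console.log(compareVersions("1.2.7", "1.2.10")); // -1: numeric compare, so "10" beats "7"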