Compare commits

...

41 Commits

Author SHA1 Message Date
renovate[bot]
6ce131636d Update dependency express to v5 2025-10-13 20:26:17 +00:00
Muhammad Ibrahim
4d5040e0e9 Merge feature/automation into main
- Resolve migration reconciliation conflicts
- Include updated migration that handles 1.2.7 upgrade scenario
- Merge automation features and Docker support
2025-10-13 21:25:11 +01:00
Muhammad Ibrahim
28c5310b99 Fix migration reconciliation to handle 1.2.7 upgrade scenario
- Add case for table exists but no migration record (1.2.7 upgrade)
- Creates migration record for existing user_sessions table
- Prevents P3018 error when table exists from 1.2.7 installation
- Handles all upgrade scenarios properly
2025-10-13 21:24:35 +01:00
Muhammad Ibrahim
a2e9743da6 Add migration reconciliation for user_sessions 1.2.7 to 1.2.8+ upgrade
- Creates migration 20251004999999_reconcile_user_sessions_migration
- Runs before 20251005000000_add_user_sessions migration
- Properly handles failed migrations by marking them as rolled back first
- Handles migration name conflicts from 1.2.7 (add_user_sessions) to 1.2.8+ (20251005000000_add_user_sessions)
- Fixes failed migration states from upgrade attempts
- Works naturally within Prisma migration system
- No changes to Docker entrypoint or setup scripts needed
2025-10-13 21:15:22 +01:00
Muhammad Ibrahim
3863d641fa Fix git update conflicts in setup.sh --update
- Add git clean -fd to remove untracked files before pull
- Add git reset --hard HEAD to ensure clean state
- Prevents merge conflicts from untracked files during updates
- Ensures smooth updates from any version to any version
2025-10-13 21:15:04 +01:00
Muhammad Ibrahim
cc8f77a946 Deleted the JS file 2025-10-13 20:59:05 +01:00
Muhammad Ibrahim
36455e2bfd Fixed migration to be done via a Prisma migration, not a JS script 2025-10-13 20:58:08 +01:00
Muhammad Ibrahim
af65d38cad - Fixes P3009 error when upgrading from 1.2.7
- Reconciles 'add_user_sessions' to '20251005000000_add_user_sessions'
- Prevents duplicate migration attempts
- Handles fresh installs gracefully
2025-10-13 20:29:42 +01:00
Muhammad Ibrahim
29266b6d77 Added longer transaction timeout on PostgreSQL DB 2025-10-12 21:14:52 +01:00
Muhammad Ibrahim
f96e468482 Improved patchmon-agent.sh logic to handle locked apt processes
Introduced Docker feature integration via agent
2025-10-11 22:54:49 +01:00
Muhammad Ibrahim
9f8c88badf Remove 'coming soon' indicator from Automation menu item 2025-10-11 20:55:27 +01:00
Muhammad Ibrahim
7985a225d7 Merge main into feature/automation to align git history
Resolved conflicts:
- backend/src/server.js: Kept automation routes alongside gethomepage routes
- frontend/src/pages/Queue.jsx: Kept deleted (replaced by Automation.jsx)
- setup.sh: Kept newer version date (2025-10-11)

This merge brings in all commits from main including:
- GetHomepage integration
- Version 1.2.9 updates
- Migration file renames
- Bug fixes and improvements
2025-10-11 20:45:29 +01:00
Muhammad Ibrahim
8c538bd99c Merge changes from main: Add GetHomepage integration and update to v1.2.9
- Added gethomepageRoutes.js for GetHomepage integration
- Updated all package.json files to version 1.2.9
- Updated agent script to version 1.2.9
- Updated version fallbacks in versionRoutes.js and updateScheduler.js
- Updated setup.sh with version 1.2.9
- Merged GetHomepage integration UI (Integrations.jsx)
- Updated docker-entrypoint.sh from main
- Updated VersionUpdateTab component
- Combined automation and gethomepage routes in server.js
- Maintains both BullMQ automation and GetHomepage functionality
2025-10-11 20:35:47 +01:00
9 Technology Group LTD
623bf5e2c8 Merge pull request #161 from PatchMon/feature/gethomepage
Feature/gethomepage + new version 1.2.9
2025-10-11 20:21:44 +01:00
Muhammad Ibrahim
ed8cc81b89 Changed version from 1.2.8 to 1.2.9 in preparation for the next release 2025-10-11 20:14:08 +01:00
Muhammad Ibrahim
5c4353a688 Fixed linting errors with gethomepage area 2025-10-11 20:04:29 +01:00
Muhammad Ibrahim
6ebcdd57d5 Fixed migration order issue where users were getting the error "add_user_sessions" does not exist 2025-10-11 14:47:27 +01:00
Muhammad Ibrahim
a3d0dfd665 Fixed entrypoint to better handle updating of the agent mechanism
Updated README to show the --update flag
2025-10-10 21:52:57 +01:00
Muhammad Ibrahim
d99ded6d65 Added database backup ability when running setup.sh --update 2025-10-10 20:16:24 +01:00
Muhammad Ibrahim
1ea96b6172 Merge branch 'main' of github.com:9technologygroup/patchmon.net 2025-10-10 19:37:46 +01:00
Muhammad Ibrahim
1e5ee66825 Fixed version update checking mechanism
Updated the setup.sh script to have the --update flag
2025-10-10 19:32:44 +01:00
Muhammad Ibrahim
88130797e4 Updated Version to 1.2.8 2025-10-10 12:39:17 +01:00
Muhammad Ibrahim
0ad1a96871 Built the start of the Automation page and implemented the BullMQ module 2025-10-10 12:24:23 +01:00
9 Technology Group LTD
566c415471 Merge pull request #152 from PatchMon/feature/queue
Feature/Agent
2025-10-08 18:52:02 +01:00
Muhammad Ibrahim
cfc91243eb Fixed issues with RHEL-based systems not sending their repos to PatchMon 2025-10-08 18:46:39 +01:00
Muhammad Ibrahim
84cf31869b Fixed spacing in the header for the buttons 2025-10-08 17:57:56 +01:00
Muhammad Ibrahim
18c9d241eb Fixed Rocky Linux 10 support 2025-10-08 17:53:08 +01:00
Muhammad Ibrahim
86b5da3ea0 Removed titles from the top nav bar to give space to the search bar 2025-10-08 17:25:24 +01:00
9 Technology Group LTD
c9b5ee63d8 Merge pull request #151 from PatchMon/fix/agentdata
Fix/agentdata
2025-10-08 16:25:56 +01:00
9 Technology Group LTD
3737a5a935 Merge pull request #145 from Maelstromeous/patch-1
Document manual result update process for PatchMon
2025-10-08 15:50:28 +01:00
9 Technology Group LTD
bcce48948a Merge pull request #148 from PatchMon/refactor/frontend_optimisations
Various optimisations/fixes - mostly frontend
2025-10-08 15:48:10 +01:00
tigattack
8fd91eae1a fix(frontend): use updateUserMutation in EditUserModal
Makes it more consistent with the other user mutations and resolves a lint error for the formerly unused `updateUserMutation`
2025-10-08 02:18:20 +01:00
tigattack
da8c661d20 refactor: fix lint errors 2025-10-08 02:12:51 +01:00
tigattack
2bf639e315 chore: update gitignore for docker dev 2025-10-08 02:10:40 +01:00
tigattack
c02ac4bd6f fix(frontend): don't query settings before auth 2025-10-08 02:10:40 +01:00
tigattack
4e0eaf7323 feat(frontend): add lazy loading for routes with Suspense fallback 2025-10-08 02:10:40 +01:00
tigattack
ef9ef58bcb feat(vite): add manual chunking for optimized build output 2025-10-08 02:08:46 +01:00
9 Technology Group LTD
29afe3da1f Merge pull request #147 from PatchMon/fix/agentdata
Add Line Chart
2025-10-08 00:47:47 +01:00
9 Technology Group LTD
12ef6fd8e1 Merge pull request #146 from PatchMon/fix/agentdata
Agent improvements for Debian
Removal of 100 package limit
Modified hosts detail page with Agent history
Added Device fingerprinting for better session management (I need to improve this though)
Added Dashboard card of Package trends for all or specific hosts
Fixed filtering on the package page
2025-10-08 00:33:12 +01:00
Matt Cavanagh
2b36e88d85 Revise manual update instructions in README
Updated instructions for forcing updates after host package changes.
2025-10-07 20:25:53 +01:00
Matt Cavanagh
6624ec002d Document manual update process for PatchMon
Add instructions for manual update in README
2025-10-07 20:24:15 +01:00
46 changed files with 9001 additions and 1773 deletions

2
.gitignore vendored
View File

@@ -154,4 +154,4 @@ setup-installer-site.sh
install-server.*
notify-clients-upgrade.sh
debug-agent.sh
docker/compose_dev_data
docker/compose_dev_*

View File

@@ -43,7 +43,7 @@ PatchMon provides centralized patch management across diverse server environment
### API & Integrations
- REST API under `/api/v1` with JWT auth
- **Proxmox LXC Auto-Enrollment** - Automatically discover and enroll LXC containers from Proxmox hosts ([Documentation](PROXMOX_AUTO_ENROLLMENT.md))
- Proxmox LXC Auto-Enrollment - Automatically discover and enroll LXC containers from Proxmox hosts
### Security
- Rate limiting for general, auth, and agent endpoints
@@ -85,11 +85,16 @@ apt-get upgrade -y
apt install curl -y
```
#### Script
#### Install Script
```bash
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh
```
#### Update Script (--update flag)
```bash
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh --update
```
#### Minimum specs for building : #####
CPU : 2 vCPU
RAM : 2GB
@@ -113,6 +118,14 @@ After installation:
- Visit `http(s)://<your-domain>` and complete first-time admin setup
- See all useful info in `deployment-info.txt`
## Forcing updates after host package changes
Should you perform a manual package update on your host and wish to see the results reflected in PatchMon quicker than the usual scheduled update, you can trigger the process manually by running:
```bash
/usr/local/bin/patchmon-agent.sh update
```
This will send the results immediately to PatchMon.
## Communication Model
- Outbound-only agents: servers initiate communication to PatchMon

View File

@@ -1,12 +1,12 @@
#!/bin/bash
# PatchMon Agent Script v1.2.8
# PatchMon Agent Script v1.2.9
# This script sends package update information to the PatchMon server using API credentials
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.8"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-agent.log"
@@ -38,21 +38,21 @@ error() {
exit 1
}
# Info logging (cleaner output - only stdout, no duplicate logging)
# Info logging (cleaner output - only stderr, no duplicate logging)
info() {
echo -e "${BLUE} $1${NC}"
echo -e "${BLUE} $1${NC}" >&2
log "INFO: $1"
}
# Success logging (cleaner output - only stdout, no duplicate logging)
# Success logging (cleaner output - only stderr, no duplicate logging)
success() {
echo -e "${GREEN}$1${NC}"
echo -e "${GREEN}$1${NC}" >&2
log "SUCCESS: $1"
}
# Warning logging (cleaner output - only stdout, no duplicate logging)
# Warning logging (cleaner output - only stderr, no duplicate logging)
warning() {
echo -e "${YELLOW}⚠️ $1${NC}"
echo -e "${YELLOW}⚠️ $1${NC}" >&2
log "WARNING: $1"
}
@@ -231,13 +231,14 @@ detect_os() {
"opensuse"|"opensuse-leap"|"opensuse-tumbleweed")
OS_TYPE="suse"
;;
"rocky"|"almalinux")
"almalinux")
OS_TYPE="rhel"
;;
"ol")
# Keep Oracle Linux as 'ol' for proper frontend identification
OS_TYPE="ol"
;;
# Rocky Linux keeps its own identity for proper frontend display
esac
elif [[ -f /etc/redhat-release ]]; then
@@ -265,7 +266,7 @@ get_repository_info() {
"ubuntu"|"debian")
get_apt_repositories repos_json first
;;
"centos"|"rhel"|"fedora"|"ol")
"centos"|"rhel"|"fedora"|"ol"|"rocky")
get_yum_repositories repos_json first
;;
*)
@@ -573,14 +574,118 @@ get_yum_repositories() {
local -n first_ref=$2
# Parse yum/dnf repository configuration
local repo_info=""
if command -v dnf >/dev/null 2>&1; then
local repo_info=$(dnf repolist all --verbose 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-name|^Repo-status")
repo_info=$(dnf repolist all --verbose 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-mirrors|^Repo-name|^Repo-status")
elif command -v yum >/dev/null 2>&1; then
local repo_info=$(yum repolist all -v 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-name|^Repo-status")
repo_info=$(yum repolist all -v 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-mirrors|^Repo-name|^Repo-status")
fi
# This is a simplified implementation - would need more work for full YUM support
# For now, return empty for non-APT systems
if [[ -z "$repo_info" ]]; then
return
fi
# Parse repository information
local current_repo=""
local repo_id=""
local repo_name=""
local repo_url=""
local repo_mirrors=""
local repo_status=""
while IFS= read -r line; do
if [[ "$line" =~ ^Repo-id[[:space:]]+:[[:space:]]+(.+)$ ]]; then
# Process previous repository if we have one
if [[ -n "$current_repo" ]]; then
process_yum_repo repos_ref first_ref "$repo_id" "$repo_name" "$repo_url" "$repo_mirrors" "$repo_status"
fi
# Start new repository
repo_id="${BASH_REMATCH[1]}"
repo_name="$repo_id"
repo_url=""
repo_mirrors=""
repo_status=""
current_repo="$repo_id"
elif [[ "$line" =~ ^Repo-name[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_name="${BASH_REMATCH[1]}"
elif [[ "$line" =~ ^Repo-baseurl[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_url="${BASH_REMATCH[1]}"
elif [[ "$line" =~ ^Repo-mirrors[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_mirrors="${BASH_REMATCH[1]}"
elif [[ "$line" =~ ^Repo-status[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_status="${BASH_REMATCH[1]}"
fi
done <<< "$repo_info"
# Process the last repository
if [[ -n "$current_repo" ]]; then
process_yum_repo repos_ref first_ref "$repo_id" "$repo_name" "$repo_url" "$repo_mirrors" "$repo_status"
fi
}
# Process a single YUM repository and add it to the JSON
process_yum_repo() {
local -n _repos_ref=$1
local -n _first_ref=$2
local repo_id="$3"
local repo_name="$4"
local repo_url="$5"
local repo_mirrors="$6"
local repo_status="$7"
# Skip if we don't have essential info
if [[ -z "$repo_id" ]]; then
return
fi
# Determine if repository is enabled
local is_enabled=false
if [[ "$repo_status" == "enabled" ]]; then
is_enabled=true
fi
# Use baseurl if available, otherwise use mirrors URL
local final_url=""
if [[ -n "$repo_url" ]]; then
# Extract first URL if multiple are listed
final_url=$(echo "$repo_url" | head -n 1 | awk '{print $1}')
elif [[ -n "$repo_mirrors" ]]; then
final_url="$repo_mirrors"
fi
# Skip if we don't have any URL
if [[ -z "$final_url" ]]; then
return
fi
# Determine if repository uses HTTPS
local is_secure=false
if [[ "$final_url" =~ ^https:// ]]; then
is_secure=true
fi
# Generate repository name if not provided
if [[ -z "$repo_name" ]]; then
repo_name="$repo_id"
fi
# Clean up repository name and URL - escape quotes and backslashes
repo_name=$(echo "$repo_name" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
final_url=$(echo "$final_url" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
# Add to JSON
if [[ "$_first_ref" == true ]]; then
_first_ref=false
else
_repos_ref+=","
fi
_repos_ref+="{\"name\":\"$repo_name\",\"url\":\"$final_url\",\"distribution\":\"$OS_VERSION\",\"components\":\"main\",\"repoType\":\"rpm\",\"isEnabled\":$is_enabled,\"isSecure\":$is_secure}"
}
# Get package information based on OS
@@ -592,7 +697,7 @@ get_package_info() {
"ubuntu"|"debian")
get_apt_packages packages_json first
;;
"centos"|"rhel"|"fedora"|"ol")
"centos"|"rhel"|"fedora"|"ol"|"rocky")
get_yum_packages packages_json first
;;
*)
@@ -604,6 +709,135 @@ get_package_info() {
echo "$packages_json"
}
# Check and handle APT locks
handle_apt_locks() {
local interactive=${1:-false} # First parameter indicates if running interactively
local lock_files=(
"/var/lib/dpkg/lock"
"/var/lib/dpkg/lock-frontend"
"/var/lib/apt/lists/lock"
"/var/cache/apt/archives/lock"
)
local processes_found=false
local hung_processes=()
# Check for running APT processes
if pgrep -x "apt-get|apt|aptitude|dpkg|unattended-upgr" > /dev/null 2>&1; then
processes_found=true
info "Found running package management processes:"
echo "" >&2
# Get process info with ACTUAL elapsed time (not CPU time)
# Using ps -eo format to get real elapsed time
while IFS= read -r line; do
[[ -z "$line" ]] && continue
local pid=$(echo "$line" | awk '{print $1}')
local elapsed=$(echo "$line" | awk '{print $2}')
local cmd=$(echo "$line" | awk '{for(i=3;i<=NF;i++) printf "%s ", $i; print ""}')
# Display process info
echo " PID $pid: $cmd (running for $elapsed)" >&2
# Parse elapsed time and convert to seconds
# Format can be: MM:SS, HH:MM:SS, DD-HH:MM:SS, or just SS
# Use 10# prefix to force base-10 (avoid octal interpretation of leading zeros)
local runtime_seconds=0
if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then
# Format: DD-HH:MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then
# Format: HH:MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then
# Format: MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then
# Format: just seconds
runtime_seconds=$((10#${BASH_REMATCH[1]}))
fi
# Consider process hung if running for more than 5 minutes
if [[ $runtime_seconds -gt 300 ]]; then
hung_processes+=("$pid:$elapsed:$cmd")
fi
done < <(ps -eo pid,etime,cmd | grep -E "apt-get|apt[^-]|aptitude|dpkg|unattended-upgr" | grep -v grep | grep -v "ps -eo")
echo "" >&2
info "Detected ${#hung_processes[@]} hung process(es), interactive=$interactive"
# If hung processes found and running interactively, offer to kill them
if [[ ${#hung_processes[@]} -gt 0 && "$interactive" == "true" ]]; then
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
echo "" >&2
for process_info in "${hung_processes[@]}"; do
IFS=':' read -r pid elapsed cmd <<< "$process_info"
echo " PID $pid: $cmd (hung for $elapsed)" >&2
done
echo "" >&2
read -p "$(echo -e "${YELLOW}⚠️ Do you want to kill these processes? [y/N]:${NC} ")" -n 1 -r >&2
echo "" >&2
if [[ $REPLY =~ ^[Yy]$ ]]; then
for process_info in "${hung_processes[@]}"; do
IFS=':' read -r pid elapsed cmd <<< "$process_info"
info "Killing process $pid..."
if kill "$pid" 2>/dev/null; then
success "Killed process $pid"
sleep 1
# Check if process is still running
if kill -0 "$pid" 2>/dev/null; then
warning "Process $pid still running, using SIGKILL..."
kill -9 "$pid" 2>/dev/null
success "Force killed process $pid"
fi
else
warning "Could not kill process $pid (may require sudo)"
fi
done
# Wait a moment for locks to clear
sleep 2
else
info "Skipping process termination"
fi
elif [[ ${#hung_processes[@]} -gt 0 ]]; then
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
info "Run this command with sudo and interactively to kill hung processes"
fi
fi
# Check for stale lock files (files that exist but no process is holding them)
for lock_file in "${lock_files[@]}"; do
if [[ -f "$lock_file" ]]; then
# Try to get the PID from the lock file if it exists
if lsof "$lock_file" > /dev/null 2>&1; then
info "Lock file $lock_file is held by an active process"
else
warning "Found stale lock file: $lock_file"
info "Attempting to remove stale lock..."
if rm -f "$lock_file" 2>/dev/null; then
success "Removed stale lock: $lock_file"
else
warning "Could not remove lock (insufficient permissions): $lock_file"
fi
fi
fi
done
# If processes were found, return failure so caller can wait
if [[ "$processes_found" == true ]]; then
return 1
else
return 0
fi
}
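The hung-process detection above hinges on converting ps(1) etime values into seconds, with the `10#` prefix guarding against octal interpretation of leading zeros. A self-contained sketch of just that conversion, using the same patterns:

```bash
#!/bin/bash
# Convert a ps(1) etime value (SS, MM:SS, HH:MM:SS or DD-HH:MM:SS) into seconds.
etime_to_seconds() {
    local elapsed="$1" seconds=0
    if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then
        seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
    elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then
        seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
    elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then
        seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
    elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then
        seconds=$(( 10#${BASH_REMATCH[1]} ))
    fi
    echo "$seconds"
}

etime_to_seconds "07:30"       # 450 seconds -> over the 5-minute hung threshold
etime_to_seconds "1-02:03:04"  # 93784 seconds
```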
# Get package info for APT-based systems
get_apt_packages() {
local -n packages_ref=$1
@@ -620,10 +854,25 @@ get_apt_packages() {
else
retry_count=$((retry_count + 1))
if [[ $retry_count -lt $max_retries ]]; then
warning "APT lock detected, retrying in ${retry_delay} seconds... (attempt $retry_count/$max_retries)"
warning "APT lock detected (attempt $retry_count/$max_retries)"
# On first retry, try to handle locks
if [[ $retry_count -eq 1 ]]; then
info "Checking for stale APT locks..."
# Check if running interactively (stdin is a terminal OR stdout is a terminal)
local is_interactive=false
if [[ -t 0 ]] || [[ -t 1 ]]; then
is_interactive=true
fi
info "Interactive mode: $is_interactive"
handle_apt_locks "$is_interactive"
fi
info "Waiting ${retry_delay} seconds before retry..."
sleep $retry_delay
else
warning "APT lock persists after $max_retries attempts, continuing without update..."
warning "APT lock persists after $max_retries attempts"
warning "Continuing without updating package lists (will use cached data)"
fi
fi
done
@@ -1459,9 +1708,21 @@ main() {
"diagnostics")
show_diagnostics
;;
"clear-locks"|"unlock")
check_root
info "Checking APT locks and hung processes..."
echo ""
handle_apt_locks true
echo ""
if [[ $? -eq 0 ]]; then
success "No APT locks or processes blocking package management"
else
info "APT processes are still running - they may be legitimate operations"
fi
;;
*)
echo "PatchMon Agent v$AGENT_VERSION - API Credential Based"
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|diagnostics}"
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|clear-locks|diagnostics}"
echo ""
echo "Commands:"
echo " configure <API_ID> <API_KEY> [SERVER_URL] - Configure API credentials for this host"
@@ -1473,6 +1734,7 @@ main() {
echo " check-agent-update - Check for agent updates using timestamp comparison"
echo " update-agent - Update agent to latest version"
echo " update-crontab - Update crontab with current policy"
echo " clear-locks - Check and clear APT locks (interactive)"
echo " diagnostics - Show detailed system diagnostics"
echo ""
echo "Setup Process:"

496
agents/patchmon-docker-agent.sh Executable file
View File

@@ -0,0 +1,496 @@
#!/bin/bash
# PatchMon Docker Agent Script v1.2.9
# This script collects Docker container and image information and sends it to PatchMon
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-docker-agent.log"
# Curl flags placeholder (replaced by server based on SSL settings)
CURL_FLAGS=""
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging function
log() {
if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE" 2>/dev/null
fi
}
# Error handling
error() {
echo -e "${RED}ERROR: $1${NC}" >&2
log "ERROR: $1"
exit 1
}
# Info logging
info() {
echo -e "${BLUE} $1${NC}" >&2
log "INFO: $1"
}
# Success logging
success() {
echo -e "${GREEN}$1${NC}" >&2
log "SUCCESS: $1"
}
# Warning logging
warning() {
echo -e "${YELLOW}⚠️ $1${NC}" >&2
log "WARNING: $1"
}
# Check if Docker is installed and running
check_docker() {
if ! command -v docker &> /dev/null; then
error "Docker is not installed on this system"
fi
if ! docker info &> /dev/null; then
error "Docker daemon is not running or you don't have permission to access it. Try running with sudo."
fi
}
# Load credentials
load_credentials() {
if [[ ! -f "$CREDENTIALS_FILE" ]]; then
error "Credentials file not found at $CREDENTIALS_FILE. Please configure the main PatchMon agent first."
fi
source "$CREDENTIALS_FILE"
if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
error "API credentials not found in $CREDENTIALS_FILE"
fi
# Use PATCHMON_URL from credentials if available, otherwise use default
if [[ -n "$PATCHMON_URL" ]]; then
PATCHMON_SERVER="$PATCHMON_URL"
fi
}
# Load configuration
load_config() {
if [[ -f "$CONFIG_FILE" ]]; then
source "$CONFIG_FILE"
if [[ -n "$SERVER_URL" ]]; then
PATCHMON_SERVER="$SERVER_URL"
fi
fi
}
# Collect Docker containers
collect_containers() {
info "Collecting Docker container information..."
local containers_json="["
local first=true
# Get all containers (running and stopped)
while IFS='|' read -r container_id name image status state created started ports; do
if [[ -z "$container_id" ]]; then
continue
fi
# Parse image name and tag
local image_name="${image%%:*}"
local image_tag="${image##*:}"
if [[ "$image_tag" == "$image_name" ]]; then
image_tag="latest"
fi
# Determine image source based on registry
local image_source="docker-hub"
if [[ "$image_name" == ghcr.io/* ]]; then
image_source="github"
elif [[ "$image_name" == registry.gitlab.com/* ]]; then
image_source="gitlab"
elif [[ "$image_name" == *"/"*"/"* ]]; then
image_source="private"
fi
# Get repository name (without registry prefix for common registries)
local image_repository="$image_name"
image_repository="${image_repository#ghcr.io/}"
image_repository="${image_repository#registry.gitlab.com/}"
# Get image ID
local full_image_id=$(docker inspect --format='{{.Image}}' "$container_id" 2>/dev/null || echo "unknown")
full_image_id="${full_image_id#sha256:}"
# Normalize status (extract just the status keyword)
local normalized_status="unknown"
if [[ "$status" =~ ^Up ]]; then
normalized_status="running"
elif [[ "$status" =~ ^Exited ]]; then
normalized_status="exited"
elif [[ "$status" =~ ^Created ]]; then
normalized_status="created"
elif [[ "$status" =~ ^Restarting ]]; then
normalized_status="restarting"
elif [[ "$status" =~ ^Paused ]]; then
normalized_status="paused"
elif [[ "$status" =~ ^Dead ]]; then
normalized_status="dead"
fi
# Parse ports
local ports_json="null"
if [[ -n "$ports" && "$ports" != "null" ]]; then
# Convert Docker port format to JSON
ports_json=$(echo "$ports" | jq -R -s -c 'split(",") | map(select(length > 0)) | map(split("->") | {(.[0]): .[1]}) | add // {}')
fi
# Convert dates to ISO 8601 format
# If date conversion fails, use null instead of invalid date string
local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
local started_iso="null"
if [[ -n "$started" && "$started" != "null" ]]; then
started_iso=$(date -d "$started" -Iseconds 2>/dev/null || echo "null")
fi
# Add comma for JSON array
if [[ "$first" == false ]]; then
containers_json+=","
fi
first=false
# Build JSON object for this container
containers_json+="{\"container_id\":\"$container_id\","
containers_json+="\"name\":\"$name\","
containers_json+="\"image_name\":\"$image_name\","
containers_json+="\"image_tag\":\"$image_tag\","
containers_json+="\"image_repository\":\"$image_repository\","
containers_json+="\"image_source\":\"$image_source\","
containers_json+="\"image_id\":\"$full_image_id\","
containers_json+="\"status\":\"$normalized_status\","
containers_json+="\"state\":\"$state\","
containers_json+="\"ports\":$ports_json"
# Only add created_at if we have a valid date
if [[ "$created_iso" != "null" ]]; then
containers_json+=",\"created_at\":\"$created_iso\""
fi
# Only add started_at if we have a valid date
if [[ "$started_iso" != "null" ]]; then
containers_json+=",\"started_at\":\"$started_iso\""
fi
containers_json+="}"
done < <(docker ps -a --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.State}}|{{.CreatedAt}}|{{.RunningFor}}|{{.Ports}}' 2>/dev/null)
containers_json+="]"
echo "$containers_json"
}
# Collect Docker images
collect_images() {
info "Collecting Docker image information..."
local images_json="["
local first=true
while IFS='|' read -r repository tag image_id created size digest; do
if [[ -z "$repository" || "$repository" == "<none>" ]]; then
continue
fi
# Clean up tag
if [[ -z "$tag" || "$tag" == "<none>" ]]; then
tag="latest"
fi
# Clean image ID
image_id="${image_id#sha256:}"
# Determine source
local source="docker-hub"
if [[ "$repository" == ghcr.io/* ]]; then
source="github"
elif [[ "$repository" == registry.gitlab.com/* ]]; then
source="gitlab"
elif [[ "$repository" == *"/"*"/"* ]]; then
source="private"
fi
# Convert size to bytes (approximate)
local size_bytes=0
if [[ "$size" =~ ([0-9.]+)([KMGT]?B) ]]; then
local num="${BASH_REMATCH[1]}"
local unit="${BASH_REMATCH[2]}"
case "$unit" in
KB) size_bytes=$(echo "$num * 1024" | bc | cut -d. -f1) ;;
MB) size_bytes=$(echo "$num * 1024 * 1024" | bc | cut -d. -f1) ;;
GB) size_bytes=$(echo "$num * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
TB) size_bytes=$(echo "$num * 1024 * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
B) size_bytes=$(echo "$num" | cut -d. -f1) ;;
esac
fi
# Convert created date to ISO 8601
# If date conversion fails, use null instead of invalid date string
local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
# Add comma for JSON array
if [[ "$first" == false ]]; then
images_json+=","
fi
first=false
# Build JSON object for this image
images_json+="{\"repository\":\"$repository\","
images_json+="\"tag\":\"$tag\","
images_json+="\"image_id\":\"$image_id\","
images_json+="\"source\":\"$source\","
images_json+="\"size_bytes\":$size_bytes"
# Only add created_at if we have a valid date
if [[ "$created_iso" != "null" ]]; then
images_json+=",\"created_at\":\"$created_iso\""
fi
# Only add digest if present
if [[ -n "$digest" && "$digest" != "<none>" ]]; then
images_json+=",\"digest\":\"$digest\""
fi
images_json+="}"
done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.CreatedAt}}|{{.Size}}|{{.Digest}}' --no-trunc 2>/dev/null)
images_json+="]"
echo "$images_json"
}
# Check for image updates
check_image_updates() {
info "Checking for image updates..."
local updates_json="["
local first=true
local update_count=0
# Get all images
while IFS='|' read -r repository tag image_id digest; do
if [[ -z "$repository" || "$repository" == "<none>" || "$tag" == "<none>" ]]; then
continue
fi
# Skip checking 'latest' tag as it's always considered current by name
# We'll still check digest though
local full_image="${repository}:${tag}"
# Try to get remote digest from registry
# Use docker manifest inspect to avoid pulling the image
local remote_digest=$(docker manifest inspect "$full_image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty' 2>/dev/null)
if [[ -z "$remote_digest" ]]; then
# If manifest inspect fails, try buildx imagetools inspect (works for more registries)
remote_digest=$(docker buildx imagetools inspect "$full_image" 2>/dev/null | grep -oP 'Digest:\s*\K\S+' | head -1)
fi
# Clean up digests for comparison
local local_digest="${digest#sha256:}"
remote_digest="${remote_digest#sha256:}"
# If we got a remote digest and it's different from local, there's an update
if [[ -n "$remote_digest" && -n "$local_digest" && "$remote_digest" != "$local_digest" ]]; then
if [[ "$first" == false ]]; then
updates_json+=","
fi
first=false
# Build update JSON object
updates_json+="{\"repository\":\"$repository\","
updates_json+="\"current_tag\":\"$tag\","
updates_json+="\"available_tag\":\"$tag\","
updates_json+="\"current_digest\":\"$local_digest\","
updates_json+="\"available_digest\":\"$remote_digest\","
updates_json+="\"image_id\":\"${image_id#sha256:}\""
updates_json+="}"
((update_count++))
fi
done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.Digest}}' --no-trunc 2>/dev/null)
updates_json+="]"
info "Found $update_count image update(s) available"
echo "$updates_json"
}
# Send Docker data to server
send_docker_data() {
load_credentials
info "Collecting Docker data..."
local containers=$(collect_containers)
local images=$(collect_images)
local updates=$(check_image_updates)
# Count collected items
local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")
info "Found $container_count containers, $image_count images, and $update_count update(s) available"
# Build payload
local payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":$images,\"updates\":$updates}"
# Send to server
info "Sending Docker data to PatchMon server..."
local response=$(curl $CURL_FLAGS -s -w "\n%{http_code}" -X POST \
-H "Content-Type: application/json" \
-d "$payload" \
"${PATCHMON_SERVER}/api/${API_VERSION}/docker/collect" 2>&1)
local http_code=$(echo "$response" | tail -n1)
local response_body=$(echo "$response" | head -n-1)
if [[ "$http_code" == "200" ]]; then
success "Docker data sent successfully!"
log "Docker data sent: $container_count containers, $image_count images"
return 0
else
error "Failed to send Docker data. HTTP Status: $http_code\nResponse: $response_body"
fi
}
# Test Docker data collection without sending
test_collection() {
check_docker
info "Testing Docker data collection (dry run)..."
echo ""
local containers=$(collect_containers)
local images=$(collect_images)
local updates=$(check_image_updates)
local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo -e "${GREEN}Docker Data Collection Results${NC}"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo -e "Containers found: ${GREEN}$container_count${NC}"
echo -e "Images found: ${GREEN}$image_count${NC}"
echo -e "Updates available: ${YELLOW}$update_count${NC}"
echo ""
if command -v jq &> /dev/null; then
echo "━━━ Containers ━━━"
echo "$containers" | jq -r '.[] | "\(.name) (\(.status)) - \(.image_name):\(.image_tag)"' | head -10
if [[ $container_count -gt 10 ]]; then
echo "... and $((container_count - 10)) more"
fi
echo ""
echo "━━━ Images ━━━"
echo "$images" | jq -r '.[] | "\(.repository):\(.tag) (\(.size_bytes / 1024 / 1024 | floor)MB)"' | head -10
if [[ $image_count -gt 10 ]]; then
echo "... and $((image_count - 10)) more"
fi
if [[ $update_count -gt 0 ]]; then
echo ""
echo "━━━ Available Updates ━━━"
echo "$updates" | jq -r '.[] | "\(.repository):\(.current_tag) → \(.available_tag)"'
fi
fi
echo ""
success "Test collection completed successfully!"
}
# Show help
show_help() {
cat << EOF
PatchMon Docker Agent v${AGENT_VERSION}
This agent collects Docker container and image information and sends it to PatchMon.
USAGE:
$0 <command>
COMMANDS:
collect Collect and send Docker data to PatchMon server
test Test Docker data collection without sending (dry run)
help Show this help message
REQUIREMENTS:
- Docker must be installed and running
- Main PatchMon agent must be configured first
- Credentials file must exist at $CREDENTIALS_FILE
EXAMPLES:
# Test collection (dry run)
sudo $0 test
# Collect and send Docker data
sudo $0 collect
SCHEDULING:
To run this agent automatically, add a cron job:
# Run every 5 minutes
*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect
# Run every hour
0 * * * * /usr/local/bin/patchmon-docker-agent.sh collect
FILES:
Config: $CONFIG_FILE
Credentials: $CREDENTIALS_FILE
Log: $LOG_FILE
EOF
}
# Main function
main() {
case "$1" in
"collect")
check_docker
load_config
send_docker_data
;;
"test")
check_docker
load_config
test_collection
;;
"help"|"--help"|"-h"|"")
show_help
;;
*)
error "Unknown command: $1\n\nRun '$0 help' for usage information."
;;
esac
}
# Run main function
main "$@"

View File

@@ -1,6 +1,6 @@
{
"name": "patchmon-backend",
"version": "1.2.7",
"version": "1.2.9",
"description": "Backend API for Linux Patch Monitoring System",
"license": "AGPL-3.0",
"main": "src/server.js",
@@ -14,14 +14,18 @@
"db:studio": "prisma studio"
},
"dependencies": {
"@bull-board/api": "^6.13.0",
"@bull-board/express": "^6.13.0",
"@prisma/client": "^6.1.0",
"bcryptjs": "^2.4.3",
"bullmq": "^5.61.0",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"express": "^5.0.0",
"express-rate-limit": "^7.5.0",
"express-validator": "^7.2.0",
"helmet": "^8.0.0",
"ioredis": "^5.8.1",
"jsonwebtoken": "^9.0.2",
"moment": "^2.30.1",
"qrcode": "^1.5.4",

View File

@@ -0,0 +1,119 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp
DO $$
DECLARE
old_migration_exists boolean := false;
table_exists boolean := false;
failed_migration_exists boolean := false;
new_migration_exists boolean := false;
BEGIN
-- Check if the old migration name exists
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = 'add_user_sessions'
) INTO old_migration_exists;
-- Check if user_sessions table exists
SELECT EXISTS (
SELECT 1 FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'user_sessions'
) INTO table_exists;
-- Check if there's a failed migration attempt
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL
) INTO failed_migration_exists;
-- Check if the new migration already exists and is successful
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NOT NULL
) INTO new_migration_exists;
-- FIRST: Handle failed migration (must be marked as rolled back)
IF failed_migration_exists THEN
RAISE NOTICE 'Found failed migration attempt - marking as rolled back';
-- Mark the failed migration as rolled back (required by Prisma)
UPDATE _prisma_migrations
SET rolled_back_at = NOW()
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL;
RAISE NOTICE 'Failed migration marked as rolled back';
-- If table exists, it means the migration partially succeeded
IF table_exists THEN
RAISE NOTICE 'Table exists - migration was partially successful, will be handled by next migration';
ELSE
RAISE NOTICE 'Table does not exist - migration will retry after rollback';
END IF;
END IF;
-- SECOND: Handle old migration name (1.2.7 -> 1.2.8+ upgrade)
IF old_migration_exists AND table_exists THEN
RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';
-- Update the old migration name to the new timestamped version
UPDATE _prisma_migrations
SET migration_name = '20251005000000_add_user_sessions'
WHERE migration_name = 'add_user_sessions';
RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
END IF;
-- THIRD: Handle case where table exists but no migration record exists (1.2.7 upgrade scenario)
IF table_exists AND NOT old_migration_exists AND NOT new_migration_exists THEN
RAISE NOTICE 'Table exists but no migration record found - creating migration record for 1.2.7 upgrade';
-- Insert a successful migration record for the existing table
INSERT INTO _prisma_migrations (
id,
checksum,
finished_at,
migration_name,
logs,
rolled_back_at,
started_at,
applied_steps_count
) VALUES (
gen_random_uuid()::text,
'', -- Empty checksum since we're reconciling
NOW(),
'20251005000000_add_user_sessions',
'Reconciled from 1.2.7 - table already exists',
NULL,
NOW(),
1
);
RAISE NOTICE 'Migration record created for existing table';
END IF;
-- FOURTH: If we have a rolled back migration and table exists, mark it as applied
IF failed_migration_exists AND table_exists THEN
RAISE NOTICE 'Migration was rolled back but table exists - marking as successfully applied';
-- Update the rolled back migration to be successful
UPDATE _prisma_migrations
SET
finished_at = NOW(),
rolled_back_at = NULL,
logs = 'Reconciled from failed state - table already exists'
WHERE migration_name = '20251005000000_add_user_sessions';
RAISE NOTICE 'Migration marked as successfully applied';
END IF;
-- If no issues found
IF NOT old_migration_exists AND NOT failed_migration_exists AND NOT (table_exists AND NOT new_migration_exists) THEN
RAISE NOTICE 'No migration reconciliation needed';
END IF;
END $$;
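Before upgrading, the states this block distinguishes can be inspected directly. A hedged sketch using psql, assuming DATABASE_URL points at the PatchMon Postgres database:

```bash
# Which user_sessions migration records exist, and in what state?
psql "$DATABASE_URL" -c "
  SELECT migration_name, started_at, finished_at, rolled_back_at
  FROM _prisma_migrations
  WHERE migration_name IN ('add_user_sessions', '20251005000000_add_user_sessions');"

# Does the user_sessions table itself exist?
psql "$DATABASE_URL" -c "
  SELECT EXISTS (
    SELECT 1 FROM information_schema.tables
    WHERE table_schema = 'public' AND table_name = 'user_sessions');"
```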

View File

@@ -0,0 +1,96 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp
DO $$
DECLARE
old_migration_exists boolean := false;
table_exists boolean := false;
failed_migration_exists boolean := false;
BEGIN
-- Check if the old migration name exists
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = 'add_user_sessions'
) INTO old_migration_exists;
-- Check if user_sessions table exists
SELECT EXISTS (
SELECT 1 FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'user_sessions'
) INTO table_exists;
-- Check if there's a failed migration attempt
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL
) INTO failed_migration_exists;
-- Scenario 1: Old migration exists, table exists, no failed migration
-- This means 1.2.7 was installed and we need to update the migration name
IF old_migration_exists AND table_exists AND NOT failed_migration_exists THEN
RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';
-- Update the old migration name to the new timestamped version
UPDATE _prisma_migrations
SET migration_name = '20251005000000_add_user_sessions'
WHERE migration_name = 'add_user_sessions';
RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
END IF;
-- Scenario 2: Failed migration exists (upgrade attempt gone wrong)
IF failed_migration_exists THEN
RAISE NOTICE 'Found failed migration attempt - cleaning up';
-- If table exists, it means the migration partially succeeded
IF table_exists THEN
RAISE NOTICE 'Table exists - marking migration as applied';
-- Delete the failed migration record
DELETE FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL;
-- Insert a successful migration record
INSERT INTO _prisma_migrations (
id,
checksum,
finished_at,
migration_name,
logs,
rolled_back_at,
started_at,
applied_steps_count
) VALUES (
gen_random_uuid()::text,
'', -- Empty checksum since we're reconciling
NOW(),
'20251005000000_add_user_sessions',
NULL,
NULL,
NOW(),
1
);
RAISE NOTICE 'Migration marked as successfully applied';
ELSE
RAISE NOTICE 'Table does not exist - removing failed migration to allow retry';
-- Just delete the failed migration to allow it to retry
DELETE FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL;
RAISE NOTICE 'Failed migration removed - will retry on next migration run';
END IF;
END IF;
-- Scenario 3: Everything is clean (fresh install or already reconciled)
IF NOT old_migration_exists AND NOT failed_migration_exists THEN
RAISE NOTICE 'No migration reconciliation needed';
END IF;
END $$;

View File

@@ -0,0 +1,94 @@
-- CreateTable
CREATE TABLE "docker_images" (
"id" TEXT NOT NULL,
"repository" TEXT NOT NULL,
"tag" TEXT NOT NULL DEFAULT 'latest',
"image_id" TEXT NOT NULL,
"digest" TEXT,
"size_bytes" BIGINT,
"source" TEXT NOT NULL DEFAULT 'docker-hub',
"created_at" TIMESTAMP(3) NOT NULL,
"last_pulled" TIMESTAMP(3),
"last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "docker_images_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "docker_containers" (
"id" TEXT NOT NULL,
"host_id" TEXT NOT NULL,
"container_id" TEXT NOT NULL,
"name" TEXT NOT NULL,
"image_id" TEXT,
"image_name" TEXT NOT NULL,
"image_tag" TEXT NOT NULL DEFAULT 'latest',
"status" TEXT NOT NULL,
"state" TEXT,
"ports" JSONB,
"created_at" TIMESTAMP(3) NOT NULL,
"started_at" TIMESTAMP(3),
"updated_at" TIMESTAMP(3) NOT NULL,
"last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "docker_containers_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "docker_image_updates" (
"id" TEXT NOT NULL,
"image_id" TEXT NOT NULL,
"current_tag" TEXT NOT NULL,
"available_tag" TEXT NOT NULL,
"is_security_update" BOOLEAN NOT NULL DEFAULT false,
"severity" TEXT,
"changelog_url" TEXT,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "docker_image_updates_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "docker_images_repository_idx" ON "docker_images"("repository");
-- CreateIndex
CREATE INDEX "docker_images_source_idx" ON "docker_images"("source");
-- CreateIndex
CREATE INDEX "docker_images_repository_tag_idx" ON "docker_images"("repository", "tag");
-- CreateIndex
CREATE UNIQUE INDEX "docker_images_repository_tag_image_id_key" ON "docker_images"("repository", "tag", "image_id");
-- CreateIndex
CREATE INDEX "docker_containers_host_id_idx" ON "docker_containers"("host_id");
-- CreateIndex
CREATE INDEX "docker_containers_image_id_idx" ON "docker_containers"("image_id");
-- CreateIndex
CREATE INDEX "docker_containers_status_idx" ON "docker_containers"("status");
-- CreateIndex
CREATE INDEX "docker_containers_name_idx" ON "docker_containers"("name");
-- CreateIndex
CREATE UNIQUE INDEX "docker_containers_host_id_container_id_key" ON "docker_containers"("host_id", "container_id");
-- CreateIndex
CREATE INDEX "docker_image_updates_image_id_idx" ON "docker_image_updates"("image_id");
-- CreateIndex
CREATE INDEX "docker_image_updates_is_security_update_idx" ON "docker_image_updates"("is_security_update");
-- CreateIndex
CREATE UNIQUE INDEX "docker_image_updates_image_id_available_tag_key" ON "docker_image_updates"("image_id", "available_tag");
-- AddForeignKey
ALTER TABLE "docker_containers" ADD CONSTRAINT "docker_containers_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "docker_image_updates" ADD CONSTRAINT "docker_image_updates_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -262,3 +262,65 @@ model auto_enrollment_tokens {
@@index([token_key])
@@index([is_active])
}
model docker_containers {
id String @id
host_id String
container_id String
name String
image_id String?
image_name String
image_tag String @default("latest")
status String
state String?
ports Json?
created_at DateTime
started_at DateTime?
updated_at DateTime
last_checked DateTime @default(now())
docker_images docker_images? @relation(fields: [image_id], references: [id], onDelete: SetNull)
@@unique([host_id, container_id])
@@index([host_id])
@@index([image_id])
@@index([status])
@@index([name])
}
model docker_images {
id String @id
repository String
tag String @default("latest")
image_id String
digest String?
size_bytes BigInt?
source String @default("docker-hub")
created_at DateTime
last_pulled DateTime?
last_checked DateTime @default(now())
updated_at DateTime
docker_containers docker_containers[]
docker_image_updates docker_image_updates[]
@@unique([repository, tag, image_id])
@@index([repository])
@@index([source])
@@index([repository, tag])
}
model docker_image_updates {
id String @id
image_id String
current_tag String
available_tag String
is_security_update Boolean @default(false)
severity String?
changelog_url String?
created_at DateTime @default(now())
updated_at DateTime
docker_images docker_images @relation(fields: [image_id], references: [id], onDelete: Cascade)
@@unique([image_id, available_tag])
@@index([image_id])
@@index([is_security_update])
}
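A hedged SQL sketch of how these relations can be inspected directly for debugging (the application itself reads them through Prisma); assumes DATABASE_URL points at the PatchMon database:

```bash
psql "$DATABASE_URL" -c "
  SELECT c.name, c.status, i.repository, i.tag,
         (SELECT count(*) FROM docker_image_updates u WHERE u.image_id = i.id) AS pending_updates
  FROM docker_containers c
  LEFT JOIN docker_images i ON i.id = c.image_id
  ORDER BY c.last_checked DESC
  LIMIT 20;"
```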

View File

@@ -0,0 +1,362 @@
const express = require("express");
const { queueManager, QUEUE_NAMES } = require("../services/automation");
const { authenticateToken } = require("../middleware/auth");
const router = express.Router();
// Get all queue statistics
router.get("/stats", authenticateToken, async (req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
res.json({
success: true,
data: stats,
});
} catch (error) {
console.error("Error fetching queue stats:", error);
res.status(500).json({
success: false,
error: "Failed to fetch queue statistics",
});
}
});
// Get specific queue statistics
router.get("/stats/:queueName", authenticateToken, async (req, res) => {
try {
const { queueName } = req.params;
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
return res.status(400).json({
success: false,
error: "Invalid queue name",
});
}
const stats = await queueManager.getQueueStats(queueName);
res.json({
success: true,
data: stats,
});
} catch (error) {
console.error("Error fetching queue stats:", error);
res.status(500).json({
success: false,
error: "Failed to fetch queue statistics",
});
}
});
// Get recent jobs for a queue
router.get("/jobs/:queueName", authenticateToken, async (req, res) => {
try {
const { queueName } = req.params;
const { limit = 10 } = req.query;
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
return res.status(400).json({
success: false,
error: "Invalid queue name",
});
}
const jobs = await queueManager.getRecentJobs(queueName, parseInt(limit));
// Format jobs for frontend
const formattedJobs = jobs.map((job) => ({
id: job.id,
name: job.name,
status: job.finishedOn
? job.failedReason
? "failed"
: "completed"
: "active",
progress: job.progress,
data: job.data,
returnvalue: job.returnvalue,
failedReason: job.failedReason,
processedOn: job.processedOn,
finishedOn: job.finishedOn,
createdAt: new Date(job.timestamp),
attemptsMade: job.attemptsMade,
delay: job.delay,
}));
res.json({
success: true,
data: formattedJobs,
});
} catch (error) {
console.error("Error fetching recent jobs:", error);
res.status(500).json({
success: false,
error: "Failed to fetch recent jobs",
});
}
});
// Trigger manual GitHub update check
router.post("/trigger/github-update", authenticateToken, async (req, res) => {
try {
const job = await queueManager.triggerGitHubUpdateCheck();
res.json({
success: true,
data: {
jobId: job.id,
message: "GitHub update check triggered successfully",
},
});
} catch (error) {
console.error("Error triggering GitHub update check:", error);
res.status(500).json({
success: false,
error: "Failed to trigger GitHub update check",
});
}
});
// Trigger manual session cleanup
router.post("/trigger/session-cleanup", authenticateToken, async (req, res) => {
try {
const job = await queueManager.triggerSessionCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Session cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering session cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger session cleanup",
});
}
});
// Trigger manual echo hello
router.post("/trigger/echo-hello", authenticateToken, async (req, res) => {
try {
const { message } = req.body;
const job = await queueManager.triggerEchoHello(message);
res.json({
success: true,
data: {
jobId: job.id,
message: "Echo hello triggered successfully",
},
});
} catch (error) {
console.error("Error triggering echo hello:", error);
res.status(500).json({
success: false,
error: "Failed to trigger echo hello",
});
}
});
// Trigger manual orphaned repo cleanup
router.post(
"/trigger/orphaned-repo-cleanup",
authenticateToken,
async (req, res) => {
try {
const job = await queueManager.triggerOrphanedRepoCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Orphaned repository cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering orphaned repository cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger orphaned repository cleanup",
});
}
},
);
// Get queue health status
router.get("/health", authenticateToken, async (req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
const totalJobs = Object.values(stats).reduce((sum, queueStats) => {
return sum + queueStats.waiting + queueStats.active + queueStats.failed;
}, 0);
const health = {
status: "healthy",
totalJobs,
queues: Object.keys(stats).length,
timestamp: new Date().toISOString(),
};
// Check for unhealthy conditions
if (totalJobs > 1000) {
health.status = "warning";
health.message = "High number of queued jobs";
}
const failedJobs = Object.values(stats).reduce((sum, queueStats) => {
return sum + queueStats.failed;
}, 0);
if (failedJobs > 10) {
health.status = "error";
health.message = "High number of failed jobs";
}
res.json({
success: true,
data: health,
});
} catch (error) {
console.error("Error checking queue health:", error);
res.status(500).json({
success: false,
error: "Failed to check queue health",
});
}
});
// Get automation overview (for dashboard cards)
router.get("/overview", authenticateToken, async (req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
// Get recent jobs for each queue to show last run times
const recentJobs = await Promise.all([
queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ECHO_HELLO, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
]);
// Calculate overview metrics
const overview = {
scheduledTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].delayed +
stats[QUEUE_NAMES.ECHO_HELLO].delayed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed,
runningTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
stats[QUEUE_NAMES.SESSION_CLEANUP].active +
stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].active +
stats[QUEUE_NAMES.ECHO_HELLO].active +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active,
failedTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].failed +
stats[QUEUE_NAMES.ECHO_HELLO].failed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed,
totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
return (
sum +
queueStats.completed +
queueStats.failed +
queueStats.active +
queueStats.waiting +
queueStats.delayed
);
}, 0),
// Automation details with last run times
automations: [
{
name: "GitHub Update Check",
queue: QUEUE_NAMES.GITHUB_UPDATE_CHECK,
description: "Checks for new PatchMon releases",
schedule: "Daily at midnight",
lastRun: recentJobs[0][0]?.finishedOn
? new Date(recentJobs[0][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[0][0]?.finishedOn || 0,
status: recentJobs[0][0]?.failedReason
? "Failed"
: recentJobs[0][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
},
{
name: "Session Cleanup",
queue: QUEUE_NAMES.SESSION_CLEANUP,
description: "Cleans up expired user sessions",
schedule: "Every hour",
lastRun: recentJobs[1][0]?.finishedOn
? new Date(recentJobs[1][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[1][0]?.finishedOn || 0,
status: recentJobs[1][0]?.failedReason
? "Failed"
: recentJobs[1][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.SESSION_CLEANUP],
},
{
name: "Echo Hello",
queue: QUEUE_NAMES.ECHO_HELLO,
description: "Simple test automation task",
schedule: "Manual only",
lastRun: recentJobs[2][0]?.finishedOn
? new Date(recentJobs[2][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[2][0]?.finishedOn || 0,
status: recentJobs[2][0]?.failedReason
? "Failed"
: recentJobs[2][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ECHO_HELLO],
},
{
name: "Orphaned Repo Cleanup",
queue: QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
description: "Removes repositories with no associated hosts",
schedule: "Daily at 2 AM",
lastRun: recentJobs[3][0]?.finishedOn
? new Date(recentJobs[3][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[3][0]?.finishedOn || 0,
status: recentJobs[3][0]?.failedReason
? "Failed"
: recentJobs[3][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
},
].sort((a, b) => {
// Sort by last run timestamp (most recent first)
// If both have never run (timestamp 0), maintain original order
if (a.lastRunTimestamp === 0 && b.lastRunTimestamp === 0) return 0;
if (a.lastRunTimestamp === 0) return 1; // Never run goes to bottom
if (b.lastRunTimestamp === 0) return -1; // Never run goes to bottom
return b.lastRunTimestamp - a.lastRunTimestamp; // Most recent first
}),
};
res.json({
success: true,
data: overview,
});
} catch (error) {
console.error("Error fetching automation overview:", error);
res.status(500).json({
success: false,
error: "Failed to fetch automation overview",
});
}
});
module.exports = router;
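A hedged example of exercising these endpoints from the shell. The /api/v1/automation mount path and the Bearer header are assumptions based on the README's "REST API under /api/v1 with JWT auth" note; this diff only shows the router itself:

```bash
TOKEN="<jwt-from-login>"                           # placeholder
BASE="http://localhost:3001/api/v1/automation"     # assumed mount path

# Queue statistics and health
curl -s -H "Authorization: Bearer $TOKEN" "$BASE/stats"  | jq .
curl -s -H "Authorization: Bearer $TOKEN" "$BASE/health" | jq .

# Manually trigger the session cleanup job
curl -s -X POST -H "Authorization: Bearer $TOKEN" "$BASE/trigger/session-cleanup" | jq .
```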

View File

@@ -0,0 +1,779 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { PrismaClient } = require("@prisma/client");
const { v4: uuidv4 } = require("uuid");
const prisma = new PrismaClient();
const router = express.Router();
// Helper function to convert BigInt fields to strings for JSON serialization
const convertBigIntToString = (obj) => {
if (obj === null || obj === undefined) return obj;
if (typeof obj === "bigint") {
return obj.toString();
}
if (Array.isArray(obj)) {
return obj.map(convertBigIntToString);
}
if (typeof obj === "object") {
const converted = {};
for (const key in obj) {
converted[key] = convertBigIntToString(obj[key]);
}
return converted;
}
return obj;
};
// GET /api/v1/docker/dashboard - Get Docker dashboard statistics
router.get("/dashboard", authenticateToken, async (_req, res) => {
try {
// Get total hosts with Docker containers
const hostsWithDocker = await prisma.docker_containers.groupBy({
by: ["host_id"],
_count: true,
});
// Get total containers
const totalContainers = await prisma.docker_containers.count();
// Get running containers
const runningContainers = await prisma.docker_containers.count({
where: { status: "running" },
});
// Get total images
const totalImages = await prisma.docker_images.count();
// Get available updates
const availableUpdates = await prisma.docker_image_updates.count();
// Get containers by status
const containersByStatus = await prisma.docker_containers.groupBy({
by: ["status"],
_count: true,
});
// Get images by source
const imagesBySource = await prisma.docker_images.groupBy({
by: ["source"],
_count: true,
});
res.json({
stats: {
totalHostsWithDocker: hostsWithDocker.length,
totalContainers,
runningContainers,
totalImages,
availableUpdates,
},
containersByStatus,
imagesBySource,
});
} catch (error) {
console.error("Error fetching Docker dashboard:", error);
res.status(500).json({ error: "Failed to fetch Docker dashboard" });
}
});
// GET /api/v1/docker/containers - Get all containers with filters
router.get("/containers", authenticateToken, async (req, res) => {
try {
const { status, hostId, imageId, search, page = 1, limit = 50 } = req.query;
const where = {};
if (status) where.status = status;
if (hostId) where.host_id = hostId;
if (imageId) where.image_id = imageId;
if (search) {
where.OR = [
{ name: { contains: search, mode: "insensitive" } },
{ image_name: { contains: search, mode: "insensitive" } },
];
}
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const [containers, total] = await Promise.all([
prisma.docker_containers.findMany({
where,
include: {
docker_images: true,
},
orderBy: { updated_at: "desc" },
skip,
take,
}),
prisma.docker_containers.count({ where }),
]);
// Get host information for each container
const hostIds = [...new Set(containers.map((c) => c.host_id))];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true, ip: true },
});
const hostsMap = hosts.reduce((acc, host) => {
acc[host.id] = host;
return acc;
}, {});
const containersWithHosts = containers.map((container) => ({
...container,
host: hostsMap[container.host_id],
}));
res.json(
convertBigIntToString({
containers: containersWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching containers:", error);
res.status(500).json({ error: "Failed to fetch containers" });
}
});
// GET /api/v1/docker/containers/:id - Get container detail
router.get("/containers/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;
const container = await prisma.docker_containers.findUnique({
where: { id },
include: {
docker_images: {
include: {
docker_image_updates: true,
},
},
},
});
if (!container) {
return res.status(404).json({ error: "Container not found" });
}
// Get host information
const host = await prisma.hosts.findUnique({
where: { id: container.host_id },
select: {
id: true,
friendly_name: true,
hostname: true,
ip: true,
os_type: true,
os_version: true,
},
});
// Get other containers using the same image
const similarContainers = await prisma.docker_containers.findMany({
where: {
image_id: container.image_id,
id: { not: id },
},
take: 10,
});
res.json(
convertBigIntToString({
container: {
...container,
host,
},
similarContainers,
}),
);
} catch (error) {
console.error("Error fetching container detail:", error);
res.status(500).json({ error: "Failed to fetch container detail" });
}
});
// GET /api/v1/docker/images - Get all images with filters
router.get("/images", authenticateToken, async (req, res) => {
try {
const { source, search, page = 1, limit = 50 } = req.query;
const where = {};
if (source) where.source = source;
if (search) {
where.OR = [
{ repository: { contains: search, mode: "insensitive" } },
{ tag: { contains: search, mode: "insensitive" } },
];
}
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const [images, total] = await Promise.all([
prisma.docker_images.findMany({
where,
include: {
_count: {
select: {
docker_containers: true,
docker_image_updates: true,
},
},
docker_image_updates: {
take: 1,
orderBy: { created_at: "desc" },
},
},
orderBy: { updated_at: "desc" },
skip,
take,
}),
prisma.docker_images.count({ where }),
]);
// Get unique hosts using each image
const imagesWithHosts = await Promise.all(
images.map(async (image) => {
const containers = await prisma.docker_containers.findMany({
where: { image_id: image.id },
select: { host_id: true },
distinct: ["host_id"],
});
return {
...image,
hostsCount: containers.length,
hasUpdates: image._count.docker_image_updates > 0,
};
}),
);
res.json(
convertBigIntToString({
images: imagesWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching images:", error);
res.status(500).json({ error: "Failed to fetch images" });
}
});
// GET /api/v1/docker/images/:id - Get image detail
router.get("/images/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;
const image = await prisma.docker_images.findUnique({
where: { id },
include: {
docker_containers: {
take: 100,
},
docker_image_updates: {
orderBy: { created_at: "desc" },
},
},
});
if (!image) {
return res.status(404).json({ error: "Image not found" });
}
// Get unique hosts using this image
const hostIds = [...new Set(image.docker_containers.map((c) => c.host_id))];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true, ip: true },
});
res.json(
convertBigIntToString({
image,
hosts,
totalContainers: image.docker_containers.length,
totalHosts: hosts.length,
}),
);
} catch (error) {
console.error("Error fetching image detail:", error);
res.status(500).json({ error: "Failed to fetch image detail" });
}
});
// GET /api/v1/docker/hosts - Get all hosts with Docker
router.get("/hosts", authenticateToken, async (req, res) => {
try {
const { page = 1, limit = 50 } = req.query;
// Get hosts that have Docker containers
const hostsWithContainers = await prisma.docker_containers.groupBy({
by: ["host_id"],
_count: true,
});
const hostIds = hostsWithContainers.map((h) => h.host_id);
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
skip,
take,
orderBy: { friendly_name: "asc" },
});
// Get container counts and statuses for each host
const hostsWithStats = await Promise.all(
hosts.map(async (host) => {
const [totalContainers, runningContainers, totalImages] =
await Promise.all([
prisma.docker_containers.count({
where: { host_id: host.id },
}),
prisma.docker_containers.count({
where: { host_id: host.id, status: "running" },
}),
prisma.docker_containers.findMany({
where: { host_id: host.id },
select: { image_id: true },
distinct: ["image_id"],
}),
]);
return {
...host,
dockerStats: {
totalContainers,
runningContainers,
totalImages: totalImages.length,
},
};
}),
);
res.json(
convertBigIntToString({
hosts: hostsWithStats,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total: hostIds.length,
totalPages: Math.ceil(hostIds.length / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching Docker hosts:", error);
res.status(500).json({ error: "Failed to fetch Docker hosts" });
}
});
// GET /api/v1/docker/hosts/:id - Get host Docker detail
router.get("/hosts/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;
const host = await prisma.hosts.findUnique({
where: { id },
});
if (!host) {
return res.status(404).json({ error: "Host not found" });
}
// Get containers on this host
const containers = await prisma.docker_containers.findMany({
where: { host_id: id },
include: {
docker_images: {
include: {
docker_image_updates: true,
},
},
},
orderBy: { name: "asc" },
});
// Get unique images on this host
const imageIds = [...new Set(containers.map((c) => c.image_id))].filter(
Boolean,
);
const images = await prisma.docker_images.findMany({
where: { id: { in: imageIds } },
});
// Get container statistics
const runningContainers = containers.filter(
(c) => c.status === "running",
).length;
const stoppedContainers = containers.filter(
(c) => c.status === "exited" || c.status === "stopped",
).length;
res.json(
convertBigIntToString({
host,
containers,
images,
stats: {
totalContainers: containers.length,
runningContainers,
stoppedContainers,
totalImages: images.length,
},
}),
);
} catch (error) {
console.error("Error fetching host Docker detail:", error);
res.status(500).json({ error: "Failed to fetch host Docker detail" });
}
});
// GET /api/v1/docker/updates - Get available updates
router.get("/updates", authenticateToken, async (req, res) => {
try {
const { page = 1, limit = 50, securityOnly = false } = req.query;
const where = {};
if (securityOnly === "true") {
where.is_security_update = true;
}
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const [updates, total] = await Promise.all([
prisma.docker_image_updates.findMany({
where,
include: {
docker_images: {
include: {
docker_containers: {
select: {
id: true,
host_id: true,
name: true,
},
},
},
},
},
orderBy: [{ is_security_update: "desc" }, { created_at: "desc" }],
skip,
take,
}),
prisma.docker_image_updates.count({ where }),
]);
// Get affected hosts for each update
const updatesWithHosts = await Promise.all(
updates.map(async (update) => {
const hostIds = [
...new Set(
update.docker_images.docker_containers.map((c) => c.host_id),
),
];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true },
});
return {
...update,
affectedHosts: hosts,
affectedContainersCount:
update.docker_images.docker_containers.length,
};
}),
);
res.json(
convertBigIntToString({
updates: updatesWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching Docker updates:", error);
res.status(500).json({ error: "Failed to fetch Docker updates" });
}
});
// POST /api/v1/docker/collect - Collect Docker data from agent
router.post("/collect", async (req, res) => {
try {
const { apiId, apiKey, containers, images, updates } = req.body;
// Validate API credentials
const host = await prisma.hosts.findFirst({
where: { api_id: apiId, api_key: apiKey },
});
if (!host) {
return res.status(401).json({ error: "Invalid API credentials" });
}
const now = new Date();
// Helper function to validate and parse dates
const parseDate = (dateString) => {
if (!dateString) return now;
const date = new Date(dateString);
return Number.isNaN(date.getTime()) ? now : date;
};
// Process containers
if (containers && Array.isArray(containers)) {
for (const containerData of containers) {
const containerId = uuidv4();
// Find or create image
let imageId = null;
if (containerData.image_repository && containerData.image_tag) {
const image = await prisma.docker_images.upsert({
where: {
repository_tag_image_id: {
repository: containerData.image_repository,
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
},
},
update: {
last_checked: now,
updated_at: now,
},
create: {
id: uuidv4(),
repository: containerData.image_repository,
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
source: containerData.image_source || "docker-hub",
created_at: parseDate(containerData.created_at),
updated_at: now,
},
});
imageId = image.id;
}
// Upsert container
await prisma.docker_containers.upsert({
where: {
host_id_container_id: {
host_id: host.id,
container_id: containerData.container_id,
},
},
update: {
name: containerData.name,
image_id: imageId,
image_name: containerData.image_name,
image_tag: containerData.image_tag || "latest",
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
started_at: containerData.started_at
? parseDate(containerData.started_at)
: null,
updated_at: now,
last_checked: now,
},
create: {
id: containerId,
host_id: host.id,
container_id: containerData.container_id,
name: containerData.name,
image_id: imageId,
image_name: containerData.image_name,
image_tag: containerData.image_tag || "latest",
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
created_at: parseDate(containerData.created_at),
started_at: containerData.started_at
? parseDate(containerData.started_at)
: null,
updated_at: now,
},
});
}
}
// Process standalone images
if (images && Array.isArray(images)) {
for (const imageData of images) {
await prisma.docker_images.upsert({
where: {
repository_tag_image_id: {
repository: imageData.repository,
tag: imageData.tag,
image_id: imageData.image_id,
},
},
update: {
size_bytes: imageData.size_bytes
? BigInt(imageData.size_bytes)
: null,
last_checked: now,
updated_at: now,
},
create: {
id: uuidv4(),
repository: imageData.repository,
tag: imageData.tag,
image_id: imageData.image_id,
digest: imageData.digest,
size_bytes: imageData.size_bytes
? BigInt(imageData.size_bytes)
: null,
source: imageData.source || "docker-hub",
created_at: parseDate(imageData.created_at),
updated_at: now,
},
});
}
}
// Process updates
// First, get all images for this host to clean up old updates
const hostImageIds = await prisma.docker_containers
.findMany({
where: { host_id: host.id },
select: { image_id: true },
distinct: ["image_id"],
})
.then((results) => results.map((r) => r.image_id).filter(Boolean));
// Delete old updates for images on this host that are no longer reported
if (hostImageIds.length > 0) {
const reportedImageIds = [];
// Process new updates
if (updates && Array.isArray(updates)) {
for (const updateData of updates) {
// Find the image by repository, tag, and image_id
const image = await prisma.docker_images.findFirst({
where: {
repository: updateData.repository,
tag: updateData.current_tag,
image_id: updateData.image_id,
},
});
if (image) {
reportedImageIds.push(image.id);
// Store digest info in changelog_url field as JSON for now
const digestInfo = JSON.stringify({
method: "digest_comparison",
current_digest: updateData.current_digest,
available_digest: updateData.available_digest,
});
// Upsert the update record
await prisma.docker_image_updates.upsert({
where: {
image_id_available_tag: {
image_id: image.id,
available_tag: updateData.available_tag,
},
},
update: {
updated_at: now,
changelog_url: digestInfo,
severity: "digest_changed",
},
create: {
id: uuidv4(),
image_id: image.id,
current_tag: updateData.current_tag,
available_tag: updateData.available_tag,
severity: "digest_changed",
changelog_url: digestInfo,
updated_at: now,
},
});
}
}
}
// Remove stale updates for images on this host that are no longer in the updates list
const imageIdsToCleanup = hostImageIds.filter(
(id) => !reportedImageIds.includes(id),
);
if (imageIdsToCleanup.length > 0) {
await prisma.docker_image_updates.deleteMany({
where: {
image_id: { in: imageIdsToCleanup },
},
});
}
}
res.json({ success: true, message: "Docker data collected successfully" });
} catch (error) {
console.error("Error collecting Docker data:", error);
console.error("Error stack:", error.stack);
console.error("Request body:", JSON.stringify(req.body, null, 2));
res.status(500).json({
error: "Failed to collect Docker data",
message: error.message,
details: process.env.NODE_ENV === "development" ? error.stack : undefined,
});
}
});
// GET /api/v1/docker/agent - Serve the Docker agent installation script
router.get("/agent", async (_req, res) => {
try {
const fs = require("node:fs");
const path = require("node:path");
const agentPath = path.join(
__dirname,
"../../..",
"agents",
"patchmon-docker-agent.sh",
);
// Check if file exists
if (!fs.existsSync(agentPath)) {
return res.status(404).json({ error: "Docker agent script not found" });
}
// Read and serve the file
const agentScript = fs.readFileSync(agentPath, "utf8");
res.setHeader("Content-Type", "text/x-shellscript");
res.setHeader(
"Content-Disposition",
'inline; filename="patchmon-docker-agent.sh"',
);
res.send(agentScript);
} catch (error) {
console.error("Error serving Docker agent:", error);
res.status(500).json({ error: "Failed to serve Docker agent script" });
}
});
module.exports = router;
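
For reference, a sketch of the payload shape the /collect handler above reads. The shipped collector is the shell agent served by GET /api/v1/docker/agent, so this is illustrative only; credentials, host and port are placeholders, and apiId/apiKey must match a row in hosts.

const payload = {
  apiId: "patchmon-api-id",   // placeholder; matched against hosts.api_id
  apiKey: "patchmon-api-key", // placeholder; matched against hosts.api_key
  containers: [
    {
      container_id: "3f2a1b0c9d8e",
      name: "nginx",
      image_name: "nginx",
      image_repository: "nginx",
      image_tag: "1.27",
      image_id: "sha256:0d1e2f3a4b5c",
      image_source: "docker-hub",
      status: "running",
      state: "running",
      ports: null,
      created_at: "2025-10-13T20:00:00Z",
      started_at: "2025-10-13T20:01:00Z",
    },
  ],
  images: [],  // standalone images: repository, tag, image_id, digest, size_bytes, source, created_at
  updates: [], // digest updates: repository, current_tag, image_id, available_tag, current_digest, available_digest
};

fetch("http://localhost:3001/api/v1/docker/collect", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(payload),
})
  .then((res) => res.json())
  .then(console.log);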

View File

@@ -0,0 +1,236 @@
const express = require("express");
const { createPrismaClient } = require("../config/database");
const bcrypt = require("bcryptjs");
const router = express.Router();
const prisma = createPrismaClient();
// Middleware to authenticate API key
const authenticateApiKey = async (req, res, next) => {
try {
const authHeader = req.headers.authorization;
if (!authHeader || !authHeader.startsWith("Basic ")) {
return res
.status(401)
.json({ error: "Missing or invalid authorization header" });
}
// Decode base64 credentials
const base64Credentials = authHeader.split(" ")[1];
const credentials = Buffer.from(base64Credentials, "base64").toString(
"ascii",
);
const [apiKey, apiSecret] = credentials.split(":");
if (!apiKey || !apiSecret) {
return res.status(401).json({ error: "Invalid credentials format" });
}
// Find the token in database
const token = await prisma.auto_enrollment_tokens.findUnique({
where: { token_key: apiKey },
include: {
users: {
select: {
id: true,
username: true,
role: true,
},
},
},
});
if (!token) {
console.log(`API key not found: ${apiKey}`);
return res.status(401).json({ error: "Invalid API key" });
}
// Check if token is active
if (!token.is_active) {
return res.status(401).json({ error: "API key is disabled" });
}
// Check if token has expired
if (token.expires_at && new Date(token.expires_at) < new Date()) {
return res.status(401).json({ error: "API key has expired" });
}
// Check if token is for gethomepage integration
if (token.metadata?.integration_type !== "gethomepage") {
return res.status(401).json({ error: "Invalid API key type" });
}
// Verify the secret
const isValidSecret = await bcrypt.compare(apiSecret, token.token_secret);
if (!isValidSecret) {
return res.status(401).json({ error: "Invalid API secret" });
}
// Check IP restrictions if any
if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
const clientIp = req.ip || req.connection.remoteAddress;
const forwardedFor = req.headers["x-forwarded-for"];
const realIp = req.headers["x-real-ip"];
// Get the actual client IP (considering proxies)
const actualClientIp = forwardedFor
? forwardedFor.split(",")[0].trim()
: realIp || clientIp;
const isAllowedIp = token.allowed_ip_ranges.some((range) => {
// Simple IP range check (can be enhanced for CIDR support)
return actualClientIp.startsWith(range) || actualClientIp === range;
});
if (!isAllowedIp) {
console.log(
`IP validation failed. Client IP: ${actualClientIp}, Allowed ranges: ${token.allowed_ip_ranges.join(", ")}`,
);
return res.status(403).json({ error: "IP address not allowed" });
}
}
// Update last used timestamp
await prisma.auto_enrollment_tokens.update({
where: { id: token.id },
data: { last_used_at: new Date() },
});
// Attach token info to request
req.apiToken = token;
next();
} catch (error) {
console.error("API key authentication error:", error);
res.status(500).json({ error: "Authentication failed" });
}
};
// Get homepage widget statistics
router.get("/stats", authenticateApiKey, async (_req, res) => {
try {
// Get total hosts count
const totalHosts = await prisma.hosts.count({
where: { status: "active" },
});
// Get total outdated packages count
const totalOutdatedPackages = await prisma.host_packages.count({
where: { needs_update: true },
});
// Get total repositories count
const totalRepos = await prisma.repositories.count({
where: { is_active: true },
});
// Get hosts that need updates (have outdated packages)
const hostsNeedingUpdates = await prisma.hosts.count({
where: {
status: "active",
host_packages: {
some: {
needs_update: true,
},
},
},
});
// Get security updates count
const securityUpdates = await prisma.host_packages.count({
where: {
needs_update: true,
is_security_update: true,
},
});
// Get hosts with security updates
const hostsWithSecurityUpdates = await prisma.hosts.count({
where: {
status: "active",
host_packages: {
some: {
needs_update: true,
is_security_update: true,
},
},
},
});
// Get up-to-date hosts count
const upToDateHosts = totalHosts - hostsNeedingUpdates;
// Get recent update activity (last 24 hours)
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const recentUpdates = await prisma.update_history.count({
where: {
timestamp: {
gte: oneDayAgo,
},
status: "success",
},
});
// Get OS distribution
const osDistribution = await prisma.hosts.groupBy({
by: ["os_type"],
where: { status: "active" },
_count: {
id: true,
},
orderBy: {
_count: {
id: "desc",
},
},
});
// Format OS distribution data
const osDistributionFormatted = osDistribution.map((os) => ({
name: os.os_type,
count: os._count.id,
}));
// Extract top 3 OS types for flat display in widgets
const top_os_1 = osDistributionFormatted[0] || { name: "None", count: 0 };
const top_os_2 = osDistributionFormatted[1] || { name: "None", count: 0 };
const top_os_3 = osDistributionFormatted[2] || { name: "None", count: 0 };
// Prepare response data
const stats = {
total_hosts: totalHosts,
total_outdated_packages: totalOutdatedPackages,
total_repos: totalRepos,
hosts_needing_updates: hostsNeedingUpdates,
up_to_date_hosts: upToDateHosts,
security_updates: securityUpdates,
hosts_with_security_updates: hostsWithSecurityUpdates,
recent_updates_24h: recentUpdates,
os_distribution: osDistributionFormatted,
// Flattened OS data for easy widget display
top_os_1_name: top_os_1.name,
top_os_1_count: top_os_1.count,
top_os_2_name: top_os_2.name,
top_os_2_count: top_os_2.count,
top_os_3_name: top_os_3.name,
top_os_3_count: top_os_3.count,
last_updated: new Date().toISOString(),
};
res.json(stats);
} catch (error) {
console.error("Error fetching homepage stats:", error);
res.status(500).json({ error: "Failed to fetch statistics" });
}
});
// Health check endpoint for the API
router.get("/health", authenticateApiKey, async (req, res) => {
res.json({
status: "ok",
timestamp: new Date().toISOString(),
api_key: req.apiToken.token_name,
});
});
module.exports = router;
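
A sketch of how a GetHomepage-style widget could call the /stats endpoint above: the Basic credentials are the token_key and the plain-text secret issued when a gethomepage auto-enrollment token is created. Key, secret, host and port below are placeholders.

const key = "homepage-api-key";       // token_key (placeholder)
const secret = "homepage-api-secret"; // secret issued at token creation (placeholder)
const auth = Buffer.from(`${key}:${secret}`).toString("base64");

fetch("http://localhost:3001/api/v1/gethomepage/stats", {
  headers: { Authorization: `Basic ${auth}` },
})
  .then((res) => res.json())
  .then((stats) =>
    console.log(`${stats.total_hosts} hosts, ${stats.security_updates} security updates`),
  );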

View File

@@ -390,153 +390,190 @@ router.post(
const totalPackages = packages.length;
// Process everything in a single transaction to avoid race conditions
await prisma.$transaction(async (tx) => {
// Update host data
await tx.hosts.update({
where: { id: host.id },
data: updateData,
});
// Clear existing host packages to avoid duplicates
await tx.host_packages.deleteMany({
where: { host_id: host.id },
});
// Process each package
for (const packageData of packages) {
// Find or create package
let pkg = await tx.packages.findUnique({
where: { name: packageData.name },
await prisma.$transaction(
async (tx) => {
// Update host data
await tx.hosts.update({
where: { id: host.id },
data: updateData,
});
if (!pkg) {
pkg = await tx.packages.create({
data: {
// Clear existing host packages to avoid duplicates
await tx.host_packages.deleteMany({
where: { host_id: host.id },
});
// Process packages in batches using createMany/updateMany
const packagesToCreate = [];
const packagesToUpdate = [];
const hostPackagesToUpsert = [];
// First pass: identify what needs to be created/updated
const existingPackages = await tx.packages.findMany({
where: {
name: { in: packages.map((p) => p.name) },
},
});
const existingPackageMap = new Map(
existingPackages.map((p) => [p.name, p]),
);
for (const packageData of packages) {
const existingPkg = existingPackageMap.get(packageData.name);
if (!existingPkg) {
// Package doesn't exist, create it
const newPkg = {
id: uuidv4(),
name: packageData.name,
description: packageData.description || null,
category: packageData.category || null,
latest_version:
packageData.availableVersion || packageData.currentVersion,
created_at: new Date(),
updated_at: new Date(),
};
packagesToCreate.push(newPkg);
existingPackageMap.set(packageData.name, newPkg);
} else if (
packageData.availableVersion &&
packageData.availableVersion !== existingPkg.latest_version
) {
// Package exists but needs version update
packagesToUpdate.push({
id: existingPkg.id,
latest_version: packageData.availableVersion,
});
}
}
// Batch create new packages
if (packagesToCreate.length > 0) {
await tx.packages.createMany({
data: packagesToCreate,
skipDuplicates: true,
});
}
// Batch update existing packages
for (const update of packagesToUpdate) {
await tx.packages.update({
where: { id: update.id },
data: {
latest_version: update.latest_version,
updated_at: new Date(),
},
});
} else {
// Update package latest version if newer
if (
packageData.availableVersion &&
packageData.availableVersion !== pkg.latest_version
) {
await tx.packages.update({
where: { id: pkg.id },
data: {
latest_version: packageData.availableVersion,
updated_at: new Date(),
},
});
}
}
// Create host package relationship
// Use upsert to handle potential duplicates gracefully
await tx.host_packages.upsert({
where: {
host_id_package_id: {
host_id: host.id,
package_id: pkg.id,
},
},
update: {
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
create: {
id: uuidv4(),
host_id: host.id,
package_id: pkg.id,
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
});
}
// Now process host_packages
for (const packageData of packages) {
const pkg = existingPackageMap.get(packageData.name);
// Process repositories if provided
if (repositories && Array.isArray(repositories)) {
// Clear existing host repositories
await tx.host_repositories.deleteMany({
where: { host_id: host.id },
});
// Deduplicate repositories by URL+distribution+components to avoid constraint violations
const uniqueRepos = new Map();
for (const repoData of repositories) {
const key = `${repoData.url}|${repoData.distribution}|${repoData.components}`;
if (!uniqueRepos.has(key)) {
uniqueRepos.set(key, repoData);
}
}
// Process each unique repository
for (const repoData of uniqueRepos.values()) {
// Find or create repository
let repo = await tx.repositories.findFirst({
await tx.host_packages.upsert({
where: {
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
},
});
if (!repo) {
repo = await tx.repositories.create({
data: {
id: uuidv4(),
name: repoData.name,
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
repo_type: repoData.repoType,
is_active: true,
is_secure: repoData.isSecure || false,
description: `${repoData.repoType} repository for ${repoData.distribution}`,
updated_at: new Date(),
host_id_package_id: {
host_id: host.id,
package_id: pkg.id,
},
});
}
// Create host repository relationship
await tx.host_repositories.create({
data: {
},
update: {
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
create: {
id: uuidv4(),
host_id: host.id,
repository_id: repo.id,
is_enabled: repoData.isEnabled !== false, // Default to enabled
package_id: pkg.id,
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
});
}
}
// Create update history record
await tx.update_history.create({
data: {
id: uuidv4(),
host_id: host.id,
packages_count: updatesCount,
security_count: securityCount,
total_packages: totalPackages,
payload_size_kb: payloadSizeKb,
execution_time: executionTime ? parseFloat(executionTime) : null,
status: "success",
},
});
});
// Process repositories if provided
if (repositories && Array.isArray(repositories)) {
// Clear existing host repositories
await tx.host_repositories.deleteMany({
where: { host_id: host.id },
});
// Deduplicate repositories by URL+distribution+components to avoid constraint violations
const uniqueRepos = new Map();
for (const repoData of repositories) {
const key = `${repoData.url}|${repoData.distribution}|${repoData.components}`;
if (!uniqueRepos.has(key)) {
uniqueRepos.set(key, repoData);
}
}
// Process each unique repository
for (const repoData of uniqueRepos.values()) {
// Find or create repository
let repo = await tx.repositories.findFirst({
where: {
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
},
});
if (!repo) {
repo = await tx.repositories.create({
data: {
id: uuidv4(),
name: repoData.name,
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
repo_type: repoData.repoType,
is_active: true,
is_secure: repoData.isSecure || false,
description: `${repoData.repoType} repository for ${repoData.distribution}`,
updated_at: new Date(),
},
});
}
// Create host repository relationship
await tx.host_repositories.create({
data: {
id: uuidv4(),
host_id: host.id,
repository_id: repo.id,
is_enabled: repoData.isEnabled !== false, // Default to enabled
last_checked: new Date(),
},
});
}
}
// Create update history record
await tx.update_history.create({
data: {
id: uuidv4(),
host_id: host.id,
packages_count: updatesCount,
security_count: securityCount,
total_packages: totalPackages,
payload_size_kb: payloadSizeKb,
execution_time: executionTime ? parseFloat(executionTime) : null,
status: "success",
},
});
},
{
maxWait: 30000, // Wait up to 30s for a transaction slot
timeout: 60000, // Allow transaction to run for up to 60s
},
);
// Agent auto-update is now handled client-side by the agent itself

View File

@@ -14,13 +14,13 @@ const router = express.Router();
function getCurrentVersion() {
try {
const packageJson = require("../../package.json");
return packageJson?.version || "1.2.7";
return packageJson?.version || "1.2.9";
} catch (packageError) {
console.warn(
"Could not read version from package.json, using fallback:",
packageError.message,
);
return "1.2.7";
return "1.2.9";
}
}
@@ -126,43 +126,61 @@ async function getLatestCommit(owner, repo) {
// Helper function to get commit count difference
async function getCommitDifference(owner, repo, currentVersion) {
try {
const currentVersionTag = `v${currentVersion}`;
// Compare main branch with the released version tag
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${currentVersionTag}...main`;
// Try both with and without 'v' prefix for compatibility
const versionTags = [
currentVersion, // Try without 'v' first (new format)
`v${currentVersion}`, // Try with 'v' prefix (old format)
];
const response = await fetch(apiUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
},
});
for (const versionTag of versionTags) {
try {
// Compare main branch with the released version tag
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${versionTag}...main`;
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
throw new Error("GitHub API rate limit exceeded");
const response = await fetch(apiUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
},
});
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
throw new Error("GitHub API rate limit exceeded");
}
// If 404, try next tag format
if (response.status === 404) {
continue;
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
const compareData = await response.json();
return {
commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
totalCommits: compareData.total_commits || 0,
branchInfo: "main branch vs release",
};
} catch (error) {
console.error("Error fetching commit difference:", error.message);
throw error;
const compareData = await response.json();
return {
commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
totalCommits: compareData.total_commits || 0,
branchInfo: "main branch vs release",
};
} catch (error) {
// If rate limit, throw immediately
if (error.message.includes("rate limit")) {
throw error;
}
}
}
// If all attempts failed, throw error
throw new Error(
`Could not find tag '${currentVersion}' or 'v${currentVersion}' in repository`,
);
}
// Helper function to compare version strings (semantic versioning)
@@ -274,11 +292,11 @@ router.get(
) {
console.log("GitHub API rate limited, providing fallback data");
latestRelease = {
tagName: "v1.2.7",
version: "1.2.7",
tagName: "v1.2.8",
version: "1.2.8",
publishedAt: "2025-10-02T17:12:53Z",
htmlUrl:
"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.7",
"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.8",
};
latestCommit = {
sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
@@ -296,10 +314,13 @@ router.get(
};
} else {
// Fall back to cached data for other errors
const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
latestRelease = settings.latest_version
? {
version: settings.latest_version,
tagName: `v${settings.latest_version}`,
publishedAt: null, // Only use date from GitHub API, not cached data
htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/v${settings.latest_version}`,
}
: null;
}

View File

@@ -62,9 +62,13 @@ const versionRoutes = require("./routes/versionRoutes");
const tfaRoutes = require("./routes/tfaRoutes");
const searchRoutes = require("./routes/searchRoutes");
const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
const gethomepageRoutes = require("./routes/gethomepageRoutes");
const automationRoutes = require("./routes/automationRoutes");
const dockerRoutes = require("./routes/dockerRoutes");
const updateScheduler = require("./services/updateScheduler");
const { initSettings } = require("./services/settingsService");
const { cleanup_expired_sessions } = require("./utils/session_manager");
const { queueManager } = require("./services/automation");
// Initialize Prisma client with optimized connection pooling for multiple instances
const prisma = createPrismaClient();
@@ -422,6 +426,9 @@ app.use(
authLimiter,
autoEnrollmentRoutes,
);
app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
app.use(`/api/${apiVersion}/automation`, automationRoutes);
app.use(`/api/${apiVersion}/docker`, dockerRoutes);
// Error handling middleware
app.use((err, _req, res, _next) => {
@@ -448,6 +455,7 @@ process.on("SIGINT", async () => {
clearInterval(app.locals.session_cleanup_interval);
}
updateScheduler.stop();
await queueManager.shutdown();
await disconnectPrisma(prisma);
process.exit(0);
});
@@ -460,6 +468,7 @@ process.on("SIGTERM", async () => {
clearInterval(app.locals.session_cleanup_interval);
}
updateScheduler.stop();
await queueManager.shutdown();
await disconnectPrisma(prisma);
process.exit(0);
});
@@ -728,6 +737,12 @@ async function startServer() {
// Initialize dashboard preferences for all users
await initializeDashboardPreferences();
// Initialize BullMQ queue manager
await queueManager.initialize();
// Schedule recurring jobs
await queueManager.scheduleAllJobs();
// Initial session cleanup
await cleanup_expired_sessions();

View File

@@ -0,0 +1,67 @@
/**
* Echo Hello Automation
* Simple test automation task
*/
class EchoHello {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "echo-hello";
}
/**
* Process echo hello job
*/
async process(job) {
const startTime = Date.now();
console.log("👋 Starting echo hello task...");
try {
// Simple echo task
const message = job.data.message || "Hello from BullMQ!";
const timestamp = new Date().toISOString();
// Simulate some work
await new Promise((resolve) => setTimeout(resolve, 100));
const executionTime = Date.now() - startTime;
console.log(`✅ Echo hello completed in ${executionTime}ms: ${message}`);
return {
success: true,
message,
timestamp,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Echo hello failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Echo hello is manual only - no scheduling
*/
async schedule() {
console.log(" Echo hello is manual only - no scheduling needed");
return null;
}
/**
* Trigger manual echo hello
*/
async triggerManual(message = "Hello from BullMQ!") {
const job = await this.queueManager.queues[this.queueName].add(
"echo-hello-manual",
{ message },
{ priority: 1 },
);
console.log("✅ Manual echo hello triggered");
return job;
}
}
module.exports = EchoHello;

View File

@@ -0,0 +1,153 @@
const { prisma } = require("./shared/prisma");
const { compareVersions, checkPublicRepo } = require("./shared/utils");
/**
* GitHub Update Check Automation
* Checks for new releases on GitHub using HTTPS API
*/
class GitHubUpdateCheck {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "github-update-check";
}
/**
* Process GitHub update check job
*/
async process(job) {
const startTime = Date.now();
console.log("🔍 Starting GitHub update check...");
try {
// Get settings
const settings = await prisma.settings.findFirst();
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
let owner, repo;
// Parse GitHub repository URL (supports both HTTPS and SSH formats)
if (repoUrl.includes("git@github.com:")) {
const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
if (match) {
[, owner, repo] = match;
}
} else if (repoUrl.includes("github.com/")) {
const match = repoUrl.match(
/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
);
if (match) {
[, owner, repo] = match;
}
}
if (!owner || !repo) {
throw new Error("Could not parse GitHub repository URL");
}
// Always use HTTPS GitHub API (simpler and more reliable)
const latestVersion = await checkPublicRepo(owner, repo);
if (!latestVersion) {
throw new Error("Could not determine latest version");
}
// Read version from package.json
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {
currentVersion = packageJson.version;
}
} catch (packageError) {
console.warn(
"Could not read version from package.json:",
packageError.message,
);
}
const isUpdateAvailable =
compareVersions(latestVersion, currentVersion) > 0;
// Update settings with check results
await prisma.settings.update({
where: { id: settings.id },
data: {
last_update_check: new Date(),
update_available: isUpdateAvailable,
latest_version: latestVersion,
},
});
const executionTime = Date.now() - startTime;
console.log(
`✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
);
return {
success: true,
currentVersion,
latestVersion,
isUpdateAvailable,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ GitHub update check failed after ${executionTime}ms:`,
error.message,
);
// Update last check time even on error
try {
const settings = await prisma.settings.findFirst();
if (settings) {
await prisma.settings.update({
where: { id: settings.id },
data: {
last_update_check: new Date(),
update_available: false,
},
});
}
} catch (updateError) {
console.error(
"❌ Error updating last check time:",
updateError.message,
);
}
throw error;
}
}
/**
* Schedule recurring GitHub update check (daily at midnight)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"github-update-check",
{},
{
repeat: { cron: "0 0 * * *" }, // Daily at midnight
jobId: "github-update-check-recurring",
},
);
console.log("✅ GitHub update check scheduled");
return job;
}
/**
* Trigger manual GitHub update check
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"github-update-check-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual GitHub update check triggered");
return job;
}
}
module.exports = GitHubUpdateCheck;
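
A quick illustration that the two repository URL formats parsed above resolve to the same owner/repo pair:

const urls = [
  "git@github.com:PatchMon/PatchMon.git",
  "https://github.com/PatchMon/PatchMon",
];
for (const url of urls) {
  const match = url.includes("git@github.com:")
    ? url.match(/git@github\.com:([^/]+)\/([^/]+)\.git/)
    : url.match(/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/);
  console.log({ owner: match[1], repo: match[2] }); // { owner: "PatchMon", repo: "PatchMon" } both times
}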

View File

@@ -0,0 +1,283 @@
const { Queue, Worker } = require("bullmq");
const { redis, redisConnection } = require("./shared/redis");
const { prisma } = require("./shared/prisma");
// Import automation classes
const GitHubUpdateCheck = require("./githubUpdateCheck");
const SessionCleanup = require("./sessionCleanup");
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
const EchoHello = require("./echoHello");
// Queue names
const QUEUE_NAMES = {
GITHUB_UPDATE_CHECK: "github-update-check",
SESSION_CLEANUP: "session-cleanup",
SYSTEM_MAINTENANCE: "system-maintenance",
ECHO_HELLO: "echo-hello",
ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
};
/**
* Main Queue Manager
* Manages all BullMQ queues and workers
*/
class QueueManager {
constructor() {
this.queues = {};
this.workers = {};
this.automations = {};
this.isInitialized = false;
}
/**
* Initialize all queues, workers, and automations
*/
async initialize() {
try {
console.log("✅ Redis connection successful");
// Initialize queues
await this.initializeQueues();
// Initialize automation classes
await this.initializeAutomations();
// Initialize workers
await this.initializeWorkers();
// Setup event listeners
this.setupEventListeners();
this.isInitialized = true;
console.log("✅ Queue manager initialized successfully");
} catch (error) {
console.error("❌ Failed to initialize queue manager:", error.message);
throw error;
}
}
/**
* Initialize all queues
*/
async initializeQueues() {
for (const [key, queueName] of Object.entries(QUEUE_NAMES)) {
this.queues[queueName] = new Queue(queueName, {
connection: redisConnection,
defaultJobOptions: {
removeOnComplete: 50, // Keep last 50 completed jobs
removeOnFail: 20, // Keep last 20 failed jobs
attempts: 3, // Retry failed jobs 3 times
backoff: {
type: "exponential",
delay: 2000,
},
},
});
console.log(`✅ Queue '${queueName}' initialized`);
}
}
/**
* Initialize automation classes
*/
async initializeAutomations() {
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
this,
);
this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
new OrphanedRepoCleanup(this);
this.automations[QUEUE_NAMES.ECHO_HELLO] = new EchoHello(this);
console.log("✅ All automation classes initialized");
}
/**
* Initialize all workers
*/
async initializeWorkers() {
// GitHub Update Check Worker
this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
QUEUE_NAMES.GITHUB_UPDATE_CHECK,
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Session Cleanup Worker
this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
QUEUE_NAMES.SESSION_CLEANUP,
this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.SESSION_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Orphaned Repo Cleanup Worker
this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Echo Hello Worker
this.workers[QUEUE_NAMES.ECHO_HELLO] = new Worker(
QUEUE_NAMES.ECHO_HELLO,
this.automations[QUEUE_NAMES.ECHO_HELLO].process.bind(
this.automations[QUEUE_NAMES.ECHO_HELLO],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Add error handling for all workers
Object.values(this.workers).forEach((worker) => {
worker.on("error", (error) => {
console.error("Worker error:", error);
});
});
console.log("✅ All workers initialized");
}
/**
* Setup event listeners for all queues
*/
setupEventListeners() {
for (const queueName of Object.values(QUEUE_NAMES)) {
const queue = this.queues[queueName];
queue.on("error", (error) => {
console.error(`❌ Queue '${queueName}' experienced an error:`, error);
});
queue.on("failed", (job, err) => {
console.error(
`❌ Job '${job.id}' in queue '${queueName}' failed:`,
err,
);
});
queue.on("completed", (job) => {
console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
});
}
console.log("✅ Queue events initialized");
}
/**
* Schedule all recurring jobs
*/
async scheduleAllJobs() {
await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ECHO_HELLO].schedule();
}
/**
* Manual job triggers
*/
async triggerGitHubUpdateCheck() {
return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
}
async triggerSessionCleanup() {
return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
}
async triggerOrphanedRepoCleanup() {
return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
}
async triggerEchoHello(message = "Hello from BullMQ!") {
return this.automations[QUEUE_NAMES.ECHO_HELLO].triggerManual(message);
}
/**
* Get queue statistics
*/
async getQueueStats(queueName) {
const queue = this.queues[queueName];
if (!queue) {
throw new Error(`Queue ${queueName} not found`);
}
const [waiting, active, completed, failed, delayed] = await Promise.all([
queue.getWaiting(),
queue.getActive(),
queue.getCompleted(),
queue.getFailed(),
queue.getDelayed(),
]);
return {
waiting: waiting.length,
active: active.length,
completed: completed.length,
failed: failed.length,
delayed: delayed.length,
};
}
/**
* Get all queue statistics
*/
async getAllQueueStats() {
const stats = {};
for (const queueName of Object.values(QUEUE_NAMES)) {
stats[queueName] = await this.getQueueStats(queueName);
}
return stats;
}
/**
* Get recent jobs for a queue
*/
async getRecentJobs(queueName, limit = 10) {
const queue = this.queues[queueName];
if (!queue) {
throw new Error(`Queue ${queueName} not found`);
}
const [completed, failed] = await Promise.all([
queue.getCompleted(0, limit - 1),
queue.getFailed(0, limit - 1),
]);
return [...completed, ...failed]
.sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
.slice(0, limit);
}
/**
* Graceful shutdown
*/
async shutdown() {
console.log("🛑 Shutting down queue manager...");
for (const queueName of Object.keys(this.queues)) {
await this.queues[queueName].close();
await this.workers[queueName].close();
}
await redis.quit();
console.log("✅ Queue manager shutdown complete");
}
}
const queueManager = new QueueManager();
module.exports = { queueManager, QUEUE_NAMES };
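
Condensed sketch of the lifecycle that server.js (later in this diff) drives, plus a manual trigger and a stats read:

const { queueManager, QUEUE_NAMES } = require("./services/automation"); // path as required in server.js

async function main() {
  await queueManager.initialize();             // queues, automations, workers, event listeners
  await queueManager.scheduleAllJobs();        // registers the repeatable cron-style jobs
  await queueManager.triggerEchoHello("ping"); // enqueue a manual test job
  console.log(await queueManager.getQueueStats(QUEUE_NAMES.ECHO_HELLO));
  await queueManager.shutdown();               // close queues, workers and the Redis connection
}

main().catch(console.error);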

View File

@@ -0,0 +1,114 @@
const { prisma } = require("./shared/prisma");
/**
* Orphaned Repository Cleanup Automation
* Removes repositories with no associated hosts
*/
class OrphanedRepoCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "orphaned-repo-cleanup";
}
/**
* Process orphaned repository cleanup job
*/
async process(job) {
const startTime = Date.now();
console.log("🧹 Starting orphaned repository cleanup...");
try {
// Find repositories with 0 hosts
const orphanedRepos = await prisma.repositories.findMany({
where: {
host_repositories: {
none: {},
},
},
include: {
_count: {
select: {
host_repositories: true,
},
},
},
});
let deletedCount = 0;
const deletedRepos = [];
// Delete orphaned repositories
for (const repo of orphanedRepos) {
try {
await prisma.repositories.delete({
where: { id: repo.id },
});
deletedCount++;
deletedRepos.push({
id: repo.id,
name: repo.name,
url: repo.url,
});
console.log(
`🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
);
} catch (deleteError) {
console.error(
`❌ Failed to delete repository ${repo.id}:`,
deleteError.message,
);
}
}
const executionTime = Date.now() - startTime;
console.log(
`✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
);
return {
success: true,
deletedCount,
deletedRepos,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Schedule recurring orphaned repository cleanup (daily at 2 AM)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-repo-cleanup",
{},
{
repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
jobId: "orphaned-repo-cleanup-recurring",
},
);
console.log("✅ Orphaned repository cleanup scheduled");
return job;
}
/**
* Trigger manual orphaned repository cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-repo-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual orphaned repository cleanup triggered");
return job;
}
}
module.exports = OrphanedRepoCleanup;
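
For a dry run, the same "none: {}" relation filter used above can list what would be deleted without touching anything (sketch, same Prisma schema assumed):

const { prisma } = require("./shared/prisma");

async function previewOrphanedRepos() {
  // Repositories with zero host_repositories links — exactly what process() deletes
  return prisma.repositories.findMany({
    where: { host_repositories: { none: {} } },
    select: { id: true, name: true, url: true },
  });
}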

View File

@@ -0,0 +1,78 @@
const { prisma } = require("./shared/prisma");
const { cleanup_expired_sessions } = require("../../utils/session_manager");
/**
* Session Cleanup Automation
* Cleans up expired user sessions
*/
class SessionCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "session-cleanup";
}
/**
* Process session cleanup job
*/
async process(job) {
const startTime = Date.now();
console.log("🧹 Starting session cleanup...");
try {
const result = await prisma.user_sessions.deleteMany({
where: {
OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
},
});
const executionTime = Date.now() - startTime;
console.log(
`✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
);
return {
success: true,
sessionsCleaned: result.count,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Session cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Schedule recurring session cleanup (every hour)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"session-cleanup",
{},
{
repeat: { cron: "0 * * * *" }, // Every hour
jobId: "session-cleanup-recurring",
},
);
console.log("✅ Session cleanup scheduled");
return job;
}
/**
* Trigger manual session cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"session-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual session cleanup triggered");
return job;
}
}
module.exports = SessionCleanup;

View File

@@ -0,0 +1,5 @@
const { PrismaClient } = require("@prisma/client");
const prisma = new PrismaClient();
module.exports = { prisma };

View File

@@ -0,0 +1,16 @@
const IORedis = require("ioredis");
// Redis connection configuration
const redisConnection = {
host: process.env.REDIS_HOST || "localhost",
port: parseInt(process.env.REDIS_PORT) || 6379,
password: process.env.REDIS_PASSWORD || undefined,
db: parseInt(process.env.REDIS_DB) || 0,
retryDelayOnFailover: 100,
maxRetriesPerRequest: null, // BullMQ requires this to be null
};
// Create Redis connection
const redis = new IORedis(redisConnection);
module.exports = { redis, redisConnection };

View File

@@ -0,0 +1,82 @@
// Common utilities for automation jobs
/**
* Compare two semantic versions
* @param {string} version1 - First version
* @param {string} version2 - Second version
* @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
*/
function compareVersions(version1, version2) {
const v1parts = version1.split(".").map(Number);
const v2parts = version2.split(".").map(Number);
const maxLength = Math.max(v1parts.length, v2parts.length);
for (let i = 0; i < maxLength; i++) {
const v1part = v1parts[i] || 0;
const v2part = v2parts[i] || 0;
if (v1part > v2part) return 1;
if (v1part < v2part) return -1;
}
return 0;
}
/**
* Check public GitHub repository for latest release
* @param {string} owner - Repository owner
* @param {string} repo - Repository name
* @returns {Promise<string|null>} - Latest version or null
*/
async function checkPublicRepo(owner, repo) {
try {
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {
currentVersion = packageJson.version;
}
} catch (packageError) {
console.warn(
"Could not read version from package.json for User-Agent, using fallback:",
packageError.message,
);
}
const response = await fetch(httpsRepoUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${currentVersion}`,
},
});
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
return null;
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
const releaseData = await response.json();
return releaseData.tag_name.replace("v", "");
} catch (error) {
console.error("GitHub API error:", error.message);
throw error;
}
}
module.exports = {
compareVersions,
checkPublicRepo,
};
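
Worked examples of the comparison semantics, for reference:

const { compareVersions } = require("./shared/utils"); // same path the automations above use

console.log(compareVersions("1.2.10", "1.2.9")); //  1 — segments compare numerically, so 10 > 9
console.log(compareVersions("1.2.9", "1.3.0"));  // -1
console.log(compareVersions("1.2", "1.2.0"));    //  0 — missing segments are treated as 0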

View File

@@ -104,7 +104,7 @@ class UpdateScheduler {
}
// Read version from package.json dynamically
let currentVersion = "1.2.7"; // fallback
let currentVersion = "1.2.9"; // fallback
try {
const packageJson = require("../../package.json");
if (packageJson?.version) {
@@ -214,7 +214,7 @@ class UpdateScheduler {
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
// Get current version for User-Agent
let currentVersion = "1.2.7"; // fallback
let currentVersion = "1.2.9"; // fallback
try {
const packageJson = require("../../package.json");
if (packageJson?.version) {

View File

@@ -8,19 +8,94 @@ log() {
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
}
# Copy files from agents_backup to agents if agents directory is empty and no .sh files are present
if [ -d "/app/agents" ] && [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' | head -n 1)" ]; then
if [ -d "/app/agents_backup" ]; then
log "Agents directory is empty, copying from backup..."
cp -r /app/agents_backup/* /app/agents/
# Function to extract version from agent script
get_agent_version() {
local file="$1"
if [ -f "$file" ]; then
grep -m 1 '^AGENT_VERSION=' "$file" | cut -d'"' -f2 2>/dev/null || echo "0.0.0"
else
log "Warning: agents_backup directory not found"
echo "0.0.0"
fi
else
log "Agents directory already contains files, skipping copy"
fi
}
log "Starting PatchMon Backend (${NODE_ENV:-production})..."
# Function to compare versions (returns 0 if $1 > $2)
version_greater() {
# Use sort -V for version comparison
test "$(printf '%s\n' "$1" "$2" | sort -V | tail -n1)" = "$1" && test "$1" != "$2"
}
# Check and update agent files if necessary
update_agents() {
local backup_agent="/app/agents_backup/patchmon-agent.sh"
local current_agent="/app/agents/patchmon-agent.sh"
# Check if agents directory exists
if [ ! -d "/app/agents" ]; then
log "ERROR: /app/agents directory not found"
return 1
fi
# Check if backup exists
if [ ! -d "/app/agents_backup" ]; then
log "WARNING: agents_backup directory not found, skipping agent update"
return 0
fi
# Get versions
local backup_version=$(get_agent_version "$backup_agent")
local current_version=$(get_agent_version "$current_agent")
log "Agent version check:"
log " Image version: ${backup_version}"
log " Volume version: ${current_version}"
# Determine if update is needed
local needs_update=0
# Case 1: No agents in volume (first time setup)
if [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' 2>/dev/null | head -n 1)" ]; then
log "Agents directory is empty - performing initial copy"
needs_update=1
# Case 2: Backup version is newer
elif version_greater "$backup_version" "$current_version"; then
log "Newer agent version available (${backup_version} > ${current_version})"
needs_update=1
else
log "Agents are up to date"
needs_update=0
fi
# Perform update if needed
if [ $needs_update -eq 1 ]; then
log "Updating agents to version ${backup_version}..."
# Create backup of existing agents if they exist
if [ -f "$current_agent" ]; then
local backup_timestamp=$(date +%Y%m%d_%H%M%S)
local backup_name="/app/agents/patchmon-agent.sh.backup.${backup_timestamp}"
cp "$current_agent" "$backup_name" 2>/dev/null || true
log "Previous agent backed up to: $(basename $backup_name)"
fi
# Copy new agents
cp -r /app/agents_backup/* /app/agents/
# Verify update
local new_version=$(get_agent_version "$current_agent")
if [ "$new_version" = "$backup_version" ]; then
log "✅ Agents successfully updated to version ${new_version}"
else
log "⚠️ Warning: Agent update may have failed (expected: ${backup_version}, got: ${new_version})"
fi
fi
}
# Main execution
log "PatchMon Backend Container Starting..."
log "Environment: ${NODE_ENV:-production}"
# Update agents (version-aware)
update_agents
log "Running database migrations..."
npx prisma migrate deploy
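The version_greater helper above delegates ordering to sort -V; as a rough cross-check of the same "strictly newer" semantics, here is a naive JavaScript sketch that assumes plain dotted numeric versions such as 1.2.9 (illustrative only, not part of the entrypoint).
// Naive JS equivalent of version_greater (assumes versions like "1.2.9"):
function versionGreater(a, b) {
  const pa = a.split(".").map(Number);
  const pb = b.split(".").map(Number);
  for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
    const x = pa[i] || 0;
    const y = pb[i] || 0;
    if (x !== y) return x > y;
  }
  return false; // equal versions are not "greater"
}
// versionGreater("1.2.9", "1.2.7") === true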

View File

@@ -1,7 +1,7 @@
{
"name": "patchmon-frontend",
"private": true,
"version": "1.2.7",
"version": "1.2.9",
"license": "AGPL-3.0",
"type": "module",
"scripts": {
@@ -20,7 +20,7 @@
"clsx": "^2.1.1",
"cors": "^2.8.5",
"date-fns": "^4.1.0",
"express": "^4.21.2",
"express": "^5.0.0",
"http-proxy-middleware": "^3.0.3",
"lucide-react": "^0.468.0",
"react": "^18.3.1",

View File

@@ -1,3 +1,4 @@
import { lazy, Suspense } from "react";
import { Route, Routes } from "react-router-dom";
import FirstTimeAdminSetup from "./components/FirstTimeAdminSetup";
import Layout from "./components/Layout";
@@ -8,24 +9,48 @@ import { isAuthPhase } from "./constants/authPhases";
import { AuthProvider, useAuth } from "./contexts/AuthContext";
import { ThemeProvider } from "./contexts/ThemeContext";
import { UpdateNotificationProvider } from "./contexts/UpdateNotificationContext";
import Dashboard from "./pages/Dashboard";
import HostDetail from "./pages/HostDetail";
import Hosts from "./pages/Hosts";
import Login from "./pages/Login";
import PackageDetail from "./pages/PackageDetail";
import Packages from "./pages/Packages";
import Profile from "./pages/Profile";
import Queue from "./pages/Queue";
import Repositories from "./pages/Repositories";
import RepositoryDetail from "./pages/RepositoryDetail";
import AlertChannels from "./pages/settings/AlertChannels";
import Integrations from "./pages/settings/Integrations";
import Notifications from "./pages/settings/Notifications";
import PatchManagement from "./pages/settings/PatchManagement";
import SettingsAgentConfig from "./pages/settings/SettingsAgentConfig";
import SettingsHostGroups from "./pages/settings/SettingsHostGroups";
import SettingsServerConfig from "./pages/settings/SettingsServerConfig";
import SettingsUsers from "./pages/settings/SettingsUsers";
// Lazy load pages
const Dashboard = lazy(() => import("./pages/Dashboard"));
const HostDetail = lazy(() => import("./pages/HostDetail"));
const Hosts = lazy(() => import("./pages/Hosts"));
const Login = lazy(() => import("./pages/Login"));
const PackageDetail = lazy(() => import("./pages/PackageDetail"));
const Packages = lazy(() => import("./pages/Packages"));
const Profile = lazy(() => import("./pages/Profile"));
const Automation = lazy(() => import("./pages/Automation"));
const Repositories = lazy(() => import("./pages/Repositories"));
const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
const Docker = lazy(() => import("./pages/Docker"));
const DockerContainerDetail = lazy(
() => import("./pages/docker/ContainerDetail"),
);
const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
const Integrations = lazy(() => import("./pages/settings/Integrations"));
const Notifications = lazy(() => import("./pages/settings/Notifications"));
const PatchManagement = lazy(() => import("./pages/settings/PatchManagement"));
const SettingsAgentConfig = lazy(
() => import("./pages/settings/SettingsAgentConfig"),
);
const SettingsHostGroups = lazy(
() => import("./pages/settings/SettingsHostGroups"),
);
const SettingsServerConfig = lazy(
() => import("./pages/settings/SettingsServerConfig"),
);
const SettingsUsers = lazy(() => import("./pages/settings/SettingsUsers"));
// Loading fallback component
const LoadingFallback = () => (
<div className="min-h-screen bg-gradient-to-br from-primary-50 to-secondary-50 dark:from-secondary-900 dark:to-secondary-800 flex items-center justify-center">
<div className="text-center">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary-600 mx-auto mb-4"></div>
<p className="text-secondary-600 dark:text-secondary-300">Loading...</p>
</div>
</div>
);
function AppRoutes() {
const { needsFirstTimeSetup, authPhase, isAuthenticated } = useAuth();
@@ -54,295 +79,337 @@ function AppRoutes() {
}
return (
<Routes>
<Route path="/login" element={<Login />} />
<Route
path="/"
element={
<ProtectedRoute requirePermission="can_view_dashboard">
<Layout>
<Dashboard />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Hosts />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts/:hostId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<HostDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<Packages />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Repositories />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories/:repositoryId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<RepositoryDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/queue"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Queue />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/permissions"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/roles"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/profile"
element={
<ProtectedRoute>
<Layout>
<SettingsLayout>
<Profile />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/host-groups"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/notifications"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<Notifications />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config/management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config/version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/alert-channels"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<AlertChannels />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/integrations"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<Integrations />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/patch-management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<PatchManagement />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-url"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/branding"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/options"
element={
<ProtectedRoute requirePermission="can_manage_hosts">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages/:packageId"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<PackageDetail />
</Layout>
</ProtectedRoute>
}
/>
</Routes>
<Suspense fallback={<LoadingFallback />}>
<Routes>
<Route path="/login" element={<Login />} />
<Route
path="/"
element={
<ProtectedRoute requirePermission="can_view_dashboard">
<Layout>
<Dashboard />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Hosts />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts/:hostId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<HostDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<Packages />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Repositories />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories/:repositoryId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<RepositoryDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/automation"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Automation />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<Docker />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker/containers/:id"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<DockerContainerDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker/images/:id"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<DockerImageDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker/hosts/:id"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<DockerHostDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/permissions"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/roles"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/profile"
element={
<ProtectedRoute>
<Layout>
<SettingsLayout>
<Profile />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/host-groups"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/notifications"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<Notifications />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config/management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config/version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/alert-channels"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<AlertChannels />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/integrations"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<Integrations />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/patch-management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<PatchManagement />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-url"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/branding"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/options"
element={
<ProtectedRoute requirePermission="can_manage_hosts">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages/:packageId"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<PackageDetail />
</Layout>
</ProtectedRoute>
}
/>
</Routes>
</Suspense>
);
}
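The pattern introduced above, reduced to its core (the page name here is hypothetical; the point is that each route's chunk is fetched on first render and Suspense shows the fallback while it loads).
// Minimal sketch of the lazy-loading pattern used in App.jsx.
import { lazy, Suspense } from "react";

const ExamplePage = lazy(() => import("./pages/ExamplePage")); // hypothetical page

function ExampleRoutesFragment() {
  return (
    <Suspense fallback={<p>Loading...</p>}>
      <ExamplePage />
    </Suspense>
  );
}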

View File

@@ -11,7 +11,6 @@ import {
Github,
Globe,
Home,
List,
LogOut,
Mail,
Menu,
@@ -113,18 +112,26 @@ const Layout = ({ children }) => {
});
}
// Add Automation item (available to all users with inventory access)
inventoryItems.push({
name: "Automation",
href: "/automation",
icon: RefreshCw,
beta: true,
});
if (canViewReports()) {
inventoryItems.push(
{
name: "Services",
href: "/services",
icon: Activity,
comingSoon: true,
},
{
name: "Docker",
href: "/docker",
icon: Container,
beta: true,
},
{
name: "Services",
href: "/services",
icon: Activity,
comingSoon: true,
},
{
@@ -136,21 +143,13 @@ const Layout = ({ children }) => {
);
}
// Add Pro-Action and Queue items (available to all users with inventory access)
inventoryItems.push(
{
name: "Pro-Action",
href: "/pro-action",
icon: Zap,
comingSoon: true,
},
{
name: "Queue",
href: "/queue",
icon: List,
comingSoon: true,
},
);
// Add Pro-Action item (available to all users with inventory access)
inventoryItems.push({
name: "Pro-Action",
href: "/pro-action",
icon: Zap,
comingSoon: true,
});
if (inventoryItems.length > 0) {
nav.push({
@@ -210,7 +209,7 @@ const Layout = ({ children }) => {
if (path === "/services") return "Services";
if (path === "/docker") return "Docker";
if (path === "/pro-action") return "Pro-Action";
if (path === "/queue") return "Queue";
if (path === "/automation") return "Automation";
if (path === "/users") return "Users";
if (path === "/permissions") return "Permissions";
if (path === "/settings") return "Settings";
@@ -436,6 +435,11 @@ const Layout = ({ children }) => {
Soon
</span>
)}
{subItem.beta && (
<span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
Beta
</span>
)}
</span>
</Link>
)}
@@ -707,6 +711,11 @@ const Layout = ({ children }) => {
Soon
</span>
)}
{subItem.beta && (
<span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
Beta
</span>
)}
{subItem.showUpgradeIcon && (
<UpgradeNotificationIcon className="h-3 w-3" />
)}
@@ -929,31 +938,42 @@ const Layout = ({ children }) => {
<div className="h-6 w-px bg-secondary-200 dark:bg-secondary-600 lg:hidden" />
<div className="flex flex-1 gap-x-4 self-stretch lg:gap-x-6">
{/* Page title - hidden on dashboard to give more space to search */}
{location.pathname !== "/" && (
<div className="relative flex items-center">
<h2 className="text-lg font-semibold text-secondary-900 dark:text-secondary-100 whitespace-nowrap">
{getPageTitle()}
</h2>
</div>
)}
{/* Page title - hidden on dashboard, hosts, repositories, packages, automation, docker, and host details to give more space to search */}
{![
"/",
"/hosts",
"/repositories",
"/packages",
"/automation",
"/docker",
].includes(location.pathname) &&
!location.pathname.startsWith("/hosts/") &&
!location.pathname.startsWith("/docker/") && (
<div className="relative flex items-center">
<h2 className="text-lg font-semibold text-secondary-900 dark:text-secondary-100 whitespace-nowrap">
{getPageTitle()}
</h2>
</div>
)}
{/* Global Search Bar */}
<div
className={`flex items-center ${location.pathname === "/" ? "flex-1 max-w-none" : "max-w-sm"}`}
className={`flex items-center ${["/", "/hosts", "/repositories", "/packages", "/automation", "/docker"].includes(location.pathname) || location.pathname.startsWith("/hosts/") || location.pathname.startsWith("/docker/") ? "flex-1 max-w-none" : "max-w-sm"}`}
>
<GlobalSearch />
</div>
<div className="flex flex-1 items-center gap-x-4 lg:gap-x-6 justify-end">
{/* External Links */}
<div className="hidden md:flex items-center gap-2">
<div className="hidden md:flex items-center gap-1">
{/* 1) GitHub */}
<a
href="https://github.com/PatchMon/PatchMon"
target="_blank"
rel="noopener noreferrer"
className="flex items-center justify-center gap-1.5 px-3 py-2 bg-gray-50 dark:bg-gray-800 text-secondary-600 dark:text-secondary-300 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg transition-colors shadow-sm group relative"
title="GitHub"
aria-label="GitHub"
>
<Github className="h-5 w-5 flex-shrink-0" />
{githubStars !== null && (

View File

@@ -1,11 +1,16 @@
import { useQuery } from "@tanstack/react-query";
import { useEffect } from "react";
import { isAuthReady } from "../constants/authPhases";
import { useAuth } from "../contexts/AuthContext";
import { settingsAPI } from "../utils/api";
const LogoProvider = ({ children }) => {
const { authPhase, isAuthenticated } = useAuth();
const { data: settings } = useQuery({
queryKey: ["settings"],
queryFn: () => settingsAPI.get().then((res) => res.data),
enabled: isAuthReady(authPhase, isAuthenticated()),
});
useEffect(() => {

View File

@@ -319,9 +319,8 @@ const UsersTab = () => {
user={editingUser}
isOpen={!!editingUser}
onClose={() => setEditingUser(null)}
onUserUpdated={() => {
queryClient.invalidateQueries(["users"]);
}}
onUpdateUser={updateUserMutation.mutate}
isLoading={updateUserMutation.isPending}
roles={roles}
/>
)}
@@ -591,7 +590,14 @@ const AddUserModal = ({ isOpen, onClose, onUserCreated, roles }) => {
};
// Edit User Modal Component
const EditUserModal = ({ user, isOpen, onClose, onUserUpdated, roles }) => {
const EditUserModal = ({
user,
isOpen,
onClose,
onUpdateUser,
isLoading,
roles,
}) => {
const editUsernameId = useId();
const editEmailId = useId();
const editFirstNameId = useId();
@@ -607,7 +613,6 @@ const EditUserModal = ({ user, isOpen, onClose, onUserUpdated, roles }) => {
role: user?.role || "user",
is_active: user?.is_active ?? true,
});
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState("");
const [success, setSuccess] = useState(false);
@@ -635,22 +640,18 @@ const EditUserModal = ({ user, isOpen, onClose, onUserUpdated, roles }) => {
const handleSubmit = async (e) => {
e.preventDefault();
setIsLoading(true);
setError("");
setSuccess(false);
try {
await adminUsersAPI.update(user.id, formData);
await onUpdateUser({ id: user.id, data: formData });
setSuccess(true);
onUserUpdated();
// Auto-close after 1.5 seconds
setTimeout(() => {
onClose();
}, 1500);
} catch (err) {
setError(err.response?.data?.error || "Failed to update user");
} finally {
setIsLoading(false);
}
};
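This hunk moves the update call out of the modal and into a mutation passed down as onUpdateUser, with isLoading supplied by the parent. The parent-side definition sits outside the hunk, but based on the removed lines it presumably looks something like the sketch below.
// Hedged sketch of updateUserMutation in UsersTab (actual definition not shown
// in this diff; the endpoint and invalidation are inferred from the removed lines).
const updateUserMutation = useMutation({
  mutationFn: ({ id, data }) => adminUsersAPI.update(id, data),
  onSuccess: () => {
    queryClient.invalidateQueries({ queryKey: ["users"] });
  },
});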

View File

@@ -128,12 +128,14 @@ const VersionUpdateTab = () => {
<span className="text-lg font-mono text-secondary-900 dark:text-white">
{versionInfo.github.latestRelease.tagName}
</span>
<div className="text-xs text-secondary-500 dark:text-secondary-400">
Published:{" "}
{new Date(
versionInfo.github.latestRelease.publishedAt,
).toLocaleDateString()}
</div>
{versionInfo.github.latestRelease.publishedAt && (
<div className="text-xs text-secondary-500 dark:text-secondary-400">
Published:{" "}
{new Date(
versionInfo.github.latestRelease.publishedAt,
).toLocaleDateString()}
</div>
)}
</div>
</div>
)}

View File

@@ -0,0 +1,581 @@
import { useQuery } from "@tanstack/react-query";
import {
Activity,
AlertCircle,
ArrowDown,
ArrowUp,
ArrowUpDown,
Bot,
CheckCircle,
Clock,
Play,
RefreshCw,
Settings,
XCircle,
Zap,
} from "lucide-react";
import { useEffect, useState } from "react";
import api from "../utils/api";
const Automation = () => {
const [activeTab, setActiveTab] = useState("overview");
const [sortField, setSortField] = useState("nextRunTimestamp");
const [sortDirection, setSortDirection] = useState("asc");
// Fetch automation overview data
const { data: overview, isLoading: overviewLoading } = useQuery({
queryKey: ["automation-overview"],
queryFn: async () => {
const response = await api.get("/automation/overview");
return response.data.data;
},
refetchInterval: 30000, // Refresh every 30 seconds
});
// Fetch queue statistics
const { data: queueStats, isLoading: statsLoading } = useQuery({
queryKey: ["automation-stats"],
queryFn: async () => {
const response = await api.get("/automation/stats");
return response.data.data;
},
refetchInterval: 30000,
});
// Fetch recent jobs
const { data: recentJobs, isLoading: jobsLoading } = useQuery({
queryKey: ["automation-jobs"],
queryFn: async () => {
const jobs = await Promise.all([
api
.get("/automation/jobs/github-update-check?limit=5")
.then((r) => r.data.data || []),
api
.get("/automation/jobs/session-cleanup?limit=5")
.then((r) => r.data.data || []),
]);
return {
githubUpdate: jobs[0],
sessionCleanup: jobs[1],
};
},
refetchInterval: 30000,
});
const getStatusIcon = (status) => {
switch (status) {
case "completed":
return <CheckCircle className="h-4 w-4 text-green-500" />;
case "failed":
return <XCircle className="h-4 w-4 text-red-500" />;
case "active":
return <Activity className="h-4 w-4 text-blue-500 animate-pulse" />;
default:
return <Clock className="h-4 w-4 text-gray-500" />;
}
};
const getStatusColor = (status) => {
switch (status) {
case "completed":
return "bg-green-100 text-green-800";
case "failed":
return "bg-red-100 text-red-800";
case "active":
return "bg-blue-100 text-blue-800";
default:
return "bg-gray-100 text-gray-800";
}
};
const formatDate = (dateString) => {
if (!dateString) return "N/A";
return new Date(dateString).toLocaleString();
};
const formatDuration = (ms) => {
if (!ms) return "N/A";
return `${ms}ms`;
};
const getStatusBadge = (status) => {
switch (status) {
case "Success":
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-green-100 text-green-800">
Success
</span>
);
case "Failed":
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-red-100 text-red-800">
Failed
</span>
);
case "Never run":
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
Never run
</span>
);
default:
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
{status}
</span>
);
}
};
const getNextRunTime = (schedule, lastRun) => {
if (schedule === "Manual only") return "Manual trigger only";
if (schedule === "Daily at midnight") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(0, 0, 0, 0);
return tomorrow.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
if (schedule === "Daily at 2 AM") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(2, 0, 0, 0);
return tomorrow.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
if (schedule === "Every hour") {
const now = new Date();
const nextHour = new Date(now);
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
return nextHour.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
return "Unknown";
};
const getNextRunTimestamp = (schedule) => {
if (schedule === "Manual only") return Number.MAX_SAFE_INTEGER; // Manual tasks go to bottom
if (schedule === "Daily at midnight") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(0, 0, 0, 0);
return tomorrow.getTime();
}
if (schedule === "Daily at 2 AM") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(2, 0, 0, 0);
return tomorrow.getTime();
}
if (schedule === "Every hour") {
const now = new Date();
const nextHour = new Date(now);
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
return nextHour.getTime();
}
return Number.MAX_SAFE_INTEGER; // Unknown schedules go to bottom
};
const triggerManualJob = async (jobType, data = {}) => {
try {
let endpoint;
if (jobType === "github") {
endpoint = "/automation/trigger/github-update";
} else if (jobType === "sessions") {
endpoint = "/automation/trigger/session-cleanup";
} else if (jobType === "echo") {
endpoint = "/automation/trigger/echo-hello";
} else if (jobType === "orphaned-repos") {
endpoint = "/automation/trigger/orphaned-repo-cleanup";
}
const response = await api.post(endpoint, data);
// Refresh data
window.location.reload();
} catch (error) {
console.error("Error triggering job:", error);
alert(
"Failed to trigger job: " +
(error.response?.data?.error || error.message),
);
}
};
const handleSort = (field) => {
if (sortField === field) {
setSortDirection(sortDirection === "asc" ? "desc" : "asc");
} else {
setSortField(field);
setSortDirection("asc");
}
};
const getSortIcon = (field) => {
if (sortField !== field) return <ArrowUpDown className="h-4 w-4" />;
return sortDirection === "asc" ? (
<ArrowUp className="h-4 w-4" />
) : (
<ArrowDown className="h-4 w-4" />
);
};
// Sort automations based on current sort settings
const sortedAutomations = overview?.automations
? [...overview.automations].sort((a, b) => {
let aValue, bValue;
switch (sortField) {
case "name":
aValue = a.name.toLowerCase();
bValue = b.name.toLowerCase();
break;
case "schedule":
aValue = a.schedule.toLowerCase();
bValue = b.schedule.toLowerCase();
break;
case "lastRun":
// Convert "Never" to empty string for proper sorting
aValue = a.lastRun === "Never" ? "" : a.lastRun;
bValue = b.lastRun === "Never" ? "" : b.lastRun;
break;
case "lastRunTimestamp":
aValue = a.lastRunTimestamp || 0;
bValue = b.lastRunTimestamp || 0;
break;
case "nextRunTimestamp":
aValue = getNextRunTimestamp(a.schedule);
bValue = getNextRunTimestamp(b.schedule);
break;
case "status":
aValue = a.status.toLowerCase();
bValue = b.status.toLowerCase();
break;
default:
aValue = a[sortField];
bValue = b[sortField];
}
if (aValue < bValue) return sortDirection === "asc" ? -1 : 1;
if (aValue > bValue) return sortDirection === "asc" ? 1 : -1;
return 0;
})
: [];
const tabs = [{ id: "overview", name: "Overview", icon: Settings }];
return (
<div className="space-y-6">
{/* Page Header */}
<div className="flex items-center justify-between">
<div>
<h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
Automation Management
</h1>
<p className="text-sm text-secondary-600 dark:text-secondary-400 mt-1">
Monitor and manage automated server operations, agent
communications, and patch deployments
</p>
</div>
<div className="flex items-center gap-3">
<button
type="button"
onClick={() => triggerManualJob("github")}
className="btn-outline flex items-center gap-2"
title="Trigger manual GitHub update check"
>
<RefreshCw className="h-4 w-4" />
Check Updates
</button>
<button
type="button"
onClick={() => triggerManualJob("sessions")}
className="btn-outline flex items-center gap-2"
title="Trigger manual session cleanup"
>
<RefreshCw className="h-4 w-4" />
Clean Sessions
</button>
<button
type="button"
onClick={() =>
triggerManualJob("echo", {
message: "Hello from Automation Page!",
})
}
className="btn-outline flex items-center gap-2"
title="Trigger echo hello task"
>
<RefreshCw className="h-4 w-4" />
Echo Hello
</button>
</div>
</div>
{/* Stats Cards */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
{/* Scheduled Tasks Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Clock className="h-5 w-5 text-warning-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Scheduled Tasks
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.scheduledTasks || 0}
</p>
</div>
</div>
</div>
{/* Running Tasks Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Play className="h-5 w-5 text-success-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Running Tasks
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.runningTasks || 0}
</p>
</div>
</div>
</div>
{/* Failed Tasks Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<XCircle className="h-5 w-5 text-red-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Failed Tasks
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.failedTasks || 0}
</p>
</div>
</div>
</div>
{/* Total Task Runs Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Zap className="h-5 w-5 text-secondary-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Total Task Runs
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.totalAutomations || 0}
</p>
</div>
</div>
</div>
</div>
{/* Tabs */}
<div className="mb-6">
<div className="border-b border-gray-200 dark:border-gray-700">
<nav className="-mb-px flex space-x-8">
{tabs.map((tab) => (
<button
type="button"
key={tab.id}
onClick={() => setActiveTab(tab.id)}
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
activeTab === tab.id
? "border-blue-500 text-blue-600 dark:text-blue-400"
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
}`}
>
<tab.icon className="h-4 w-4" />
{tab.name}
</button>
))}
</nav>
</div>
</div>
{/* Tab Content */}
{activeTab === "overview" && (
<div className="card p-6">
{overviewLoading ? (
<div className="text-center py-8">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600 mx-auto"></div>
<p className="mt-2 text-sm text-secondary-500">
Loading automations...
</p>
</div>
) : (
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
<thead className="bg-secondary-50 dark:bg-secondary-700">
<tr>
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
Run
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("name")}
>
<div className="flex items-center gap-1">
Task
{getSortIcon("name")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("schedule")}
>
<div className="flex items-center gap-1">
Frequency
{getSortIcon("schedule")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("lastRunTimestamp")}
>
<div className="flex items-center gap-1">
Last Run
{getSortIcon("lastRunTimestamp")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("nextRunTimestamp")}
>
<div className="flex items-center gap-1">
Next Run
{getSortIcon("nextRunTimestamp")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("status")}
>
<div className="flex items-center gap-1">
Status
{getSortIcon("status")}
</div>
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
{sortedAutomations.map((automation) => (
<tr
key={automation.queue}
className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
>
<td className="px-4 py-2 whitespace-nowrap">
{automation.schedule !== "Manual only" ? (
<button
type="button"
onClick={() => {
if (automation.queue.includes("github")) {
triggerManualJob("github");
} else if (automation.queue.includes("session")) {
triggerManualJob("sessions");
} else if (automation.queue.includes("echo")) {
triggerManualJob("echo", {
message: "Manual trigger from table",
});
} else if (
automation.queue.includes("orphaned-repo")
) {
triggerManualJob("orphaned-repos");
}
}}
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
title="Run Now"
>
<Play className="h-3 w-3" />
</button>
) : (
<button
type="button"
onClick={() => {
if (automation.queue.includes("echo")) {
triggerManualJob("echo", {
message: "Manual trigger from table",
});
}
}}
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
title="Trigger"
>
<Play className="h-3 w-3" />
</button>
)}
</td>
<td className="px-4 py-2 whitespace-nowrap">
<div>
<div className="text-sm font-medium text-secondary-900 dark:text-white">
{automation.name}
</div>
<div className="text-xs text-secondary-500 dark:text-secondary-400">
{automation.description}
</div>
</div>
</td>
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
{automation.schedule}
</td>
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
{automation.lastRun}
</td>
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
{getNextRunTime(
automation.schedule,
automation.lastRun,
)}
</td>
<td className="px-4 py-2 whitespace-nowrap">
{getStatusBadge(automation.status)}
</td>
</tr>
))}
</tbody>
</table>
</div>
)}
</div>
)}
</div>
);
};
export default Automation;
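triggerManualJob above refreshes the whole page with window.location.reload() after posting. A gentler alternative, sketched here only as a design note, would be to invalidate the query keys already defined in this file so React Query refetches them in place; it assumes the component obtains a queryClient via useQueryClient() from @tanstack/react-query.
// Sketch: a reload-free variant of triggerManualJob, assuming it sits inside
// the Automation component next to: const queryClient = useQueryClient();
const triggerAndRefresh = async (endpoint, data = {}) => {
  await api.post(endpoint, data);
  for (const key of ["automation-overview", "automation-stats", "automation-jobs"]) {
    queryClient.invalidateQueries({ queryKey: [key] });
  }
};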

File diff suppressed because it is too large

View File

@@ -8,6 +8,7 @@ import {
Clock,
Copy,
Cpu,
Database,
Eye,
EyeOff,
HardDrive,
@@ -31,6 +32,7 @@ import {
dashboardAPI,
formatDate,
formatRelativeTime,
repositoryAPI,
settingsAPI,
} from "../utils/api";
import { OSIcon } from "../utils/osIcons.jsx";
@@ -42,7 +44,6 @@ const HostDetail = () => {
const [showCredentialsModal, setShowCredentialsModal] = useState(false);
const [showDeleteModal, setShowDeleteModal] = useState(false);
const [activeTab, setActiveTab] = useState("host");
const [_forceInstall, _setForceInstall] = useState(false);
const [historyPage, setHistoryPage] = useState(0);
const [historyLimit] = useState(10);
@@ -65,6 +66,15 @@ const HostDetail = () => {
refetchOnWindowFocus: false, // Don't refetch when window regains focus
});
// Fetch repository count for this host
const { data: repositories, isLoading: isLoadingRepos } = useQuery({
queryKey: ["host-repositories", hostId],
queryFn: () => repositoryAPI.getByHost(hostId).then((res) => res.data),
staleTime: 5 * 60 * 1000, // 5 minutes - data stays fresh longer
refetchOnWindowFocus: false, // Don't refetch when window regains focus
enabled: !!hostId,
});
// Tab change handler
const handleTabChange = (tabName) => {
setActiveTab(tabName);
@@ -291,7 +301,7 @@ const HostDetail = () => {
</div>
{/* Package Statistics Cards */}
<div className="grid grid-cols-1 sm:grid-cols-3 gap-4 mb-6">
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4 mb-6">
<button
type="button"
onClick={() => navigate(`/packages?host=${hostId}`)}
@@ -348,6 +358,25 @@ const HostDetail = () => {
</div>
</div>
</button>
<button
type="button"
onClick={() => navigate(`/repositories?host=${hostId}`)}
className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
title="View repositories for this host"
>
<div className="flex items-center">
<Database className="h-5 w-5 text-blue-600 mr-2" />
<div>
<p className="text-sm text-secondary-500 dark:text-white">
Repos
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{isLoadingRepos ? "..." : repositories?.length || 0}
</p>
</div>
</div>
</button>
</div>
{/* Main Content - Full Width */}

View File

@@ -1,699 +0,0 @@
import {
Activity,
AlertCircle,
CheckCircle,
Clock,
Download,
Eye,
Filter,
Package,
Pause,
Play,
RefreshCw,
Search,
Server,
XCircle,
} from "lucide-react";
import { useState } from "react";
const Queue = () => {
const [activeTab, setActiveTab] = useState("server");
const [filterStatus, setFilterStatus] = useState("all");
const [searchQuery, setSearchQuery] = useState("");
// Mock data for demonstration
const serverQueueData = [
{
id: 1,
type: "Server Update Check",
description: "Check for server updates from GitHub",
status: "running",
priority: "high",
createdAt: "2024-01-15 10:30:00",
estimatedCompletion: "2024-01-15 10:35:00",
progress: 75,
retryCount: 0,
maxRetries: 3,
},
{
id: 2,
type: "Session Cleanup",
description: "Clear expired login sessions",
status: "pending",
priority: "medium",
createdAt: "2024-01-15 10:25:00",
estimatedCompletion: "2024-01-15 10:40:00",
progress: 0,
retryCount: 0,
maxRetries: 2,
},
{
id: 3,
type: "Database Optimization",
description: "Optimize database indexes and cleanup old records",
status: "completed",
priority: "low",
createdAt: "2024-01-15 09:00:00",
completedAt: "2024-01-15 09:45:00",
progress: 100,
retryCount: 0,
maxRetries: 1,
},
{
id: 4,
type: "Backup Creation",
description: "Create system backup",
status: "failed",
priority: "high",
createdAt: "2024-01-15 08:00:00",
errorMessage: "Insufficient disk space",
progress: 45,
retryCount: 2,
maxRetries: 3,
},
];
const agentQueueData = [
{
id: 1,
hostname: "web-server-01",
ip: "192.168.1.100",
type: "Agent Update Collection",
description: "Agent v1.2.7 → v1.2.8",
status: "pending",
priority: "medium",
lastCommunication: "2024-01-15 10:00:00",
nextExpectedCommunication: "2024-01-15 11:00:00",
currentVersion: "1.2.7",
targetVersion: "1.2.8",
retryCount: 0,
maxRetries: 5,
},
{
id: 2,
hostname: "db-server-02",
ip: "192.168.1.101",
type: "Data Collection",
description: "Collect package and system information",
status: "running",
priority: "high",
lastCommunication: "2024-01-15 10:15:00",
nextExpectedCommunication: "2024-01-15 11:15:00",
currentVersion: "1.2.8",
targetVersion: "1.2.8",
retryCount: 0,
maxRetries: 3,
},
{
id: 3,
hostname: "app-server-03",
ip: "192.168.1.102",
type: "Agent Update Collection",
description: "Agent v1.2.6 → v1.2.8",
status: "completed",
priority: "low",
lastCommunication: "2024-01-15 09:30:00",
completedAt: "2024-01-15 09:45:00",
currentVersion: "1.2.8",
targetVersion: "1.2.8",
retryCount: 0,
maxRetries: 5,
},
{
id: 4,
hostname: "test-server-04",
ip: "192.168.1.103",
type: "Data Collection",
description: "Collect package and system information",
status: "failed",
priority: "medium",
lastCommunication: "2024-01-15 08:00:00",
errorMessage: "Connection timeout",
retryCount: 3,
maxRetries: 3,
},
];
const patchQueueData = [
{
id: 1,
hostname: "web-server-01",
ip: "192.168.1.100",
packages: ["nginx", "openssl", "curl"],
type: "Security Updates",
description: "Apply critical security patches",
status: "pending",
priority: "high",
scheduledFor: "2024-01-15 19:00:00",
lastCommunication: "2024-01-15 18:00:00",
nextExpectedCommunication: "2024-01-15 19:00:00",
retryCount: 0,
maxRetries: 3,
},
{
id: 2,
hostname: "db-server-02",
ip: "192.168.1.101",
packages: ["postgresql", "python3"],
type: "Feature Updates",
description: "Update database and Python packages",
status: "running",
priority: "medium",
scheduledFor: "2024-01-15 20:00:00",
lastCommunication: "2024-01-15 19:15:00",
nextExpectedCommunication: "2024-01-15 20:15:00",
retryCount: 0,
maxRetries: 2,
},
{
id: 3,
hostname: "app-server-03",
ip: "192.168.1.102",
packages: ["nodejs", "npm"],
type: "Maintenance Updates",
description: "Update Node.js and npm packages",
status: "completed",
priority: "low",
scheduledFor: "2024-01-15 18:30:00",
completedAt: "2024-01-15 18:45:00",
retryCount: 0,
maxRetries: 2,
},
{
id: 4,
hostname: "test-server-04",
ip: "192.168.1.103",
packages: ["docker", "docker-compose"],
type: "Security Updates",
description: "Update Docker components",
status: "failed",
priority: "high",
scheduledFor: "2024-01-15 17:00:00",
errorMessage: "Package conflicts detected",
retryCount: 2,
maxRetries: 3,
},
];
const getStatusIcon = (status) => {
switch (status) {
case "running":
return <RefreshCw className="h-4 w-4 text-blue-500 animate-spin" />;
case "completed":
return <CheckCircle className="h-4 w-4 text-green-500" />;
case "failed":
return <XCircle className="h-4 w-4 text-red-500" />;
case "pending":
return <Clock className="h-4 w-4 text-yellow-500" />;
case "paused":
return <Pause className="h-4 w-4 text-gray-500" />;
default:
return <AlertCircle className="h-4 w-4 text-gray-500" />;
}
};
const getStatusColor = (status) => {
switch (status) {
case "running":
return "bg-blue-100 text-blue-800";
case "completed":
return "bg-green-100 text-green-800";
case "failed":
return "bg-red-100 text-red-800";
case "pending":
return "bg-yellow-100 text-yellow-800";
case "paused":
return "bg-gray-100 text-gray-800";
default:
return "bg-gray-100 text-gray-800";
}
};
const getPriorityColor = (priority) => {
switch (priority) {
case "high":
return "bg-red-100 text-red-800";
case "medium":
return "bg-yellow-100 text-yellow-800";
case "low":
return "bg-green-100 text-green-800";
default:
return "bg-gray-100 text-gray-800";
}
};
const filteredData = (data) => {
let filtered = data;
if (filterStatus !== "all") {
filtered = filtered.filter((item) => item.status === filterStatus);
}
if (searchQuery) {
filtered = filtered.filter(
(item) =>
item.hostname?.toLowerCase().includes(searchQuery.toLowerCase()) ||
item.type?.toLowerCase().includes(searchQuery.toLowerCase()) ||
item.description?.toLowerCase().includes(searchQuery.toLowerCase()),
);
}
return filtered;
};
const tabs = [
{
id: "server",
name: "Server Queue",
icon: Server,
data: serverQueueData,
count: serverQueueData.length,
},
{
id: "agent",
name: "Agent Queue",
icon: Download,
data: agentQueueData,
count: agentQueueData.length,
},
{
id: "patch",
name: "Patch Management",
icon: Package,
data: patchQueueData,
count: patchQueueData.length,
},
];
const renderServerQueueItem = (item) => (
<div
key={item.id}
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
>
<div className="flex items-start justify-between">
<div className="flex-1">
<div className="flex items-center gap-3 mb-2">
{getStatusIcon(item.status)}
<h3 className="font-medium text-gray-900 dark:text-white">
{item.type}
</h3>
<span
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
>
{item.status}
</span>
<span
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
>
{item.priority}
</span>
</div>
<p className="text-sm text-gray-600 dark:text-gray-400 mb-3">
{item.description}
</p>
{item.status === "running" && (
<div className="mb-3">
<div className="flex justify-between text-xs text-gray-500 mb-1">
<span>Progress</span>
<span>{item.progress}%</span>
</div>
<div className="w-full bg-gray-200 rounded-full h-2">
<div
className="bg-blue-600 h-2 rounded-full transition-all duration-300"
style={{ width: `${item.progress}%` }}
></div>
</div>
</div>
)}
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
<div>
<span className="font-medium">Created:</span> {item.createdAt}
</div>
{item.status === "running" && (
<div>
<span className="font-medium">ETA:</span>{" "}
{item.estimatedCompletion}
</div>
)}
{item.status === "completed" && (
<div>
<span className="font-medium">Completed:</span>{" "}
{item.completedAt}
</div>
)}
{item.status === "failed" && (
<div className="col-span-2">
<span className="font-medium">Error:</span> {item.errorMessage}
</div>
)}
</div>
{item.retryCount > 0 && (
<div className="mt-2 text-xs text-orange-600">
Retries: {item.retryCount}/{item.maxRetries}
</div>
)}
</div>
<div className="flex gap-2 ml-4">
{item.status === "running" && (
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<Pause className="h-4 w-4" />
</button>
)}
{item.status === "paused" && (
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<Play className="h-4 w-4" />
</button>
)}
{item.status === "failed" && (
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<RefreshCw className="h-4 w-4" />
</button>
)}
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<Eye className="h-4 w-4" />
</button>
</div>
</div>
</div>
);
const renderAgentQueueItem = (item) => (
<div
key={item.id}
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
>
<div className="flex items-start justify-between">
<div className="flex-1">
<div className="flex items-center gap-3 mb-2">
{getStatusIcon(item.status)}
<h3 className="font-medium text-gray-900 dark:text-white">
{item.hostname}
</h3>
<span
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
>
{item.status}
</span>
<span
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
>
{item.priority}
</span>
</div>
<p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
{item.type}
</p>
<p className="text-sm text-gray-500 mb-3">{item.description}</p>
{item.type === "Agent Update Collection" && (
<div className="mb-3 p-2 bg-gray-50 dark:bg-gray-700 rounded">
<div className="text-xs text-gray-600 dark:text-gray-400">
<span className="font-medium">Version:</span>{" "}
{item.currentVersion} → {item.targetVersion}
</div>
</div>
)}
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
<div>
<span className="font-medium">IP:</span> {item.ip}
</div>
<div>
<span className="font-medium">Last Comm:</span>{" "}
{item.lastCommunication}
</div>
<div>
<span className="font-medium">Next Expected:</span>{" "}
{item.nextExpectedCommunication}
</div>
{item.status === "completed" && (
<div>
<span className="font-medium">Completed:</span>{" "}
{item.completedAt}
</div>
)}
{item.status === "failed" && (
<div className="col-span-2">
<span className="font-medium">Error:</span> {item.errorMessage}
</div>
)}
</div>
{item.retryCount > 0 && (
<div className="mt-2 text-xs text-orange-600">
Retries: {item.retryCount}/{item.maxRetries}
</div>
)}
</div>
<div className="flex gap-2 ml-4">
{item.status === "failed" && (
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<RefreshCw className="h-4 w-4" />
</button>
)}
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<Eye className="h-4 w-4" />
</button>
</div>
</div>
</div>
);
const renderPatchQueueItem = (item) => (
<div
key={item.id}
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
>
<div className="flex items-start justify-between">
<div className="flex-1">
<div className="flex items-center gap-3 mb-2">
{getStatusIcon(item.status)}
<h3 className="font-medium text-gray-900 dark:text-white">
{item.hostname}
</h3>
<span
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
>
{item.status}
</span>
<span
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
>
{item.priority}
</span>
</div>
<p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
{item.type}
</p>
<p className="text-sm text-gray-500 mb-3">{item.description}</p>
<div className="mb-3">
<div className="text-xs text-gray-600 dark:text-gray-400 mb-1">
<span className="font-medium">Packages:</span>
</div>
<div className="flex flex-wrap gap-1">
{item.packages.map((pkg) => (
<span
key={pkg}
className="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded"
>
{pkg}
</span>
))}
</div>
</div>
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
<div>
<span className="font-medium">IP:</span> {item.ip}
</div>
<div>
<span className="font-medium">Scheduled:</span>{" "}
{item.scheduledFor}
</div>
<div>
<span className="font-medium">Last Comm:</span>{" "}
{item.lastCommunication}
</div>
<div>
<span className="font-medium">Next Expected:</span>{" "}
{item.nextExpectedCommunication}
</div>
{item.status === "completed" && (
<div>
<span className="font-medium">Completed:</span>{" "}
{item.completedAt}
</div>
)}
{item.status === "failed" && (
<div className="col-span-2">
<span className="font-medium">Error:</span> {item.errorMessage}
</div>
)}
</div>
{item.retryCount > 0 && (
<div className="mt-2 text-xs text-orange-600">
Retries: {item.retryCount}/{item.maxRetries}
</div>
)}
</div>
<div className="flex gap-2 ml-4">
{item.status === "failed" && (
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<RefreshCw className="h-4 w-4" />
</button>
)}
<button
type="button"
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
>
<Eye className="h-4 w-4" />
</button>
</div>
</div>
</div>
);
const currentTab = tabs.find((tab) => tab.id === activeTab);
const filteredItems = filteredData(currentTab?.data || []);
return (
<div className="min-h-screen bg-gray-50 dark:bg-gray-900">
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
{/* Header */}
<div className="mb-8">
<h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">
Queue Management
</h1>
<p className="text-gray-600 dark:text-gray-400">
Monitor and manage server operations, agent communications, and
patch deployments
</p>
</div>
{/* Tabs */}
<div className="mb-6">
<div className="border-b border-gray-200 dark:border-gray-700">
<nav className="-mb-px flex space-x-8">
{tabs.map((tab) => (
<button
type="button"
key={tab.id}
onClick={() => setActiveTab(tab.id)}
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
activeTab === tab.id
? "border-blue-500 text-blue-600 dark:text-blue-400"
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
}`}
>
<tab.icon className="h-4 w-4" />
{tab.name}
<span className="bg-gray-100 dark:bg-gray-700 text-gray-600 dark:text-gray-300 px-2 py-0.5 rounded-full text-xs">
{tab.count}
</span>
</button>
))}
</nav>
</div>
</div>
{/* Filters and Search */}
<div className="mb-6 flex flex-col sm:flex-row gap-4">
<div className="flex-1">
<div className="relative">
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 text-gray-400 h-4 w-4" />
<input
type="text"
placeholder="Search queues..."
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="w-full pl-10 pr-4 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
/>
</div>
</div>
<div className="flex gap-2">
<select
value={filterStatus}
onChange={(e) => setFilterStatus(e.target.value)}
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
>
<option value="all">All Status</option>
<option value="pending">Pending</option>
<option value="running">Running</option>
<option value="completed">Completed</option>
<option value="failed">Failed</option>
<option value="paused">Paused</option>
</select>
<button
type="button"
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white hover:bg-gray-50 dark:hover:bg-gray-700 flex items-center gap-2"
>
<Filter className="h-4 w-4" />
More Filters
</button>
</div>
</div>
{/* Queue Items */}
<div className="space-y-4">
{filteredItems.length === 0 ? (
<div className="text-center py-12">
<Activity className="mx-auto h-12 w-12 text-gray-400" />
<h3 className="mt-2 text-sm font-medium text-gray-900 dark:text-white">
No queue items found
</h3>
<p className="mt-1 text-sm text-gray-500 dark:text-gray-400">
{searchQuery
? "Try adjusting your search criteria"
: "No items match the current filters"}
</p>
</div>
) : (
filteredItems.map((item) => {
switch (activeTab) {
case "server":
return renderServerQueueItem(item);
case "agent":
return renderAgentQueueItem(item);
case "patch":
return renderPatchQueueItem(item);
default:
return null;
}
})
)}
</div>
</div>
</div>
);
};
export default Queue;

View File

@@ -18,21 +18,31 @@ import {
Unlock,
X,
} from "lucide-react";
import { useMemo, useState } from "react";
import { useNavigate } from "react-router-dom";
import { repositoryAPI } from "../utils/api";
import { useEffect, useMemo, useState } from "react";
import { useNavigate, useSearchParams } from "react-router-dom";
import { dashboardAPI, repositoryAPI } from "../utils/api";
const Repositories = () => {
const queryClient = useQueryClient();
const navigate = useNavigate();
const [searchParams] = useSearchParams();
const [searchTerm, setSearchTerm] = useState("");
const [filterType, setFilterType] = useState("all"); // all, secure, insecure
const [filterStatus, setFilterStatus] = useState("all"); // all, active, inactive
const [hostFilter, setHostFilter] = useState("");
const [sortField, setSortField] = useState("name");
const [sortDirection, setSortDirection] = useState("asc");
const [showColumnSettings, setShowColumnSettings] = useState(false);
const [deleteModalData, setDeleteModalData] = useState(null);
// Handle host filter from URL parameter
useEffect(() => {
const hostParam = searchParams.get("host");
if (hostParam) {
setHostFilter(hostParam);
}
}, [searchParams]);
// Column configuration
const [columnConfig, setColumnConfig] = useState(() => {
const defaultConfig = [
@@ -82,6 +92,17 @@ const Repositories = () => {
queryFn: () => repositoryAPI.getStats().then((res) => res.data),
});
// Fetch host information when filtering by host
const { data: hosts } = useQuery({
queryKey: ["hosts"],
queryFn: () => dashboardAPI.getHosts().then((res) => res.data),
staleTime: 5 * 60 * 1000,
enabled: !!hostFilter,
});
// Get the filtered host information
const filteredHost = hosts?.find((host) => host.id === hostFilter);
// Delete repository mutation
const deleteRepositoryMutation = useMutation({
mutationFn: (repositoryId) => repositoryAPI.delete(repositoryId),
@@ -202,7 +223,11 @@ const Repositories = () => {
(filterStatus === "active" && repo.is_active === true) ||
(filterStatus === "inactive" && repo.is_active === false);
return matchesSearch && matchesType && matchesStatus;
// Filter by host if hostFilter is set
const matchesHost =
!hostFilter || repo.hosts?.some((host) => host.id === hostFilter);
return matchesSearch && matchesType && matchesStatus && matchesHost;
});
// Sort repositories
@@ -237,6 +262,7 @@ const Repositories = () => {
filterStatus,
sortField,
sortDirection,
hostFilter,
]);
if (isLoading) {
@@ -421,6 +447,31 @@ const Repositories = () => {
</div>
</div>
{/* Host Filter Indicator */}
{hostFilter && filteredHost && (
<div className="flex items-center gap-2 px-3 py-2 bg-primary-50 dark:bg-primary-900 border border-primary-200 dark:border-primary-700 rounded-md">
<Server className="h-4 w-4 text-primary-600 dark:text-primary-400" />
<span className="text-sm text-primary-700 dark:text-primary-300">
Filtered by: {filteredHost.friendly_name}
</span>
<button
type="button"
onClick={() => {
setHostFilter("");
// Update URL to remove host parameter
const newSearchParams = new URLSearchParams(searchParams);
newSearchParams.delete("host");
navigate(`/repositories?${newSearchParams.toString()}`, {
replace: true,
});
}}
className="text-primary-500 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-200"
>
<X className="h-4 w-4" />
</button>
</div>
)}
{/* Security Filter */}
<div className="sm:w-48">
<select
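
The hunks above teach Repositories.jsx to honour a `host` query parameter: it is read via useSearchParams, applied as `matchesHost`, and surfaced as a removable "Filtered by" chip. For context, a minimal sketch of how another page could deep-link into that filtered view follows; only the `?host=<id>` parameter itself comes from the diff, while the component name, route usage and the `host` prop shape (id, friendly_name) are assumptions of the sketch.

import { Link } from "react-router-dom";

// Minimal sketch: deep-link from a host page into the repositories list,
// pre-filtered to that host. Repositories.jsx picks the "host" query
// parameter up via useSearchParams (see the useEffect added above).
// The host prop shape (id, friendly_name) is an assumption.
const HostRepositoriesLink = ({ host }) => (
	<Link to={`/repositories?host=${encodeURIComponent(host.id)}`}>
		View repositories for {host.friendly_name}
	</Link>
);

export default HostRepositoriesLink;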

ContainerDetail.jsx
View File

@@ -0,0 +1,389 @@
import { useQuery } from "@tanstack/react-query";
import {
AlertTriangle,
ArrowLeft,
CheckCircle,
Container,
ExternalLink,
RefreshCw,
Server,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";
const ContainerDetail = () => {
const { id } = useParams();
const { data, isLoading, error } = useQuery({
queryKey: ["docker", "container", id],
queryFn: async () => {
const response = await api.get(`/docker/containers/${id}`);
return response.data;
},
refetchInterval: 30000,
});
const container = data?.container;
const similarContainers = data?.similarContainers || [];
if (isLoading) {
return (
<div className="flex items-center justify-center min-h-screen">
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
</div>
);
}
if (error || !container) {
return (
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-red-400" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Container not found
</h3>
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
The container you're looking for doesn't exist or has been
removed.
</p>
</div>
</div>
</div>
<Link
to="/docker"
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
</div>
);
}
const getStatusBadge = (status) => {
const statusClasses = {
running:
"bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
exited: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
paused:
"bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200",
restarting:
"bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200",
};
return (
<span
className={`inline-flex items-center px-3 py-1 rounded-full text-sm font-medium ${
statusClasses[status] ||
"bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200"
}`}
>
{status}
</span>
);
};
return (
<div className="space-y-6">
{/* Header */}
<div>
<Link
to="/docker"
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
<div className="flex items-center">
<Container className="h-8 w-8 text-secondary-400 mr-3" />
<div>
<div className="flex items-center gap-3">
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
{container.name}
</h1>
{getStatusBadge(container.status)}
</div>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
Container ID: {container.container_id.substring(0, 12)}
</p>
</div>
</div>
</div>
{/* Overview Cards */}
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
{/* Update Status Card */}
{container.docker_images?.docker_image_updates &&
container.docker_images.docker_image_updates.length > 0 ? (
<div className="card p-4 bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800">
<div className="flex items-center">
<div className="flex-shrink-0">
<AlertTriangle className="h-5 w-5 text-yellow-600 dark:text-yellow-400 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-yellow-200">
Update Available
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-yellow-100 truncate">
{
container.docker_images.docker_image_updates[0]
.available_tag
}
</p>
</div>
</div>
</div>
) : (
<div className="card p-4 bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800">
<div className="flex items-center">
<div className="flex-shrink-0">
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-green-200">
Update Status
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-green-100">
Up to date
</p>
</div>
</div>
</div>
)}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Server className="h-5 w-5 text-purple-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">Host</p>
<Link
to={`/hosts/${container.host?.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 truncate block"
>
{container.host?.friendly_name || container.host?.hostname}
</Link>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-green-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
State
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-white">
{container.state || container.status}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Last Checked
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-white">
{formatRelativeTime(container.last_checked)}
</p>
</div>
</div>
</div>
</div>
{/* Container and Image Information - Side by Side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
{/* Container Details */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Container Information
</h3>
</div>
<div className="px-6 py-5">
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Container ID
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
{container.container_id}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Image Tag
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{container.image_tag}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Created
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{formatRelativeTime(container.created_at)}
</dd>
</div>
{container.started_at && (
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Started
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{formatRelativeTime(container.started_at)}
</dd>
</div>
)}
{container.ports && Object.keys(container.ports).length > 0 && (
<div className="sm:col-span-2">
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Port Mappings
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
<div className="flex flex-wrap gap-2">
{Object.entries(container.ports).map(([key, value]) => (
<span
key={key}
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200"
>
{key} {value}
</span>
))}
</div>
</dd>
</div>
)}
</div>
</div>
</div>
{/* Image Information */}
{container.docker_images && (
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Image Information
</h3>
</div>
<div className="px-6 py-5">
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Repository
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
<Link
to={`/docker/images/${container.docker_images.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
{container.docker_images.repository}
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Tag
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{container.docker_images.tag}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Source
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{container.docker_images.source}
</dd>
</div>
{container.docker_images.size_bytes && (
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Size
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{(
Number(container.docker_images.size_bytes) /
1024 /
1024
).toFixed(2)}{" "}
MB
</dd>
</div>
)}
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Image ID
</dt>
<dd className="mt-1 text-xs text-secondary-900 dark:text-white font-mono break-all">
{container.docker_images.image_id?.substring(0, 12)}...
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Created
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{formatRelativeTime(container.docker_images.created_at)}
</dd>
</div>
</div>
</div>
</div>
)}
</div>
{/* Similar Containers */}
{similarContainers.length > 0 && (
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Similar Containers (Same Image)
</h3>
</div>
<div className="px-6 py-5">
<ul className="divide-y divide-secondary-200 dark:divide-secondary-700">
{similarContainers.map((similar) => (
<li
key={similar.id}
className="py-4 flex items-center justify-between"
>
<div className="flex items-center">
<Container className="h-5 w-5 text-secondary-400 mr-3" />
<div>
<Link
to={`/docker/containers/${similar.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{similar.name}
</Link>
<p className="text-sm text-secondary-500 dark:text-secondary-400">
{similar.status}
</p>
</div>
</div>
</li>
))}
</ul>
</div>
</div>
)}
</div>
);
};
export default ContainerDetail;
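
ContainerDetail.jsx only renders fields it finds on the GET /docker/containers/:id response. Read back from the JSX above, the payload it expects looks roughly like the sketch below; the field names come from the component, but every value, and the exact shape of nested objects, is illustrative.

// Rough shape of the /docker/containers/:id response, inferred from the
// fields ContainerDetail reads. All values are made up for illustration.
const exampleContainerResponse = {
	container: {
		container_id: "a1b2c3d4e5f67890",
		name: "patchmon-backend",
		status: "running", // running | exited | paused | restarting
		state: "running",
		image_tag: "latest",
		ports: { "3000/tcp": "0.0.0.0:3000" },
		created_at: "2025-10-01T12:00:00Z",
		started_at: "2025-10-02T08:30:00Z",
		last_checked: "2025-10-13T20:00:00Z",
		host: { id: "host-1", friendly_name: "web-01", hostname: "web-01.local" },
		docker_images: {
			id: "img-1",
			repository: "ghcr.io/example/app",
			tag: "latest",
			source: "registry",
			size_bytes: "123456789",
			image_id: "sha256:0123abcd4567",
			created_at: "2025-09-30T00:00:00Z",
			docker_image_updates: [{ available_tag: "1.3.0" }],
		},
	},
	similarContainers: [{ id: "c-2", name: "patchmon-backend-2", status: "running" }],
};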

HostDetail.jsx
View File

@@ -0,0 +1,354 @@
import { useQuery } from "@tanstack/react-query";
import {
AlertTriangle,
ArrowLeft,
Container,
ExternalLink,
Package,
RefreshCw,
Server,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api from "../../utils/api";
const HostDetail = () => {
const { id } = useParams();
const { data, isLoading, error } = useQuery({
queryKey: ["docker", "host", id],
queryFn: async () => {
const response = await api.get(`/docker/hosts/${id}`);
return response.data;
},
refetchInterval: 30000,
});
const host = data?.host;
const containers = data?.containers || [];
const images = data?.images || [];
const stats = data?.stats;
if (isLoading) {
return (
<div className="flex items-center justify-center min-h-screen">
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
</div>
);
}
if (error || !host) {
return (
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-red-400" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Host not found
</h3>
</div>
</div>
</div>
<Link
to="/docker"
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
</div>
);
}
return (
<div className="space-y-6">
<div>
<Link
to="/docker"
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
<div className="flex items-start justify-between">
<div className="flex items-center">
<Server className="h-8 w-8 text-secondary-400 mr-3" />
<div>
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
{host.friendly_name || host.hostname}
</h1>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
{host.ip}
</p>
</div>
</div>
<Link
to={`/hosts/${id}`}
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
View Full Host Details
<ExternalLink className="ml-2 h-4 w-4" />
</Link>
</div>
</div>
{/* Overview Cards */}
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-blue-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Total Containers
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.totalContainers || 0}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-green-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Running
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.runningContainers || 0}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-red-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Stopped
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.stoppedContainers || 0}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Package className="h-5 w-5 text-purple-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Images
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.totalImages || 0}
</p>
</div>
</div>
</div>
</div>
{/* Host Information */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Host Information
</h3>
</div>
<div className="px-6 py-5 space-y-6">
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Friendly Name
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.friendly_name}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Hostname
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.hostname}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
IP Address
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.ip}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
OS
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.os_type} {host.os_version}
</dd>
</div>
</div>
</div>
</div>
{/* Containers */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Containers ({containers.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Container Name
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Image
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Status
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{containers.map((container) => (
<tr key={container.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/docker/containers/${container.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{container.name}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{container.image_name}:{container.image_tag}
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
{container.status}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/docker/containers/${container.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
{/* Images */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Images ({images.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Repository
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Tag
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Source
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{images.map((image) => (
<tr key={image.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/docker/images/${image.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{image.repository}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
{image.tag}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{image.source}
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/docker/images/${image.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
</div>
);
};
export default HostDetail;
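
HostDetail.jsx relies on the API to ship a ready-made stats object next to the containers and images arrays. If those counters ever had to be derived on the client, an equivalent would look like the sketch below; this is purely illustrative, not what the backend does, and treating anything that is not "running" as stopped is an assumption of the sketch.

// Illustrative fallback: derive the counters HostDetail renders from the
// containers and images arrays returned by /docker/hosts/:id.
const deriveDockerHostStats = (containers = [], images = []) => ({
	totalContainers: containers.length,
	runningContainers: containers.filter((c) => c.status === "running").length,
	stoppedContainers: containers.filter((c) => c.status !== "running").length,
	totalImages: images.length,
});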

ImageDetail.jsx
View File

@@ -0,0 +1,439 @@
import { useQuery } from "@tanstack/react-query";
import {
AlertTriangle,
ArrowLeft,
Container,
ExternalLink,
Package,
RefreshCw,
Server,
Shield,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";
const ImageDetail = () => {
const { id } = useParams();
const { data, isLoading, error } = useQuery({
queryKey: ["docker", "image", id],
queryFn: async () => {
const response = await api.get(`/docker/images/${id}`);
return response.data;
},
refetchInterval: 30000,
});
const image = data?.image;
const hosts = data?.hosts || [];
const containers = image?.docker_containers || [];
const updates = image?.docker_image_updates || [];
if (isLoading) {
return (
<div className="flex items-center justify-center min-h-screen">
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
</div>
);
}
if (error || !image) {
return (
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-red-400" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Image not found
</h3>
</div>
</div>
</div>
<Link
to="/docker"
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
</div>
);
}
return (
<div className="space-y-6">
<div>
<Link
to="/docker"
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
<div className="flex items-start justify-between">
<div className="flex items-center">
<Package className="h-8 w-8 text-secondary-400 mr-3" />
<div>
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
{image.repository}:{image.tag}
</h1>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
Image ID: {image.image_id.substring(0, 12)}
</p>
</div>
</div>
</div>
</div>
{/* Overview Cards */}
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-green-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Containers
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{containers.length}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Server className="h-5 w-5 text-purple-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Hosts
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{hosts.length}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Package className="h-5 w-5 text-blue-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">Size</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{image.size_bytes ? (
<>{(Number(image.size_bytes) / 1024 / 1024).toFixed(0)} MB</>
) : (
"N/A"
)}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<AlertTriangle className="h-5 w-5 text-warning-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Updates
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{updates.length}
</p>
</div>
</div>
</div>
</div>
{/* Available Updates with Digest Comparison */}
{updates.length > 0 && (
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-yellow-400" />
<div className="ml-3 flex-1">
<h3 className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
Updates Available
</h3>
<div className="mt-2 space-y-3">
{updates.map((update) => {
let digestInfo = null;
try {
if (update.changelog_url) {
digestInfo = JSON.parse(update.changelog_url);
}
} catch (_e) {
// Ignore parse errors
}
return (
<div
key={update.id}
className="bg-white dark:bg-secondary-800 rounded-lg p-3 border border-yellow-200 dark:border-yellow-700"
>
<div className="flex items-center justify-between mb-2">
<div className="flex items-center gap-2">
{update.is_security_update && (
<Shield className="h-4 w-4 text-red-500" />
)}
<span className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
New version available:{" "}
<span className="font-semibold">
{update.available_tag}
</span>
</span>
</div>
{update.is_security_update && (
<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200">
Security
</span>
)}
</div>
{digestInfo &&
digestInfo.method === "digest_comparison" && (
<div className="mt-2 pt-2 border-t border-yellow-200 dark:border-yellow-700">
<p className="text-xs text-secondary-600 dark:text-secondary-400 mb-1">
Detected via digest comparison:
</p>
<div className="font-mono text-xs space-y-1">
<div className="text-red-600 dark:text-red-400">
<span className="font-bold">- Current: </span>
{digestInfo.current_digest}
</div>
<div className="text-green-600 dark:text-green-400">
<span className="font-bold">+ Available: </span>
{digestInfo.available_digest}
</div>
</div>
</div>
)}
</div>
);
})}
</div>
</div>
</div>
</div>
)}
{/* Image Information */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Image Information
</h3>
</div>
<div className="px-6 py-5 space-y-6">
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Repository
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.repository}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Tag
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.tag}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Source
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.source}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Created
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.created_at
? formatRelativeTime(image.created_at)
: "Unknown"}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Image ID
</dt>
<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white">
{image.image_id}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Last Checked
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.last_checked
? formatRelativeTime(image.last_checked)
: "Never"}
</dd>
</div>
{image.digest && (
<div className="sm:col-span-2">
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Digest
</dt>
<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white break-all">
{image.digest}
</dd>
</div>
)}
</div>
</div>
</div>
{/* Containers using this image */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Containers ({containers.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Container Name
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Status
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Host
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{containers.map((container) => (
<tr key={container.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/docker/containers/${container.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{container.name}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
{container.status}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{container.host_id}
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/docker/containers/${container.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
{/* Hosts using this image */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Hosts ({hosts.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Host Name
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
IP Address
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{hosts.map((host) => (
<tr key={host.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/hosts/${host.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{host.friendly_name || host.hostname}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{host.ip}
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/hosts/${host.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
</div>
);
};
export default ImageDetail;
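
ImageDetail.jsx treats the update record's changelog_url column as an optional JSON blob describing how the update was detected. Going only by the keys the component reads (method, current_digest, available_digest), the stored value presumably resembles the sketch below; the digest values are invented, and the helper simply restates the component's own try/catch guard.

// Illustrative payload stored in docker_image_updates.changelog_url when an
// update is found via digest comparison. Keys match what ImageDetail reads;
// the values are invented.
const exampleDigestInfo = {
	method: "digest_comparison",
	current_digest: "sha256:1111aaaa",
	available_digest: "sha256:2222bbbb",
};

// Same defensive parse the component performs: rows that hold a plain URL
// (or nothing at all) must not break rendering, so parse errors are swallowed.
const parseDigestInfo = (update) => {
	try {
		return update.changelog_url ? JSON.parse(update.changelog_url) : null;
	} catch (_e) {
		return null;
	}
};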

File diff suppressed because it is too large

vite.config.js
View File

@@ -43,5 +43,25 @@ export default defineConfig({
outDir: "dist",
sourcemap: process.env.NODE_ENV !== "production",
target: "es2018",
rollupOptions: {
output: {
manualChunks: {
// React core
"react-vendor": ["react", "react-dom", "react-router-dom"],
// Large utility libraries
"utils-vendor": ["axios", "@tanstack/react-query", "date-fns"],
// Chart libraries
"chart-vendor": ["chart.js", "react-chartjs-2"],
// Icon libraries
"icons-vendor": ["lucide-react", "react-icons"],
// DnD libraries
"dnd-vendor": [
"@dnd-kit/core",
"@dnd-kit/sortable",
"@dnd-kit/utilities",
],
},
},
},
},
});
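
The manualChunks map added above pins known packages into named vendor chunks. Rollup also accepts manualChunks as a function of the module id, which scales better as dependencies grow; a rough equivalent of the same grouping is sketched below. It is not what this diff uses, and the catch-all "vendor" bucket is an addition of the sketch.

// Function-form alternative to the manualChunks object above (illustrative
// only; the diff keeps the object form). More specific matches come first so
// that e.g. react-chartjs-2 is not swallowed by the generic "react" check.
const manualChunks = (id) => {
	if (!id.includes("node_modules")) return undefined;
	if (id.includes("@dnd-kit")) return "dnd-vendor";
	if (id.includes("chart.js") || id.includes("react-chartjs-2"))
		return "chart-vendor";
	if (id.includes("lucide-react") || id.includes("react-icons"))
		return "icons-vendor";
	if (
		id.includes("axios") ||
		id.includes("@tanstack/react-query") ||
		id.includes("date-fns")
	)
		return "utils-vendor";
	if (id.includes("react")) return "react-vendor";
	return "vendor";
};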

package-lock.json (generated; 911 changed lines)

File diff suppressed because it is too large

package.json
View File

@@ -1,6 +1,6 @@
{
"name": "patchmon",
"version": "1.2.7",
"version": "1.2.9",
"description": "Linux Patch Monitoring System",
"license": "AGPL-3.0",
"private": true,

setup.sh (382 changed lines)
View File

@@ -34,7 +34,7 @@ BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Global variables
SCRIPT_VERSION="self-hosting-install.sh v1.2.7-selfhost-2025-01-20-1"
SCRIPT_VERSION="self-hosting-install.sh v1.2.9-selfhost-2025-10-11-1"
DEFAULT_GITHUB_REPO="https://github.com/PatchMon/PatchMon.git"
FQDN=""
CUSTOM_FQDN=""
@@ -60,6 +60,9 @@ SERVICE_USE_LETSENCRYPT="true" # Will be set based on user input
SERVER_PROTOCOL_SEL="https"
SERVER_PORT_SEL="" # Will be set to BACKEND_PORT in init_instance_vars
SETUP_NGINX="true"
UPDATE_MODE="false"
SELECTED_INSTANCE=""
SELECTED_SERVICE_NAME=""
# Functions
print_status() {
@@ -642,31 +645,61 @@ EOF
# Setup database for instance
setup_database() {
print_info "Creating database: $DB_NAME"
print_info "Setting up database: $DB_NAME"
# Check if sudo is available for user switching
if command -v sudo >/dev/null 2>&1; then
# Drop and recreate database and user for clean state
sudo -u postgres psql -c "DROP DATABASE IF EXISTS $DB_NAME;" || true
sudo -u postgres psql -c "DROP USER IF EXISTS $DB_USER;" || true
# Check if user exists
user_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'" || echo "0")
# Create database and user
sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
if [ "$user_exists" = "1" ]; then
print_info "Database user $DB_USER already exists, skipping creation"
else
print_info "Creating database user $DB_USER"
sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
fi
# Check if database exists
db_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" || echo "0")
if [ "$db_exists" = "1" ]; then
print_info "Database $DB_NAME already exists, skipping creation"
else
print_info "Creating database $DB_NAME"
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
fi
# Always grant privileges (in case they were revoked)
sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;"
else
# Alternative method for systems without sudo (run as postgres user directly)
print_warning "sudo not available, using alternative method for PostgreSQL setup"
# Switch to postgres user using su
su - postgres -c "psql -c \"DROP DATABASE IF EXISTS $DB_NAME;\"" || true
su - postgres -c "psql -c \"DROP USER IF EXISTS $DB_USER;\"" || true
su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
# Check if user exists
user_exists=$(su - postgres -c "psql -tAc \"SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'\"" || echo "0")
if [ "$user_exists" = "1" ]; then
print_info "Database user $DB_USER already exists, skipping creation"
else
print_info "Creating database user $DB_USER"
su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
fi
# Check if database exists
db_exists=$(su - postgres -c "psql -tAc \"SELECT 1 FROM pg_database WHERE datname='$DB_NAME'\"" || echo "0")
if [ "$db_exists" = "1" ]; then
print_info "Database $DB_NAME already exists, skipping creation"
else
print_info "Creating database $DB_NAME"
su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
fi
# Always grant privileges (in case they were revoked)
su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;\""
fi
print_status "Database $DB_NAME created with user $DB_USER"
print_status "Database setup complete for $DB_NAME"
}
# Clone application repository
@@ -834,7 +867,7 @@ EOF
cat > frontend/.env << EOF
VITE_API_URL=$SERVER_PROTOCOL_SEL://$FQDN/api/v1
VITE_APP_NAME=PatchMon
VITE_APP_VERSION=1.2.7
VITE_APP_VERSION=1.2.9
EOF
print_status "Environment files created"
@@ -1206,7 +1239,7 @@ create_agent_version() {
# Priority 2: Use fallback version if not found
if [ "$current_version" = "N/A" ] || [ -z "$current_version" ]; then
current_version="1.2.7"
current_version="1.2.9"
print_warning "Could not determine version, using fallback: $current_version"
fi
@@ -1550,11 +1583,295 @@ deploy_instance() {
:
}
# Detect existing PatchMon installations
detect_installations() {
local installations=()
# Find all directories in /opt that contain PatchMon installations
if [ -d "/opt" ]; then
for dir in /opt/*/; do
local dirname=$(basename "$dir")
# Skip backup directories
if [[ "$dirname" =~ \.backup\. ]]; then
continue
fi
# Check if it's a PatchMon installation
if [ -f "$dir/backend/package.json" ] && grep -q "patchmon" "$dir/backend/package.json" 2>/dev/null; then
installations+=("$dirname")
fi
done
fi
echo "${installations[@]}"
}
# Select installation to update
select_installation_to_update() {
local installations=($(detect_installations))
if [ ${#installations[@]} -eq 0 ]; then
print_error "No existing PatchMon installations found in /opt"
exit 1
fi
print_info "Found ${#installations[@]} existing installation(s):"
echo ""
local i=1
declare -A install_map
for install in "${installations[@]}"; do
# Get current version if possible
local version="unknown"
if [ -f "/opt/$install/backend/package.json" ]; then
version=$(grep '"version"' "/opt/$install/backend/package.json" | head -1 | sed 's/.*"version": "\([^"]*\)".*/\1/')
fi
# Get service status - try multiple naming conventions
# Convention 1: Just the install name (e.g., patchmon.internal)
local service_name="$install"
# Convention 2: patchmon. prefix (e.g., patchmon.patchmon.internal)
local alt_service_name1="patchmon.$install"
# Convention 3: patchmon- prefix with underscores (e.g., patchmon-patchmon_internal)
local alt_service_name2="patchmon-$(echo "$install" | tr '.' '_')"
local status="unknown"
# Try convention 1 first (most common)
if systemctl is-active --quiet "$service_name" 2>/dev/null; then
status="running"
elif systemctl is-enabled --quiet "$service_name" 2>/dev/null; then
status="stopped"
# Try convention 2
elif systemctl is-active --quiet "$alt_service_name1" 2>/dev/null; then
status="running"
service_name="$alt_service_name1"
elif systemctl is-enabled --quiet "$alt_service_name1" 2>/dev/null; then
status="stopped"
service_name="$alt_service_name1"
# Try convention 3
elif systemctl is-active --quiet "$alt_service_name2" 2>/dev/null; then
status="running"
service_name="$alt_service_name2"
elif systemctl is-enabled --quiet "$alt_service_name2" 2>/dev/null; then
status="stopped"
service_name="$alt_service_name2"
fi
printf "%2d. %-30s (v%-10s - %s)\n" "$i" "$install" "$version" "$status"
install_map[$i]="$install"
# Store the service name for later use
declare -g "service_map_$i=$service_name"
i=$((i + 1))
done
echo ""
while true; do
read_input "Select installation number to update" SELECTION "1"
if [[ "$SELECTION" =~ ^[0-9]+$ ]] && [ -n "${install_map[$SELECTION]}" ]; then
SELECTED_INSTANCE="${install_map[$SELECTION]}"
# Get the stored service name
local varname="service_map_$SELECTION"
SELECTED_SERVICE_NAME="${!varname}"
print_status "Selected: $SELECTED_INSTANCE"
print_info "Service: $SELECTED_SERVICE_NAME"
return 0
else
print_error "Invalid selection. Please enter a number from 1 to ${#installations[@]}"
fi
done
}
# Update existing installation
update_installation() {
local instance_dir="/opt/$SELECTED_INSTANCE"
local service_name="$SELECTED_SERVICE_NAME"
print_info "Updating PatchMon installation: $SELECTED_INSTANCE"
print_info "Installation directory: $instance_dir"
print_info "Service name: $service_name"
# Verify it's a git repository
if [ ! -d "$instance_dir/.git" ]; then
print_error "Installation directory is not a git repository"
print_error "Cannot perform git-based update"
exit 1
fi
# Add git safe.directory to avoid ownership issues when running as root
print_info "Configuring git safe.directory..."
git config --global --add safe.directory "$instance_dir" 2>/dev/null || true
# Load existing .env to get database credentials
if [ -f "$instance_dir/backend/.env" ]; then
source "$instance_dir/backend/.env"
print_status "Loaded existing configuration"
# Parse DATABASE_URL to extract credentials
# Format: postgresql://user:password@host:port/database
if [ -n "$DATABASE_URL" ]; then
# Extract components using regex
DB_USER=$(echo "$DATABASE_URL" | sed -n 's|postgresql://\([^:]*\):.*|\1|p')
DB_PASS=$(echo "$DATABASE_URL" | sed -n 's|postgresql://[^:]*:\([^@]*\)@.*|\1|p')
DB_HOST=$(echo "$DATABASE_URL" | sed -n 's|.*@\([^:]*\):.*|\1|p')
DB_PORT=$(echo "$DATABASE_URL" | sed -n 's|.*:\([0-9]*\)/.*|\1|p')
DB_NAME=$(echo "$DATABASE_URL" | sed -n 's|.*/\([^?]*\).*|\1|p')
print_info "Database: $DB_NAME (user: $DB_USER)"
else
print_error "DATABASE_URL not found in .env file"
exit 1
fi
else
print_error "Cannot find .env file at $instance_dir/backend/.env"
exit 1
fi
# Select branch/version to update to
select_branch
print_info "Updating to: $DEPLOYMENT_BRANCH"
echo ""
read_yes_no "Proceed with update? This will pull new code and restart services" CONFIRM_UPDATE "y"
if [ "$CONFIRM_UPDATE" != "y" ]; then
print_warning "Update cancelled by user"
exit 0
fi
# Stop the service
print_info "Stopping service: $service_name"
systemctl stop "$service_name" || true
# Create backup directory
local timestamp=$(date +%Y%m%d_%H%M%S)
local backup_dir="$instance_dir.backup.$timestamp"
local db_backup_file="$backup_dir/database_backup_$timestamp.sql"
print_info "Creating backup directory: $backup_dir"
mkdir -p "$backup_dir"
# Backup database
print_info "Backing up database: $DB_NAME"
if PGPASSWORD="$DB_PASS" pg_dump -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -F c -f "$db_backup_file" 2>/dev/null; then
print_status "Database backup created: $db_backup_file"
else
print_warning "Database backup failed, but continuing with code backup"
fi
# Backup code
print_info "Backing up code files..."
cp -r "$instance_dir" "$backup_dir/code"
print_status "Code backup created"
# Update code
print_info "Pulling latest code from branch: $DEPLOYMENT_BRANCH"
cd "$instance_dir"
# Clean up any untracked files that might conflict with incoming changes
print_info "Cleaning up untracked files to prevent merge conflicts..."
git clean -fd
# Reset any local changes to ensure clean state
print_info "Resetting local changes to ensure clean state..."
git reset --hard HEAD
# Fetch latest changes
git fetch origin
# Checkout the selected branch/tag
git checkout "$DEPLOYMENT_BRANCH"
git pull origin "$DEPLOYMENT_BRANCH" || git pull # For tags, just pull
print_status "Code updated successfully"
# Update dependencies
print_info "Updating backend dependencies..."
cd "$instance_dir/backend"
npm install --production --ignore-scripts
print_info "Updating frontend dependencies..."
cd "$instance_dir/frontend"
npm install --ignore-scripts
# Build frontend
print_info "Building frontend..."
npm run build
# Run database migrations and generate Prisma client
print_info "Running database migrations..."
cd "$instance_dir/backend"
npx prisma generate
npx prisma migrate deploy
# Start the service
print_info "Starting service: $service_name"
systemctl start "$service_name"
# Wait a moment and check status
sleep 3
if systemctl is-active --quiet "$service_name"; then
print_success "✅ Update completed successfully!"
print_status "Service $service_name is running"
# Get new version
local new_version=$(grep '"version"' "$instance_dir/backend/package.json" | head -1 | sed 's/.*"version": "\([^"]*\)".*/\1/')
print_info "Updated to version: $new_version"
echo ""
print_info "Backup Information:"
print_info " Code backup: $backup_dir/code"
print_info " Database backup: $db_backup_file"
echo ""
print_info "To restore database if needed:"
print_info " PGPASSWORD=\"$DB_PASS\" pg_restore -h \"$DB_HOST\" -U \"$DB_USER\" -d \"$DB_NAME\" -c \"$db_backup_file\""
echo ""
else
print_error "Service failed to start after update"
echo ""
print_warning "ROLLBACK INSTRUCTIONS:"
print_info "1. Restore code:"
print_info " sudo rm -rf $instance_dir"
print_info " sudo mv $backup_dir/code $instance_dir"
echo ""
print_info "2. Restore database:"
print_info " PGPASSWORD=\"$DB_PASS\" pg_restore -h \"$DB_HOST\" -U \"$DB_USER\" -d \"$DB_NAME\" -c \"$db_backup_file\""
echo ""
print_info "3. Restart service:"
print_info " sudo systemctl start $service_name"
echo ""
print_info "Check logs: journalctl -u $service_name -f"
exit 1
fi
}
# Main script execution
main() {
# Log script entry
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Interactive installation started" >> "$DEBUG_LOG"
# Parse command-line arguments
if [ "$1" = "--update" ]; then
UPDATE_MODE="true"
fi
# Log script entry
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Script started - Update mode: $UPDATE_MODE" >> "$DEBUG_LOG"
# Handle update mode
if [ "$UPDATE_MODE" = "true" ]; then
print_banner
print_info "🔄 PatchMon Update Mode"
echo ""
# Select installation to update
select_installation_to_update
# Perform update
update_installation
exit 0
fi
# Normal installation mode
# Run interactive setup
interactive_setup
@@ -1588,5 +1905,30 @@ main() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] deploy_instance function completed" >> "$DEBUG_LOG"
}
# Run main function (no arguments needed for interactive mode)
main
# Show usage/help
show_usage() {
echo "PatchMon Self-Hosting Installation & Update Script"
echo "Version: $SCRIPT_VERSION"
echo ""
echo "Usage:"
echo " $0 # Interactive installation (default)"
echo " $0 --update # Update existing installation"
echo " $0 --help # Show this help message"
echo ""
echo "Examples:"
echo " # New installation:"
echo " sudo bash $0"
echo ""
echo " # Update existing installation:"
echo " sudo bash $0 --update"
echo ""
}
# Check for help flag
if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
show_usage
exit 0
fi
# Run main function
main "$@"