Mirror of https://github.com/9technologygroup/patchmon.net.git (synced 2025-11-03 13:33:30 +00:00)

Compare commits: v1.2.8 ... 6ce131636d (17 commits)
Commits in this comparison (SHA1):
6ce131636d
4d5040e0e9
28c5310b99
a2e9743da6
3863d641fa
cc8f77a946
36455e2bfd
af65d38cad
29266b6d77
f96e468482
9f8c88badf
7985a225d7
8c538bd99c
623bf5e2c8
ed8cc81b89
5c4353a688
0ad1a96871
@@ -1,12 +1,12 @@
 #!/bin/bash
 
-# PatchMon Agent Script v1.2.8
+# PatchMon Agent Script v1.2.9
 # This script sends package update information to the PatchMon server using API credentials
 
 # Configuration
 PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
 API_VERSION="v1"
-AGENT_VERSION="1.2.8"
+AGENT_VERSION="1.2.9"
 CONFIG_FILE="/etc/patchmon/agent.conf"
 CREDENTIALS_FILE="/etc/patchmon/credentials"
 LOG_FILE="/var/log/patchmon-agent.log"
@@ -38,21 +38,21 @@ error() {
 	exit 1
 }
 
-# Info logging (cleaner output - only stdout, no duplicate logging)
+# Info logging (cleaner output - only stderr, no duplicate logging)
 info() {
-	echo -e "${BLUE}ℹ️ $1${NC}"
+	echo -e "${BLUE}ℹ️ $1${NC}" >&2
 	log "INFO: $1"
 }
 
-# Success logging (cleaner output - only stdout, no duplicate logging)
+# Success logging (cleaner output - only stderr, no duplicate logging)
 success() {
-	echo -e "${GREEN}✅ $1${NC}"
+	echo -e "${GREEN}✅ $1${NC}" >&2
 	log "SUCCESS: $1"
 }
 
-# Warning logging (cleaner output - only stdout, no duplicate logging)
+# Warning logging (cleaner output - only stderr, no duplicate logging)
 warning() {
-	echo -e "${YELLOW}⚠️ $1${NC}"
+	echo -e "${YELLOW}⚠️ $1${NC}" >&2
 	log "WARNING: $1"
 }
 
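Sending these messages to stderr keeps stdout reserved for data: get_package_info() (context in the next hunk) prints its JSON result on stdout, so a caller can capture it without log noise mixed in. A minimal sketch of that pattern, assuming jq is available on the host:

    packages_json=$(get_package_info)     # info/success/warning lines go to the terminal via stderr
    echo "$packages_json" | jq 'length'   # stdout contained only the JSON payload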
@@ -709,6 +709,135 @@ get_package_info() {
 	echo "$packages_json"
 }
 
+# Check and handle APT locks
+handle_apt_locks() {
+	local interactive=${1:-false}  # First parameter indicates if running interactively
+
+	local lock_files=(
+		"/var/lib/dpkg/lock"
+		"/var/lib/dpkg/lock-frontend"
+		"/var/lib/apt/lists/lock"
+		"/var/cache/apt/archives/lock"
+	)
+
+	local processes_found=false
+	local hung_processes=()
+
+	# Check for running APT processes
+	if pgrep -x "apt-get|apt|aptitude|dpkg|unattended-upgr" > /dev/null 2>&1; then
+		processes_found=true
+		info "Found running package management processes:"
+		echo "" >&2
+
+		# Get process info with ACTUAL elapsed time (not CPU time)
+		# Using ps -eo format to get real elapsed time
+		while IFS= read -r line; do
+			[[ -z "$line" ]] && continue
+
+			local pid=$(echo "$line" | awk '{print $1}')
+			local elapsed=$(echo "$line" | awk '{print $2}')
+			local cmd=$(echo "$line" | awk '{for(i=3;i<=NF;i++) printf "%s ", $i; print ""}')
+
+			# Display process info
+			echo "  PID $pid: $cmd (running for $elapsed)" >&2
+
+			# Parse elapsed time and convert to seconds
+			# Format can be: MM:SS, HH:MM:SS, DD-HH:MM:SS, or just SS
+			# Use 10# prefix to force base-10 (avoid octal interpretation of leading zeros)
+			local runtime_seconds=0
+			if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then
+				# Format: DD-HH:MM:SS
+				runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
+			elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then
+				# Format: HH:MM:SS
+				runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
+			elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then
+				# Format: MM:SS
+				runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
+			elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then
+				# Format: just seconds
+				runtime_seconds=$((10#${BASH_REMATCH[1]}))
+			fi
+
+			# Consider process hung if running for more than 5 minutes
+			if [[ $runtime_seconds -gt 300 ]]; then
+				hung_processes+=("$pid:$elapsed:$cmd")
+			fi
+		done < <(ps -eo pid,etime,cmd | grep -E "apt-get|apt[^-]|aptitude|dpkg|unattended-upgr" | grep -v grep | grep -v "ps -eo")
+
+		echo "" >&2
+
+		info "Detected ${#hung_processes[@]} hung process(es), interactive=$interactive"
+
+		# If hung processes found and running interactively, offer to kill them
+		if [[ ${#hung_processes[@]} -gt 0 && "$interactive" == "true" ]]; then
+			warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
+			echo "" >&2
+
+			for process_info in "${hung_processes[@]}"; do
+				IFS=':' read -r pid elapsed cmd <<< "$process_info"
+				echo "  PID $pid: $cmd (hung for $elapsed)" >&2
+			done
+
+			echo "" >&2
+			read -p "$(echo -e "${YELLOW}⚠️ Do you want to kill these processes? [y/N]:${NC} ")" -n 1 -r >&2
+			echo "" >&2
+
+			if [[ $REPLY =~ ^[Yy]$ ]]; then
+				for process_info in "${hung_processes[@]}"; do
+					IFS=':' read -r pid elapsed cmd <<< "$process_info"
+					info "Killing process $pid..."
+					if kill "$pid" 2>/dev/null; then
+						success "Killed process $pid"
+						sleep 1
+						# Check if process is still running
+						if kill -0 "$pid" 2>/dev/null; then
+							warning "Process $pid still running, using SIGKILL..."
+							kill -9 "$pid" 2>/dev/null
+							success "Force killed process $pid"
+						fi
+					else
+						warning "Could not kill process $pid (may require sudo)"
+					fi
+				done
+
+				# Wait a moment for locks to clear
+				sleep 2
+			else
+				info "Skipping process termination"
+			fi
+		elif [[ ${#hung_processes[@]} -gt 0 ]]; then
+			warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
+			info "Run this command with sudo and interactively to kill hung processes"
+		fi
+	fi
+
+	# Check for stale lock files (files that exist but no process is holding them)
+	for lock_file in "${lock_files[@]}"; do
+		if [[ -f "$lock_file" ]]; then
+			# Try to get the PID from the lock file if it exists
+			if lsof "$lock_file" > /dev/null 2>&1; then
+				info "Lock file $lock_file is held by an active process"
+			else
+				warning "Found stale lock file: $lock_file"
+				info "Attempting to remove stale lock..."
+				if rm -f "$lock_file" 2>/dev/null; then
+					success "Removed stale lock: $lock_file"
+				else
+					warning "Could not remove lock (insufficient permissions): $lock_file"
+				fi
+			fi
+		fi
+	done
+
+	# If processes were found, return failure so caller can wait
+	if [[ "$processes_found" == true ]]; then
+		return 1
+	else
+		return 0
+	fi
+}
+
 # Get package info for APT-based systems
 get_apt_packages() {
 	local -n packages_ref=$1
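The elapsed-time parsing above mirrors the formats that ps prints in its etime column. A standalone sketch of the same conversion for a single process; the PID is an example value:

    pid=1234                                          # example PID
    elapsed=$(ps -o etime= -p "$pid" | tr -d ' ')
    elapsed=${elapsed:-0}
    if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then   # DD-HH:MM:SS
        secs=$(( 10#${BASH_REMATCH[1]}*86400 + 10#${BASH_REMATCH[2]}*3600 + 10#${BASH_REMATCH[3]}*60 + 10#${BASH_REMATCH[4]} ))
    elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then          # HH:MM:SS
        secs=$(( 10#${BASH_REMATCH[1]}*3600 + 10#${BASH_REMATCH[2]}*60 + 10#${BASH_REMATCH[3]} ))
    elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then                   # MM:SS
        secs=$(( 10#${BASH_REMATCH[1]}*60 + 10#${BASH_REMATCH[2]} ))
    else
        secs=$(( 10#$elapsed ))                                          # bare seconds
    fi
    echo "PID $pid has been running for $secs seconds"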
@@ -725,10 +854,25 @@ get_apt_packages() {
 		else
 			retry_count=$((retry_count + 1))
 			if [[ $retry_count -lt $max_retries ]]; then
-				warning "APT lock detected, retrying in ${retry_delay} seconds... (attempt $retry_count/$max_retries)"
+				warning "APT lock detected (attempt $retry_count/$max_retries)"
+
+				# On first retry, try to handle locks
+				if [[ $retry_count -eq 1 ]]; then
+					info "Checking for stale APT locks..."
+					# Check if running interactively (stdin is a terminal OR stdout is a terminal)
+					local is_interactive=false
+					if [[ -t 0 ]] || [[ -t 1 ]]; then
+						is_interactive=true
+					fi
+					info "Interactive mode: $is_interactive"
+					handle_apt_locks "$is_interactive"
+				fi
+
+				info "Waiting ${retry_delay} seconds before retry..."
 				sleep $retry_delay
 			else
-				warning "APT lock persists after $max_retries attempts, continuing without update..."
+				warning "APT lock persists after $max_retries attempts"
+				warning "Continuing without updating package lists (will use cached data)"
 			fi
 		fi
 	done
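The interactivity test used here asks whether stdin or stdout is attached to a terminal, which is what separates a cron-driven run from a manual one. A quick way to see both outcomes:

    check_tty() { if [[ -t 0 ]] || [[ -t 1 ]]; then echo interactive; else echo non-interactive; fi; }
    check_tty                       # run from a shell: interactive
    check_tty </dev/null | cat      # stdin and stdout both redirected, as under cron: non-interactive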
@@ -1564,9 +1708,21 @@ main() {
 		"diagnostics")
 			show_diagnostics
 			;;
+		"clear-locks"|"unlock")
+			check_root
+			info "Checking APT locks and hung processes..."
+			echo ""
+			handle_apt_locks true
+			echo ""
+			if [[ $? -eq 0 ]]; then
+				success "No APT locks or processes blocking package management"
+			else
+				info "APT processes are still running - they may be legitimate operations"
+			fi
+			;;
 		*)
 			echo "PatchMon Agent v$AGENT_VERSION - API Credential Based"
-			echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|diagnostics}"
+			echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|clear-locks|diagnostics}"
 			echo ""
 			echo "Commands:"
 			echo "  configure <API_ID> <API_KEY> [SERVER_URL] - Configure API credentials for this host"
@@ -1578,6 +1734,7 @@ main() {
 			echo "  check-agent-update                        - Check for agent updates using timestamp comparison"
 			echo "  update-agent                              - Update agent to latest version"
 			echo "  update-crontab                            - Update crontab with current policy"
+			echo "  clear-locks                               - Check and clear APT locks (interactive)"
 			echo "  diagnostics                               - Show detailed system diagnostics"
 			echo ""
 			echo "Setup Process:"
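A usage sketch for the new command; the install path is an assumption, so point it at wherever the agent script actually lives on the host:

    sudo /usr/local/bin/patchmon-agent.sh clear-locks   # assumed path; inspect locks and optionally kill hung processes
    sudo /usr/local/bin/patchmon-agent.sh unlock        # same case branch, alternate name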
agents/patchmon-docker-agent.sh (new executable file, 496 lines)
@@ -0,0 +1,496 @@
#!/bin/bash

# PatchMon Docker Agent Script v1.2.9
# This script collects Docker container and image information and sends it to PatchMon

# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-docker-agent.log"

# Curl flags placeholder (replaced by server based on SSL settings)
CURL_FLAGS=""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Logging function
log() {
	if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
		echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE" 2>/dev/null
	fi
}

# Error handling
error() {
	echo -e "${RED}ERROR: $1${NC}" >&2
	log "ERROR: $1"
	exit 1
}

# Info logging
info() {
	echo -e "${BLUE}ℹ️ $1${NC}" >&2
	log "INFO: $1"
}

# Success logging
success() {
	echo -e "${GREEN}✅ $1${NC}" >&2
	log "SUCCESS: $1"
}

# Warning logging
warning() {
	echo -e "${YELLOW}⚠️ $1${NC}" >&2
	log "WARNING: $1"
}

# Check if Docker is installed and running
check_docker() {
	if ! command -v docker &> /dev/null; then
		error "Docker is not installed on this system"
	fi

	if ! docker info &> /dev/null; then
		error "Docker daemon is not running or you don't have permission to access it. Try running with sudo."
	fi
}

# Load credentials
load_credentials() {
	if [[ ! -f "$CREDENTIALS_FILE" ]]; then
		error "Credentials file not found at $CREDENTIALS_FILE. Please configure the main PatchMon agent first."
	fi

	source "$CREDENTIALS_FILE"

	if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
		error "API credentials not found in $CREDENTIALS_FILE"
	fi

	# Use PATCHMON_URL from credentials if available, otherwise use default
	if [[ -n "$PATCHMON_URL" ]]; then
		PATCHMON_SERVER="$PATCHMON_URL"
	fi
}

# Load configuration
load_config() {
	if [[ -f "$CONFIG_FILE" ]]; then
		source "$CONFIG_FILE"
		if [[ -n "$SERVER_URL" ]]; then
			PATCHMON_SERVER="$SERVER_URL"
		fi
	fi
}

# Collect Docker containers
collect_containers() {
	info "Collecting Docker container information..."

	local containers_json="["
	local first=true

	# Get all containers (running and stopped)
	while IFS='|' read -r container_id name image status state created started ports; do
		if [[ -z "$container_id" ]]; then
			continue
		fi

		# Parse image name and tag
		local image_name="${image%%:*}"
		local image_tag="${image##*:}"
		if [[ "$image_tag" == "$image_name" ]]; then
			image_tag="latest"
		fi

		# Determine image source based on registry
		local image_source="docker-hub"
		if [[ "$image_name" == ghcr.io/* ]]; then
			image_source="github"
		elif [[ "$image_name" == registry.gitlab.com/* ]]; then
			image_source="gitlab"
		elif [[ "$image_name" == *"/"*"/"* ]]; then
			image_source="private"
		fi

		# Get repository name (without registry prefix for common registries)
		local image_repository="$image_name"
		image_repository="${image_repository#ghcr.io/}"
		image_repository="${image_repository#registry.gitlab.com/}"

		# Get image ID
		local full_image_id=$(docker inspect --format='{{.Image}}' "$container_id" 2>/dev/null || echo "unknown")
		full_image_id="${full_image_id#sha256:}"

		# Normalize status (extract just the status keyword)
		local normalized_status="unknown"
		if [[ "$status" =~ ^Up ]]; then
			normalized_status="running"
		elif [[ "$status" =~ ^Exited ]]; then
			normalized_status="exited"
		elif [[ "$status" =~ ^Created ]]; then
			normalized_status="created"
		elif [[ "$status" =~ ^Restarting ]]; then
			normalized_status="restarting"
		elif [[ "$status" =~ ^Paused ]]; then
			normalized_status="paused"
		elif [[ "$status" =~ ^Dead ]]; then
			normalized_status="dead"
		fi

		# Parse ports
		local ports_json="null"
		if [[ -n "$ports" && "$ports" != "null" ]]; then
			# Convert Docker port format to JSON
			ports_json=$(echo "$ports" | jq -R -s -c 'split(",") | map(select(length > 0)) | map(split("->") | {(.[0]): .[1]}) | add // {}')
		fi

		# Convert dates to ISO 8601 format
		# If date conversion fails, use null instead of invalid date string
		local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
		local started_iso="null"
		if [[ -n "$started" && "$started" != "null" ]]; then
			started_iso=$(date -d "$started" -Iseconds 2>/dev/null || echo "null")
		fi

		# Add comma for JSON array
		if [[ "$first" == false ]]; then
			containers_json+=","
		fi
		first=false

		# Build JSON object for this container
		containers_json+="{\"container_id\":\"$container_id\","
		containers_json+="\"name\":\"$name\","
		containers_json+="\"image_name\":\"$image_name\","
		containers_json+="\"image_tag\":\"$image_tag\","
		containers_json+="\"image_repository\":\"$image_repository\","
		containers_json+="\"image_source\":\"$image_source\","
		containers_json+="\"image_id\":\"$full_image_id\","
		containers_json+="\"status\":\"$normalized_status\","
		containers_json+="\"state\":\"$state\","
		containers_json+="\"ports\":$ports_json"

		# Only add created_at if we have a valid date
		if [[ "$created_iso" != "null" ]]; then
			containers_json+=",\"created_at\":\"$created_iso\""
		fi

		# Only add started_at if we have a valid date
		if [[ "$started_iso" != "null" ]]; then
			containers_json+=",\"started_at\":\"$started_iso\""
		fi

		containers_json+="}"

	done < <(docker ps -a --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.State}}|{{.CreatedAt}}|{{.RunningFor}}|{{.Ports}}' 2>/dev/null)

	containers_json+="]"

	echo "$containers_json"
}

# Collect Docker images
collect_images() {
	info "Collecting Docker image information..."

	local images_json="["
	local first=true

	while IFS='|' read -r repository tag image_id created size digest; do
		if [[ -z "$repository" || "$repository" == "<none>" ]]; then
			continue
		fi

		# Clean up tag
		if [[ -z "$tag" || "$tag" == "<none>" ]]; then
			tag="latest"
		fi

		# Clean image ID
		image_id="${image_id#sha256:}"

		# Determine source
		local source="docker-hub"
		if [[ "$repository" == ghcr.io/* ]]; then
			source="github"
		elif [[ "$repository" == registry.gitlab.com/* ]]; then
			source="gitlab"
		elif [[ "$repository" == *"/"*"/"* ]]; then
			source="private"
		fi

		# Convert size to bytes (approximate)
		local size_bytes=0
		if [[ "$size" =~ ([0-9.]+)([KMGT]?B) ]]; then
			local num="${BASH_REMATCH[1]}"
			local unit="${BASH_REMATCH[2]}"
			case "$unit" in
				KB) size_bytes=$(echo "$num * 1024" | bc | cut -d. -f1) ;;
				MB) size_bytes=$(echo "$num * 1024 * 1024" | bc | cut -d. -f1) ;;
				GB) size_bytes=$(echo "$num * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
				TB) size_bytes=$(echo "$num * 1024 * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
				B) size_bytes=$(echo "$num" | cut -d. -f1) ;;
			esac
		fi

		# Convert created date to ISO 8601
		# If date conversion fails, use null instead of invalid date string
		local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")

		# Add comma for JSON array
		if [[ "$first" == false ]]; then
			images_json+=","
		fi
		first=false

		# Build JSON object for this image
		images_json+="{\"repository\":\"$repository\","
		images_json+="\"tag\":\"$tag\","
		images_json+="\"image_id\":\"$image_id\","
		images_json+="\"source\":\"$source\","
		images_json+="\"size_bytes\":$size_bytes"

		# Only add created_at if we have a valid date
		if [[ "$created_iso" != "null" ]]; then
			images_json+=",\"created_at\":\"$created_iso\""
		fi

		# Only add digest if present
		if [[ -n "$digest" && "$digest" != "<none>" ]]; then
			images_json+=",\"digest\":\"$digest\""
		fi

		images_json+="}"

	done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.CreatedAt}}|{{.Size}}|{{.Digest}}' --no-trunc 2>/dev/null)

	images_json+="]"

	echo "$images_json"
}

# Check for image updates
check_image_updates() {
	info "Checking for image updates..."

	local updates_json="["
	local first=true
	local update_count=0

	# Get all images
	while IFS='|' read -r repository tag image_id digest; do
		if [[ -z "$repository" || "$repository" == "<none>" || "$tag" == "<none>" ]]; then
			continue
		fi

		# Skip checking 'latest' tag as it's always considered current by name
		# We'll still check digest though
		local full_image="${repository}:${tag}"

		# Try to get remote digest from registry
		# Use docker manifest inspect to avoid pulling the image
		local remote_digest=$(docker manifest inspect "$full_image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty' 2>/dev/null)

		if [[ -z "$remote_digest" ]]; then
			# If manifest inspect fails, try buildx imagetools inspect (works for more registries)
			remote_digest=$(docker buildx imagetools inspect "$full_image" 2>/dev/null | grep -oP 'Digest:\s*\K\S+' | head -1)
		fi

		# Clean up digests for comparison
		local local_digest="${digest#sha256:}"
		remote_digest="${remote_digest#sha256:}"

		# If we got a remote digest and it's different from local, there's an update
		if [[ -n "$remote_digest" && -n "$local_digest" && "$remote_digest" != "$local_digest" ]]; then
			if [[ "$first" == false ]]; then
				updates_json+=","
			fi
			first=false

			# Build update JSON object
			updates_json+="{\"repository\":\"$repository\","
			updates_json+="\"current_tag\":\"$tag\","
			updates_json+="\"available_tag\":\"$tag\","
			updates_json+="\"current_digest\":\"$local_digest\","
			updates_json+="\"available_digest\":\"$remote_digest\","
			updates_json+="\"image_id\":\"${image_id#sha256:}\""
			updates_json+="}"

			((update_count++))
		fi

	done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.Digest}}' --no-trunc 2>/dev/null)

	updates_json+="]"

	info "Found $update_count image update(s) available"

	echo "$updates_json"
}

# Send Docker data to server
send_docker_data() {
	load_credentials

	info "Collecting Docker data..."

	local containers=$(collect_containers)
	local images=$(collect_images)
	local updates=$(check_image_updates)

	# Count collected items
	local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
	local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
	local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")

	info "Found $container_count containers, $image_count images, and $update_count update(s) available"

	# Build payload
	local payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":$images,\"updates\":$updates}"

	# Send to server
	info "Sending Docker data to PatchMon server..."

	local response=$(curl $CURL_FLAGS -s -w "\n%{http_code}" -X POST \
		-H "Content-Type: application/json" \
		-d "$payload" \
		"${PATCHMON_SERVER}/api/${API_VERSION}/docker/collect" 2>&1)

	local http_code=$(echo "$response" | tail -n1)
	local response_body=$(echo "$response" | head -n-1)

	if [[ "$http_code" == "200" ]]; then
		success "Docker data sent successfully!"
		log "Docker data sent: $container_count containers, $image_count images"
		return 0
	else
		error "Failed to send Docker data. HTTP Status: $http_code\nResponse: $response_body"
	fi
}

# Test Docker data collection without sending
test_collection() {
	check_docker

	info "Testing Docker data collection (dry run)..."
	echo ""

	local containers=$(collect_containers)
	local images=$(collect_images)
	local updates=$(check_image_updates)

	local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
	local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
	local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")

	echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
	echo -e "${GREEN}Docker Data Collection Results${NC}"
	echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
	echo -e "Containers found:  ${GREEN}$container_count${NC}"
	echo -e "Images found:      ${GREEN}$image_count${NC}"
	echo -e "Updates available: ${YELLOW}$update_count${NC}"
	echo ""

	if command -v jq &> /dev/null; then
		echo "━━━ Containers ━━━"
		echo "$containers" | jq -r '.[] | "\(.name) (\(.status)) - \(.image_name):\(.image_tag)"' | head -10
		if [[ $container_count -gt 10 ]]; then
			echo "... and $((container_count - 10)) more"
		fi
		echo ""
		echo "━━━ Images ━━━"
		echo "$images" | jq -r '.[] | "\(.repository):\(.tag) (\(.size_bytes / 1024 / 1024 | floor)MB)"' | head -10
		if [[ $image_count -gt 10 ]]; then
			echo "... and $((image_count - 10)) more"
		fi

		if [[ $update_count -gt 0 ]]; then
			echo ""
			echo "━━━ Available Updates ━━━"
			echo "$updates" | jq -r '.[] | "\(.repository):\(.current_tag) → \(.available_tag)"'
		fi
	fi

	echo ""
	success "Test collection completed successfully!"
}

# Show help
show_help() {
	cat << EOF
PatchMon Docker Agent v${AGENT_VERSION}

This agent collects Docker container and image information and sends it to PatchMon.

USAGE:
    $0 <command>

COMMANDS:
    collect    Collect and send Docker data to PatchMon server
    test       Test Docker data collection without sending (dry run)
    help       Show this help message

REQUIREMENTS:
    - Docker must be installed and running
    - Main PatchMon agent must be configured first
    - Credentials file must exist at $CREDENTIALS_FILE

EXAMPLES:
    # Test collection (dry run)
    sudo $0 test

    # Collect and send Docker data
    sudo $0 collect

SCHEDULING:
    To run this agent automatically, add a cron job:

    # Run every 5 minutes
    */5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect

    # Run every hour
    0 * * * * /usr/local/bin/patchmon-docker-agent.sh collect

FILES:
    Config:      $CONFIG_FILE
    Credentials: $CREDENTIALS_FILE
    Log:         $LOG_FILE

EOF
}

# Main function
main() {
	case "$1" in
		"collect")
			check_docker
			load_config
			send_docker_data
			;;
		"test")
			check_docker
			load_config
			test_collection
			;;
		"help"|"--help"|"-h"|"")
			show_help
			;;
		*)
			error "Unknown command: $1\n\nRun '$0 help' for usage information."
			;;
	esac
}

# Run main function
main "$@"
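The update check in check_image_updates() leans on docker manifest inspect so nothing is pulled. The same comparison can be reproduced by hand for one image; nginx:latest is only an example, and locally built images may report no repository digest at all:

    image="nginx:latest"
    local_digest=$(docker images --no-trunc --format '{{.Digest}}' "$image" | head -1)
    remote_digest=$(docker manifest inspect "$image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty')
    if [[ -n "$remote_digest" && "${local_digest#sha256:}" != "${remote_digest#sha256:}" ]]; then
        echo "update available for $image"
    else
        echo "$image is current (or no digest to compare)"
    fi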
@@ -1,6 +1,6 @@
 {
 	"name": "patchmon-backend",
-	"version": "1.2.8",
+	"version": "1.2.9",
 	"description": "Backend API for Linux Patch Monitoring System",
 	"license": "AGPL-3.0",
 	"main": "src/server.js",
@@ -14,14 +14,18 @@
 		"db:studio": "prisma studio"
 	},
 	"dependencies": {
+		"@bull-board/api": "^6.13.0",
+		"@bull-board/express": "^6.13.0",
 		"@prisma/client": "^6.1.0",
 		"bcryptjs": "^2.4.3",
+		"bullmq": "^5.61.0",
 		"cors": "^2.8.5",
 		"dotenv": "^16.4.7",
-		"express": "^4.21.2",
+		"express": "^5.0.0",
 		"express-rate-limit": "^7.5.0",
 		"express-validator": "^7.2.0",
 		"helmet": "^8.0.0",
+		"ioredis": "^5.8.1",
 		"jsonwebtoken": "^9.0.2",
 		"moment": "^2.30.1",
 		"qrcode": "^1.5.4",
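bullmq and ioredis bring a Redis requirement for the new automation queues, and the @bull-board packages provide a dashboard over them. A quick pre-flight check that the backend host can reach Redis; host and port are assumptions, match them to your deployment:

    redis-cli -h 127.0.0.1 -p 6379 ping    # expect: PONG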
@@ -0,0 +1,119 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp

DO $$
DECLARE
    old_migration_exists boolean := false;
    table_exists boolean := false;
    failed_migration_exists boolean := false;
    new_migration_exists boolean := false;
BEGIN
    -- Check if the old migration name exists
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = 'add_user_sessions'
    ) INTO old_migration_exists;

    -- Check if user_sessions table exists
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) INTO table_exists;

    -- Check if there's a failed migration attempt
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
        AND finished_at IS NULL
    ) INTO failed_migration_exists;

    -- Check if the new migration already exists and is successful
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
        AND finished_at IS NOT NULL
    ) INTO new_migration_exists;

    -- FIRST: Handle failed migration (must be marked as rolled back)
    IF failed_migration_exists THEN
        RAISE NOTICE 'Found failed migration attempt - marking as rolled back';

        -- Mark the failed migration as rolled back (required by Prisma)
        UPDATE _prisma_migrations
        SET rolled_back_at = NOW()
        WHERE migration_name = '20251005000000_add_user_sessions'
        AND finished_at IS NULL;

        RAISE NOTICE 'Failed migration marked as rolled back';

        -- If table exists, it means the migration partially succeeded
        IF table_exists THEN
            RAISE NOTICE 'Table exists - migration was partially successful, will be handled by next migration';
        ELSE
            RAISE NOTICE 'Table does not exist - migration will retry after rollback';
        END IF;
    END IF;

    -- SECOND: Handle old migration name (1.2.7 -> 1.2.8+ upgrade)
    IF old_migration_exists AND table_exists THEN
        RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';

        -- Update the old migration name to the new timestamped version
        UPDATE _prisma_migrations
        SET migration_name = '20251005000000_add_user_sessions'
        WHERE migration_name = 'add_user_sessions';

        RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
    END IF;

    -- THIRD: Handle case where table exists but no migration record exists (1.2.7 upgrade scenario)
    IF table_exists AND NOT old_migration_exists AND NOT new_migration_exists THEN
        RAISE NOTICE 'Table exists but no migration record found - creating migration record for 1.2.7 upgrade';

        -- Insert a successful migration record for the existing table
        INSERT INTO _prisma_migrations (
            id,
            checksum,
            finished_at,
            migration_name,
            logs,
            rolled_back_at,
            started_at,
            applied_steps_count
        ) VALUES (
            gen_random_uuid()::text,
            '', -- Empty checksum since we're reconciling
            NOW(),
            '20251005000000_add_user_sessions',
            'Reconciled from 1.2.7 - table already exists',
            NULL,
            NOW(),
            1
        );

        RAISE NOTICE 'Migration record created for existing table';
    END IF;

    -- FOURTH: If we have a rolled back migration and table exists, mark it as applied
    IF failed_migration_exists AND table_exists THEN
        RAISE NOTICE 'Migration was rolled back but table exists - marking as successfully applied';

        -- Update the rolled back migration to be successful
        UPDATE _prisma_migrations
        SET
            finished_at = NOW(),
            rolled_back_at = NULL,
            logs = 'Reconciled from failed state - table already exists'
        WHERE migration_name = '20251005000000_add_user_sessions';

        RAISE NOTICE 'Migration marked as successfully applied';
    END IF;

    -- If no issues found
    IF NOT old_migration_exists AND NOT failed_migration_exists AND NOT (table_exists AND NOT new_migration_exists) THEN
        RAISE NOTICE 'No migration reconciliation needed';
    END IF;

END $$;
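To see which of these branches applies on a given installation, the bookkeeping table can be inspected directly; DATABASE_URL is assumed to point at the PatchMon database:

    psql "$DATABASE_URL" -c "SELECT migration_name, started_at, finished_at, rolled_back_at FROM _prisma_migrations WHERE migration_name LIKE '%add_user_sessions%';"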
@@ -0,0 +1,96 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp

DO $$
DECLARE
    old_migration_exists boolean := false;
    table_exists boolean := false;
    failed_migration_exists boolean := false;
BEGIN
    -- Check if the old migration name exists
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = 'add_user_sessions'
    ) INTO old_migration_exists;

    -- Check if user_sessions table exists
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) INTO table_exists;

    -- Check if there's a failed migration attempt
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
        AND finished_at IS NULL
    ) INTO failed_migration_exists;

    -- Scenario 1: Old migration exists, table exists, no failed migration
    -- This means 1.2.7 was installed and we need to update the migration name
    IF old_migration_exists AND table_exists AND NOT failed_migration_exists THEN
        RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';

        -- Update the old migration name to the new timestamped version
        UPDATE _prisma_migrations
        SET migration_name = '20251005000000_add_user_sessions'
        WHERE migration_name = 'add_user_sessions';

        RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
    END IF;

    -- Scenario 2: Failed migration exists (upgrade attempt gone wrong)
    IF failed_migration_exists THEN
        RAISE NOTICE 'Found failed migration attempt - cleaning up';

        -- If table exists, it means the migration partially succeeded
        IF table_exists THEN
            RAISE NOTICE 'Table exists - marking migration as applied';

            -- Delete the failed migration record
            DELETE FROM _prisma_migrations
            WHERE migration_name = '20251005000000_add_user_sessions'
            AND finished_at IS NULL;

            -- Insert a successful migration record
            INSERT INTO _prisma_migrations (
                id,
                checksum,
                finished_at,
                migration_name,
                logs,
                rolled_back_at,
                started_at,
                applied_steps_count
            ) VALUES (
                gen_random_uuid()::text,
                '', -- Empty checksum since we're reconciling
                NOW(),
                '20251005000000_add_user_sessions',
                NULL,
                NULL,
                NOW(),
                1
            );

            RAISE NOTICE 'Migration marked as successfully applied';
        ELSE
            RAISE NOTICE 'Table does not exist - removing failed migration to allow retry';

            -- Just delete the failed migration to allow it to retry
            DELETE FROM _prisma_migrations
            WHERE migration_name = '20251005000000_add_user_sessions'
            AND finished_at IS NULL;

            RAISE NOTICE 'Failed migration removed - will retry on next migration run';
        END IF;
    END IF;

    -- Scenario 3: Everything is clean (fresh install or already reconciled)
    IF NOT old_migration_exists AND NOT failed_migration_exists THEN
        RAISE NOTICE 'No migration reconciliation needed';
    END IF;

END $$;
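After either reconciliation script has run, Prisma's own view of the migration history should be clean. A sketch of the follow-up check, assuming it is run from the backend directory of the checkout:

    cd backend
    npx prisma migrate status    # should report no failed or unapplied migrations
    npx prisma migrate deploy    # applies anything still pending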
@@ -0,0 +1,94 @@
-- CreateTable
CREATE TABLE "docker_images" (
    "id" TEXT NOT NULL,
    "repository" TEXT NOT NULL,
    "tag" TEXT NOT NULL DEFAULT 'latest',
    "image_id" TEXT NOT NULL,
    "digest" TEXT,
    "size_bytes" BIGINT,
    "source" TEXT NOT NULL DEFAULT 'docker-hub',
    "created_at" TIMESTAMP(3) NOT NULL,
    "last_pulled" TIMESTAMP(3),
    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "docker_images_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "docker_containers" (
    "id" TEXT NOT NULL,
    "host_id" TEXT NOT NULL,
    "container_id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "image_id" TEXT,
    "image_name" TEXT NOT NULL,
    "image_tag" TEXT NOT NULL DEFAULT 'latest',
    "status" TEXT NOT NULL,
    "state" TEXT,
    "ports" JSONB,
    "created_at" TIMESTAMP(3) NOT NULL,
    "started_at" TIMESTAMP(3),
    "updated_at" TIMESTAMP(3) NOT NULL,
    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "docker_containers_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "docker_image_updates" (
    "id" TEXT NOT NULL,
    "image_id" TEXT NOT NULL,
    "current_tag" TEXT NOT NULL,
    "available_tag" TEXT NOT NULL,
    "is_security_update" BOOLEAN NOT NULL DEFAULT false,
    "severity" TEXT,
    "changelog_url" TEXT,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "docker_image_updates_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "docker_images_repository_idx" ON "docker_images"("repository");

-- CreateIndex
CREATE INDEX "docker_images_source_idx" ON "docker_images"("source");

-- CreateIndex
CREATE INDEX "docker_images_repository_tag_idx" ON "docker_images"("repository", "tag");

-- CreateIndex
CREATE UNIQUE INDEX "docker_images_repository_tag_image_id_key" ON "docker_images"("repository", "tag", "image_id");

-- CreateIndex
CREATE INDEX "docker_containers_host_id_idx" ON "docker_containers"("host_id");

-- CreateIndex
CREATE INDEX "docker_containers_image_id_idx" ON "docker_containers"("image_id");

-- CreateIndex
CREATE INDEX "docker_containers_status_idx" ON "docker_containers"("status");

-- CreateIndex
CREATE INDEX "docker_containers_name_idx" ON "docker_containers"("name");

-- CreateIndex
CREATE UNIQUE INDEX "docker_containers_host_id_container_id_key" ON "docker_containers"("host_id", "container_id");

-- CreateIndex
CREATE INDEX "docker_image_updates_image_id_idx" ON "docker_image_updates"("image_id");

-- CreateIndex
CREATE INDEX "docker_image_updates_is_security_update_idx" ON "docker_image_updates"("is_security_update");

-- CreateIndex
CREATE UNIQUE INDEX "docker_image_updates_image_id_available_tag_key" ON "docker_image_updates"("image_id", "available_tag");

-- AddForeignKey
ALTER TABLE "docker_containers" ADD CONSTRAINT "docker_containers_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "docker_image_updates" ADD CONSTRAINT "docker_image_updates_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE CASCADE ON UPDATE CASCADE;
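The two foreign keys give the tables different cleanup behaviour: dropping an image row cascades away its docker_image_updates rows but only detaches containers by nulling image_id. A small illustration via psql; the repository and tag values are placeholders:

    psql "$DATABASE_URL" -c "DELETE FROM \"docker_images\" WHERE \"repository\" = 'nginx' AND \"tag\" = 'latest';"
    # docker_image_updates rows for that image are gone (ON DELETE CASCADE);
    # matching docker_containers rows remain with image_id = NULL (ON DELETE SET NULL)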
@@ -262,3 +262,65 @@ model auto_enrollment_tokens {
 	@@index([token_key])
 	@@index([is_active])
 }
+
+model docker_containers {
+  id            String         @id
+  host_id       String
+  container_id  String
+  name          String
+  image_id      String?
+  image_name    String
+  image_tag     String         @default("latest")
+  status        String
+  state         String?
+  ports         Json?
+  created_at    DateTime
+  started_at    DateTime?
+  updated_at    DateTime
+  last_checked  DateTime       @default(now())
+  docker_images docker_images? @relation(fields: [image_id], references: [id], onDelete: SetNull)
+
+  @@unique([host_id, container_id])
+  @@index([host_id])
+  @@index([image_id])
+  @@index([status])
+  @@index([name])
+}
+
+model docker_images {
+  id                   String                 @id
+  repository           String
+  tag                  String                 @default("latest")
+  image_id             String
+  digest               String?
+  size_bytes           BigInt?
+  source               String                 @default("docker-hub")
+  created_at           DateTime
+  last_pulled          DateTime?
+  last_checked         DateTime               @default(now())
+  updated_at           DateTime
+  docker_containers    docker_containers[]
+  docker_image_updates docker_image_updates[]
+
+  @@unique([repository, tag, image_id])
+  @@index([repository])
+  @@index([source])
+  @@index([repository, tag])
+}
+
+model docker_image_updates {
+  id                 String        @id
+  image_id           String
+  current_tag        String
+  available_tag      String
+  is_security_update Boolean       @default(false)
+  severity           String?
+  changelog_url      String?
+  created_at         DateTime      @default(now())
+  updated_at         DateTime
+  docker_images      docker_images @relation(fields: [image_id], references: [id], onDelete: Cascade)
+
+  @@unique([image_id, available_tag])
+  @@index([image_id])
+  @@index([is_security_update])
+}
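With these models in place, a host's running containers and their image metadata come from a single join. A sketch of the equivalent SQL via psql; the host id is a placeholder:

    psql "$DATABASE_URL" -c "SELECT c.name, c.status, i.repository, i.tag FROM docker_containers c LEFT JOIN docker_images i ON i.id = c.image_id WHERE c.host_id = '<host-id>' AND c.status = 'running';"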
backend/src/routes/automationRoutes.js (new file, 362 lines)
@@ -0,0 +1,362 @@
const express = require("express");
const { queueManager, QUEUE_NAMES } = require("../services/automation");
const { authenticateToken } = require("../middleware/auth");

const router = express.Router();

// Get all queue statistics
router.get("/stats", authenticateToken, async (req, res) => {
	try {
		const stats = await queueManager.getAllQueueStats();
		res.json({
			success: true,
			data: stats,
		});
	} catch (error) {
		console.error("Error fetching queue stats:", error);
		res.status(500).json({
			success: false,
			error: "Failed to fetch queue statistics",
		});
	}
});

// Get specific queue statistics
router.get("/stats/:queueName", authenticateToken, async (req, res) => {
	try {
		const { queueName } = req.params;

		if (!Object.values(QUEUE_NAMES).includes(queueName)) {
			return res.status(400).json({
				success: false,
				error: "Invalid queue name",
			});
		}

		const stats = await queueManager.getQueueStats(queueName);
		res.json({
			success: true,
			data: stats,
		});
	} catch (error) {
		console.error("Error fetching queue stats:", error);
		res.status(500).json({
			success: false,
			error: "Failed to fetch queue statistics",
		});
	}
});

// Get recent jobs for a queue
router.get("/jobs/:queueName", authenticateToken, async (req, res) => {
	try {
		const { queueName } = req.params;
		const { limit = 10 } = req.query;

		if (!Object.values(QUEUE_NAMES).includes(queueName)) {
			return res.status(400).json({
				success: false,
				error: "Invalid queue name",
			});
		}

		const jobs = await queueManager.getRecentJobs(queueName, parseInt(limit));

		// Format jobs for frontend
		const formattedJobs = jobs.map((job) => ({
			id: job.id,
			name: job.name,
			status: job.finishedOn
				? job.failedReason
					? "failed"
					: "completed"
				: "active",
			progress: job.progress,
			data: job.data,
			returnvalue: job.returnvalue,
			failedReason: job.failedReason,
			processedOn: job.processedOn,
			finishedOn: job.finishedOn,
			createdAt: new Date(job.timestamp),
			attemptsMade: job.attemptsMade,
			delay: job.delay,
		}));

		res.json({
			success: true,
			data: formattedJobs,
		});
	} catch (error) {
		console.error("Error fetching recent jobs:", error);
		res.status(500).json({
			success: false,
			error: "Failed to fetch recent jobs",
		});
	}
});

// Trigger manual GitHub update check
router.post("/trigger/github-update", authenticateToken, async (req, res) => {
	try {
		const job = await queueManager.triggerGitHubUpdateCheck();
		res.json({
			success: true,
			data: {
				jobId: job.id,
				message: "GitHub update check triggered successfully",
			},
		});
	} catch (error) {
		console.error("Error triggering GitHub update check:", error);
		res.status(500).json({
			success: false,
			error: "Failed to trigger GitHub update check",
		});
	}
});

// Trigger manual session cleanup
router.post("/trigger/session-cleanup", authenticateToken, async (req, res) => {
	try {
		const job = await queueManager.triggerSessionCleanup();
		res.json({
			success: true,
			data: {
				jobId: job.id,
				message: "Session cleanup triggered successfully",
			},
		});
	} catch (error) {
		console.error("Error triggering session cleanup:", error);
		res.status(500).json({
			success: false,
			error: "Failed to trigger session cleanup",
		});
	}
});

// Trigger manual echo hello
router.post("/trigger/echo-hello", authenticateToken, async (req, res) => {
	try {
		const { message } = req.body;
		const job = await queueManager.triggerEchoHello(message);
		res.json({
			success: true,
			data: {
				jobId: job.id,
				message: "Echo hello triggered successfully",
			},
		});
	} catch (error) {
		console.error("Error triggering echo hello:", error);
		res.status(500).json({
			success: false,
			error: "Failed to trigger echo hello",
		});
	}
});

// Trigger manual orphaned repo cleanup
router.post(
	"/trigger/orphaned-repo-cleanup",
	authenticateToken,
	async (req, res) => {
		try {
			const job = await queueManager.triggerOrphanedRepoCleanup();
			res.json({
				success: true,
				data: {
					jobId: job.id,
					message: "Orphaned repository cleanup triggered successfully",
				},
			});
		} catch (error) {
			console.error("Error triggering orphaned repository cleanup:", error);
			res.status(500).json({
				success: false,
				error: "Failed to trigger orphaned repository cleanup",
			});
		}
	},
);

// Get queue health status
router.get("/health", authenticateToken, async (req, res) => {
	try {
		const stats = await queueManager.getAllQueueStats();
		const totalJobs = Object.values(stats).reduce((sum, queueStats) => {
			return sum + queueStats.waiting + queueStats.active + queueStats.failed;
		}, 0);

		const health = {
			status: "healthy",
			totalJobs,
			queues: Object.keys(stats).length,
			timestamp: new Date().toISOString(),
		};

		// Check for unhealthy conditions
		if (totalJobs > 1000) {
			health.status = "warning";
			health.message = "High number of queued jobs";
		}

		const failedJobs = Object.values(stats).reduce((sum, queueStats) => {
			return sum + queueStats.failed;
		}, 0);

		if (failedJobs > 10) {
			health.status = "error";
			health.message = "High number of failed jobs";
		}

		res.json({
			success: true,
			data: health,
		});
	} catch (error) {
		console.error("Error checking queue health:", error);
		res.status(500).json({
			success: false,
			error: "Failed to check queue health",
		});
	}
});
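Every route in this file sits behind authenticateToken, so calls need a valid token. Hedged curl sketches; the base URL, the bearer-token scheme, and the mount path for this router are assumptions, since the server wiring is not part of this diff:

    TOKEN="<jwt-from-login>"                               # placeholder
    BASE="http://localhost:3001/api/v1/automation"         # assumed mount point
    curl -s -H "Authorization: Bearer $TOKEN" "$BASE/stats" | jq .
    curl -s -X POST -H "Authorization: Bearer $TOKEN" "$BASE/trigger/session-cleanup" | jq .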

// Get automation overview (for dashboard cards)
router.get("/overview", authenticateToken, async (req, res) => {
	try {
		const stats = await queueManager.getAllQueueStats();

		// Get recent jobs for each queue to show last run times
		const recentJobs = await Promise.all([
			queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.ECHO_HELLO, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
		]);

		// Calculate overview metrics
		const overview = {
			scheduledTasks:
				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
				stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
				stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].delayed +
				stats[QUEUE_NAMES.ECHO_HELLO].delayed +
				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed,

			runningTasks:
				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
				stats[QUEUE_NAMES.SESSION_CLEANUP].active +
				stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].active +
				stats[QUEUE_NAMES.ECHO_HELLO].active +
				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active,

			failedTasks:
				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
				stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
				stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].failed +
				stats[QUEUE_NAMES.ECHO_HELLO].failed +
				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed,

			totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
				return (
					sum +
					queueStats.completed +
					queueStats.failed +
					queueStats.active +
					queueStats.waiting +
					queueStats.delayed
				);
			}, 0),

			// Automation details with last run times
			automations: [
				{
					name: "GitHub Update Check",
					queue: QUEUE_NAMES.GITHUB_UPDATE_CHECK,
					description: "Checks for new PatchMon releases",
					schedule: "Daily at midnight",
					lastRun: recentJobs[0][0]?.finishedOn
						? new Date(recentJobs[0][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[0][0]?.finishedOn || 0,
					status: recentJobs[0][0]?.failedReason
						? "Failed"
						: recentJobs[0][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
				},
				{
					name: "Session Cleanup",
					queue: QUEUE_NAMES.SESSION_CLEANUP,
					description: "Cleans up expired user sessions",
					schedule: "Every hour",
					lastRun: recentJobs[1][0]?.finishedOn
						? new Date(recentJobs[1][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[1][0]?.finishedOn || 0,
					status: recentJobs[1][0]?.failedReason
						? "Failed"
						: recentJobs[1][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.SESSION_CLEANUP],
				},
				{
					name: "Echo Hello",
					queue: QUEUE_NAMES.ECHO_HELLO,
					description: "Simple test automation task",
					schedule: "Manual only",
					lastRun: recentJobs[2][0]?.finishedOn
						? new Date(recentJobs[2][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[2][0]?.finishedOn || 0,
					status: recentJobs[2][0]?.failedReason
						? "Failed"
						: recentJobs[2][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.ECHO_HELLO],
				},
				{
					name: "Orphaned Repo Cleanup",
					queue: QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
					description: "Removes repositories with no associated hosts",
					schedule: "Daily at 2 AM",
					lastRun: recentJobs[3][0]?.finishedOn
						? new Date(recentJobs[3][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[3][0]?.finishedOn || 0,
					status: recentJobs[3][0]?.failedReason
						? "Failed"
						: recentJobs[3][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
				},
			].sort((a, b) => {
				// Sort by last run timestamp (most recent first)
				// If both have never run (timestamp 0), maintain original order
				if (a.lastRunTimestamp === 0 && b.lastRunTimestamp === 0) return 0;
				if (a.lastRunTimestamp === 0) return 1; // Never run goes to bottom
				if (b.lastRunTimestamp === 0) return -1; // Never run goes to bottom
				return b.lastRunTimestamp - a.lastRunTimestamp; // Most recent first
			}),
		};

		res.json({
			success: true,
			data: overview,
		});
	} catch (error) {
		console.error("Error fetching automation overview:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch automation overview",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
779 backend/src/routes/dockerRoutes.js (new file)
@@ -0,0 +1,779 @@
|
|||||||
|
const express = require("express");
|
||||||
|
const { authenticateToken } = require("../middleware/auth");
|
||||||
|
const { PrismaClient } = require("@prisma/client");
|
||||||
|
const { v4: uuidv4 } = require("uuid");
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Helper function to convert BigInt fields to strings for JSON serialization
|
||||||
|
const convertBigIntToString = (obj) => {
|
||||||
|
if (obj === null || obj === undefined) return obj;
|
||||||
|
|
||||||
|
if (typeof obj === "bigint") {
|
||||||
|
return obj.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(obj)) {
|
||||||
|
return obj.map(convertBigIntToString);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof obj === "object") {
|
||||||
|
const converted = {};
|
||||||
|
for (const key in obj) {
|
||||||
|
converted[key] = convertBigIntToString(obj[key]);
|
||||||
|
}
|
||||||
|
return converted;
|
||||||
|
}
|
||||||
|
|
||||||
|
return obj;
|
||||||
|
};
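// Why this helper exists (illustrative sketch, not part of the route logic):
// JSON.stringify cannot serialize BigInt values, and Prisma returns BIGINT
// columns such as size_bytes as BigInt, so results are converted before res.json().
//
//   const row = { repository: "nginx", size_bytes: 191234567n };
//   // JSON.stringify(row) -> TypeError: Do not know how to serialize a BigInt
//   JSON.stringify(convertBigIntToString(row));
//   // -> '{"repository":"nginx","size_bytes":"191234567"}'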
|
||||||
|
|
||||||
|
// GET /api/v1/docker/dashboard - Get Docker dashboard statistics
|
||||||
|
router.get("/dashboard", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
// Get total hosts with Docker containers
|
||||||
|
const hostsWithDocker = await prisma.docker_containers.groupBy({
|
||||||
|
by: ["host_id"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get total containers
|
||||||
|
const totalContainers = await prisma.docker_containers.count();
|
||||||
|
|
||||||
|
// Get running containers
|
||||||
|
const runningContainers = await prisma.docker_containers.count({
|
||||||
|
where: { status: "running" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get total images
|
||||||
|
const totalImages = await prisma.docker_images.count();
|
||||||
|
|
||||||
|
// Get available updates
|
||||||
|
const availableUpdates = await prisma.docker_image_updates.count();
|
||||||
|
|
||||||
|
// Get containers by status
|
||||||
|
const containersByStatus = await prisma.docker_containers.groupBy({
|
||||||
|
by: ["status"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get images by source
|
||||||
|
const imagesBySource = await prisma.docker_images.groupBy({
|
||||||
|
by: ["source"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
stats: {
|
||||||
|
totalHostsWithDocker: hostsWithDocker.length,
|
||||||
|
totalContainers,
|
||||||
|
runningContainers,
|
||||||
|
totalImages,
|
||||||
|
availableUpdates,
|
||||||
|
},
|
||||||
|
containersByStatus,
|
||||||
|
imagesBySource,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching Docker dashboard:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch Docker dashboard" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/containers - Get all containers with filters
|
||||||
|
router.get("/containers", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { status, hostId, imageId, search, page = 1, limit = 50 } = req.query;
|
||||||
|
|
||||||
|
const where = {};
|
||||||
|
if (status) where.status = status;
|
||||||
|
if (hostId) where.host_id = hostId;
|
||||||
|
if (imageId) where.image_id = imageId;
|
||||||
|
if (search) {
|
||||||
|
where.OR = [
|
||||||
|
{ name: { contains: search, mode: "insensitive" } },
|
||||||
|
{ image_name: { contains: search, mode: "insensitive" } },
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const [containers, total] = await Promise.all([
|
||||||
|
prisma.docker_containers.findMany({
|
||||||
|
where,
|
||||||
|
include: {
|
||||||
|
docker_images: true,
|
||||||
|
},
|
||||||
|
orderBy: { updated_at: "desc" },
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
}),
|
||||||
|
prisma.docker_containers.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Get host information for each container
|
||||||
|
const hostIds = [...new Set(containers.map((c) => c.host_id))];
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
select: { id: true, friendly_name: true, hostname: true, ip: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const hostsMap = hosts.reduce((acc, host) => {
|
||||||
|
acc[host.id] = host;
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
const containersWithHosts = containers.map((container) => ({
|
||||||
|
...container,
|
||||||
|
host: hostsMap[container.host_id],
|
||||||
|
}));
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
containers: containersWithHosts,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total,
|
||||||
|
totalPages: Math.ceil(total / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching containers:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch containers" });
|
||||||
|
}
|
||||||
|
});
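// Usage sketch (illustrative; the base URL and token are placeholders, and a
// Bearer JWT is assumed because the route is guarded by authenticateToken):
//
//   const res = await fetch(
//     "http://localhost:3001/api/v1/docker/containers?status=running&search=nginx&page=1&limit=25",
//     { headers: { Authorization: `Bearer ${token}` } },
//   );
//   const { containers, pagination } = await res.json();
//   console.log(`${containers.length} of ${pagination.total} containers`);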
|
||||||
|
|
||||||
|
// GET /api/v1/docker/containers/:id - Get container detail
|
||||||
|
router.get("/containers/:id", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
const container = await prisma.docker_containers.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: {
|
||||||
|
docker_images: {
|
||||||
|
include: {
|
||||||
|
docker_image_updates: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!container) {
|
||||||
|
return res.status(404).json({ error: "Container not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get host information
|
||||||
|
const host = await prisma.hosts.findUnique({
|
||||||
|
where: { id: container.host_id },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
friendly_name: true,
|
||||||
|
hostname: true,
|
||||||
|
ip: true,
|
||||||
|
os_type: true,
|
||||||
|
os_version: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get other containers using the same image
|
||||||
|
const similarContainers = await prisma.docker_containers.findMany({
|
||||||
|
where: {
|
||||||
|
image_id: container.image_id,
|
||||||
|
id: { not: id },
|
||||||
|
},
|
||||||
|
take: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
container: {
|
||||||
|
...container,
|
||||||
|
host,
|
||||||
|
},
|
||||||
|
similarContainers,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching container detail:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch container detail" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/images - Get all images with filters
|
||||||
|
router.get("/images", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { source, search, page = 1, limit = 50 } = req.query;
|
||||||
|
|
||||||
|
const where = {};
|
||||||
|
if (source) where.source = source;
|
||||||
|
if (search) {
|
||||||
|
where.OR = [
|
||||||
|
{ repository: { contains: search, mode: "insensitive" } },
|
||||||
|
{ tag: { contains: search, mode: "insensitive" } },
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const [images, total] = await Promise.all([
|
||||||
|
prisma.docker_images.findMany({
|
||||||
|
where,
|
||||||
|
include: {
|
||||||
|
_count: {
|
||||||
|
select: {
|
||||||
|
docker_containers: true,
|
||||||
|
docker_image_updates: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
docker_image_updates: {
|
||||||
|
take: 1,
|
||||||
|
orderBy: { created_at: "desc" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: { updated_at: "desc" },
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
}),
|
||||||
|
prisma.docker_images.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Get unique hosts using each image
|
||||||
|
const imagesWithHosts = await Promise.all(
|
||||||
|
images.map(async (image) => {
|
||||||
|
const containers = await prisma.docker_containers.findMany({
|
||||||
|
where: { image_id: image.id },
|
||||||
|
select: { host_id: true },
|
||||||
|
distinct: ["host_id"],
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
...image,
|
||||||
|
hostsCount: containers.length,
|
||||||
|
hasUpdates: image._count.docker_image_updates > 0,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
images: imagesWithHosts,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total,
|
||||||
|
totalPages: Math.ceil(total / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching images:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch images" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/images/:id - Get image detail
|
||||||
|
router.get("/images/:id", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
const image = await prisma.docker_images.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: {
|
||||||
|
docker_containers: {
|
||||||
|
take: 100,
|
||||||
|
},
|
||||||
|
docker_image_updates: {
|
||||||
|
orderBy: { created_at: "desc" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!image) {
|
||||||
|
return res.status(404).json({ error: "Image not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get unique hosts using this image
|
||||||
|
const hostIds = [...new Set(image.docker_containers.map((c) => c.host_id))];
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
select: { id: true, friendly_name: true, hostname: true, ip: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
image,
|
||||||
|
hosts,
|
||||||
|
totalContainers: image.docker_containers.length,
|
||||||
|
totalHosts: hosts.length,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching image detail:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch image detail" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/hosts - Get all hosts with Docker
|
||||||
|
router.get("/hosts", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { page = 1, limit = 50 } = req.query;
|
||||||
|
|
||||||
|
// Get hosts that have Docker containers
|
||||||
|
const hostsWithContainers = await prisma.docker_containers.groupBy({
|
||||||
|
by: ["host_id"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hostIds = hostsWithContainers.map((h) => h.host_id);
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
orderBy: { friendly_name: "asc" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get container counts and statuses for each host
|
||||||
|
const hostsWithStats = await Promise.all(
|
||||||
|
hosts.map(async (host) => {
|
||||||
|
const [totalContainers, runningContainers, totalImages] =
|
||||||
|
await Promise.all([
|
||||||
|
prisma.docker_containers.count({
|
||||||
|
where: { host_id: host.id },
|
||||||
|
}),
|
||||||
|
prisma.docker_containers.count({
|
||||||
|
where: { host_id: host.id, status: "running" },
|
||||||
|
}),
|
||||||
|
prisma.docker_containers.findMany({
|
||||||
|
where: { host_id: host.id },
|
||||||
|
select: { image_id: true },
|
||||||
|
distinct: ["image_id"],
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...host,
|
||||||
|
dockerStats: {
|
||||||
|
totalContainers,
|
||||||
|
runningContainers,
|
||||||
|
totalImages: totalImages.length,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
hosts: hostsWithStats,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total: hostIds.length,
|
||||||
|
totalPages: Math.ceil(hostIds.length / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching Docker hosts:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch Docker hosts" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/hosts/:id - Get host Docker detail
|
||||||
|
router.get("/hosts/:id", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
const host = await prisma.hosts.findUnique({
|
||||||
|
where: { id },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!host) {
|
||||||
|
return res.status(404).json({ error: "Host not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get containers on this host
|
||||||
|
const containers = await prisma.docker_containers.findMany({
|
||||||
|
where: { host_id: id },
|
||||||
|
include: {
|
||||||
|
docker_images: {
|
||||||
|
include: {
|
||||||
|
docker_image_updates: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: { name: "asc" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get unique images on this host
|
||||||
|
const imageIds = [...new Set(containers.map((c) => c.image_id))].filter(
|
||||||
|
Boolean,
|
||||||
|
);
|
||||||
|
const images = await prisma.docker_images.findMany({
|
||||||
|
where: { id: { in: imageIds } },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get container statistics
|
||||||
|
const runningContainers = containers.filter(
|
||||||
|
(c) => c.status === "running",
|
||||||
|
).length;
|
||||||
|
const stoppedContainers = containers.filter(
|
||||||
|
(c) => c.status === "exited" || c.status === "stopped",
|
||||||
|
).length;
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
host,
|
||||||
|
containers,
|
||||||
|
images,
|
||||||
|
stats: {
|
||||||
|
totalContainers: containers.length,
|
||||||
|
runningContainers,
|
||||||
|
stoppedContainers,
|
||||||
|
totalImages: images.length,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching host Docker detail:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch host Docker detail" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/updates - Get available updates
|
||||||
|
router.get("/updates", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { page = 1, limit = 50, securityOnly = false } = req.query;
|
||||||
|
|
||||||
|
const where = {};
|
||||||
|
if (securityOnly === "true") {
|
||||||
|
where.is_security_update = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const [updates, total] = await Promise.all([
|
||||||
|
prisma.docker_image_updates.findMany({
|
||||||
|
where,
|
||||||
|
include: {
|
||||||
|
docker_images: {
|
||||||
|
include: {
|
||||||
|
docker_containers: {
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
host_id: true,
|
||||||
|
name: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: [{ is_security_update: "desc" }, { created_at: "desc" }],
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
}),
|
||||||
|
prisma.docker_image_updates.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Get affected hosts for each update
|
||||||
|
const updatesWithHosts = await Promise.all(
|
||||||
|
updates.map(async (update) => {
|
||||||
|
const hostIds = [
|
||||||
|
...new Set(
|
||||||
|
update.docker_images.docker_containers.map((c) => c.host_id),
|
||||||
|
),
|
||||||
|
];
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
select: { id: true, friendly_name: true, hostname: true },
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
...update,
|
||||||
|
affectedHosts: hosts,
|
||||||
|
affectedContainersCount:
|
||||||
|
update.docker_images.docker_containers.length,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
updates: updatesWithHosts,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total,
|
||||||
|
totalPages: Math.ceil(total / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching Docker updates:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch Docker updates" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// POST /api/v1/docker/collect - Collect Docker data from agent
|
||||||
|
router.post("/collect", async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { apiId, apiKey, containers, images, updates } = req.body;
|
||||||
|
|
||||||
|
// Validate API credentials
|
||||||
|
const host = await prisma.hosts.findFirst({
|
||||||
|
where: { api_id: apiId, api_key: apiKey },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!host) {
|
||||||
|
return res.status(401).json({ error: "Invalid API credentials" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
// Helper function to validate and parse dates
|
||||||
|
const parseDate = (dateString) => {
|
||||||
|
if (!dateString) return now;
|
||||||
|
const date = new Date(dateString);
|
||||||
|
return Number.isNaN(date.getTime()) ? now : date;
|
||||||
|
};
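// e.g. parseDate("2025-10-01T12:00:00Z") -> that date,
//      parseDate("not-a-date") and parseDate(undefined) -> `now`,
// so malformed or missing agent timestamps never abort the whole request.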
|
||||||
|
|
||||||
|
// Process containers
|
||||||
|
if (containers && Array.isArray(containers)) {
|
||||||
|
for (const containerData of containers) {
|
||||||
|
const containerId = uuidv4();
|
||||||
|
|
||||||
|
// Find or create image
|
||||||
|
let imageId = null;
|
||||||
|
if (containerData.image_repository && containerData.image_tag) {
|
||||||
|
const image = await prisma.docker_images.upsert({
|
||||||
|
where: {
|
||||||
|
repository_tag_image_id: {
|
||||||
|
repository: containerData.image_repository,
|
||||||
|
tag: containerData.image_tag,
|
||||||
|
image_id: containerData.image_id || "unknown",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
last_checked: now,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: uuidv4(),
|
||||||
|
repository: containerData.image_repository,
|
||||||
|
tag: containerData.image_tag,
|
||||||
|
image_id: containerData.image_id || "unknown",
|
||||||
|
source: containerData.image_source || "docker-hub",
|
||||||
|
created_at: parseDate(containerData.created_at),
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
imageId = image.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert container
|
||||||
|
await prisma.docker_containers.upsert({
|
||||||
|
where: {
|
||||||
|
host_id_container_id: {
|
||||||
|
host_id: host.id,
|
||||||
|
container_id: containerData.container_id,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
name: containerData.name,
|
||||||
|
image_id: imageId,
|
||||||
|
image_name: containerData.image_name,
|
||||||
|
image_tag: containerData.image_tag || "latest",
|
||||||
|
status: containerData.status,
|
||||||
|
state: containerData.state,
|
||||||
|
ports: containerData.ports || null,
|
||||||
|
started_at: containerData.started_at
|
||||||
|
? parseDate(containerData.started_at)
|
||||||
|
: null,
|
||||||
|
updated_at: now,
|
||||||
|
last_checked: now,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: containerId,
|
||||||
|
host_id: host.id,
|
||||||
|
container_id: containerData.container_id,
|
||||||
|
name: containerData.name,
|
||||||
|
image_id: imageId,
|
||||||
|
image_name: containerData.image_name,
|
||||||
|
image_tag: containerData.image_tag || "latest",
|
||||||
|
status: containerData.status,
|
||||||
|
state: containerData.state,
|
||||||
|
ports: containerData.ports || null,
|
||||||
|
created_at: parseDate(containerData.created_at),
|
||||||
|
started_at: containerData.started_at
|
||||||
|
? parseDate(containerData.started_at)
|
||||||
|
: null,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process standalone images
|
||||||
|
if (images && Array.isArray(images)) {
|
||||||
|
for (const imageData of images) {
|
||||||
|
await prisma.docker_images.upsert({
|
||||||
|
where: {
|
||||||
|
repository_tag_image_id: {
|
||||||
|
repository: imageData.repository,
|
||||||
|
tag: imageData.tag,
|
||||||
|
image_id: imageData.image_id,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
size_bytes: imageData.size_bytes
|
||||||
|
? BigInt(imageData.size_bytes)
|
||||||
|
: null,
|
||||||
|
last_checked: now,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: uuidv4(),
|
||||||
|
repository: imageData.repository,
|
||||||
|
tag: imageData.tag,
|
||||||
|
image_id: imageData.image_id,
|
||||||
|
digest: imageData.digest,
|
||||||
|
size_bytes: imageData.size_bytes
|
||||||
|
? BigInt(imageData.size_bytes)
|
||||||
|
: null,
|
||||||
|
source: imageData.source || "docker-hub",
|
||||||
|
created_at: parseDate(imageData.created_at),
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process updates
|
||||||
|
// First, get all images for this host to clean up old updates
|
||||||
|
const hostImageIds = await prisma.docker_containers
|
||||||
|
.findMany({
|
||||||
|
where: { host_id: host.id },
|
||||||
|
select: { image_id: true },
|
||||||
|
distinct: ["image_id"],
|
||||||
|
})
|
||||||
|
.then((results) => results.map((r) => r.image_id).filter(Boolean));
|
||||||
|
|
||||||
|
// Delete old updates for images on this host that are no longer reported
|
||||||
|
if (hostImageIds.length > 0) {
|
||||||
|
const reportedImageIds = [];
|
||||||
|
|
||||||
|
// Process new updates
|
||||||
|
if (updates && Array.isArray(updates)) {
|
||||||
|
for (const updateData of updates) {
|
||||||
|
// Find the image by repository, tag, and image_id
|
||||||
|
const image = await prisma.docker_images.findFirst({
|
||||||
|
where: {
|
||||||
|
repository: updateData.repository,
|
||||||
|
tag: updateData.current_tag,
|
||||||
|
image_id: updateData.image_id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (image) {
|
||||||
|
reportedImageIds.push(image.id);
|
||||||
|
|
||||||
|
// Store digest info in changelog_url field as JSON for now
|
||||||
|
const digestInfo = JSON.stringify({
|
||||||
|
method: "digest_comparison",
|
||||||
|
current_digest: updateData.current_digest,
|
||||||
|
available_digest: updateData.available_digest,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Upsert the update record
|
||||||
|
await prisma.docker_image_updates.upsert({
|
||||||
|
where: {
|
||||||
|
image_id_available_tag: {
|
||||||
|
image_id: image.id,
|
||||||
|
available_tag: updateData.available_tag,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
updated_at: now,
|
||||||
|
changelog_url: digestInfo,
|
||||||
|
severity: "digest_changed",
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: uuidv4(),
|
||||||
|
image_id: image.id,
|
||||||
|
current_tag: updateData.current_tag,
|
||||||
|
available_tag: updateData.available_tag,
|
||||||
|
severity: "digest_changed",
|
||||||
|
changelog_url: digestInfo,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove stale updates for images on this host that are no longer in the updates list
|
||||||
|
const imageIdsToCleanup = hostImageIds.filter(
|
||||||
|
(id) => !reportedImageIds.includes(id),
|
||||||
|
);
|
||||||
|
if (imageIdsToCleanup.length > 0) {
|
||||||
|
await prisma.docker_image_updates.deleteMany({
|
||||||
|
where: {
|
||||||
|
image_id: { in: imageIdsToCleanup },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({ success: true, message: "Docker data collected successfully" });
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error collecting Docker data:", error);
|
||||||
|
console.error("Error stack:", error.stack);
|
||||||
|
console.error("Request body:", JSON.stringify(req.body, null, 2));
|
||||||
|
res.status(500).json({
|
||||||
|
error: "Failed to collect Docker data",
|
||||||
|
message: error.message,
|
||||||
|
details: process.env.NODE_ENV === "development" ? error.stack : undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
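// Example payload for POST /api/v1/docker/collect (illustrative values; the
// field names mirror what the handler above reads from req.body):
//
//   {
//     "apiId": "patchmon_xxxxxxxx",
//     "apiKey": "********",
//     "containers": [{
//       "container_id": "3f4e5a...", "name": "web", "status": "running", "state": "running",
//       "image_name": "nginx", "image_tag": "1.27", "image_repository": "nginx",
//       "image_id": "sha256:abc...", "image_source": "docker-hub",
//       "created_at": "2025-10-01T12:00:00Z", "started_at": "2025-10-01T12:00:05Z"
//     }],
//     "images": [{
//       "repository": "nginx", "tag": "1.27", "image_id": "sha256:abc...",
//       "digest": "sha256:def...", "size_bytes": 191234567, "source": "docker-hub"
//     }],
//     "updates": [{
//       "repository": "nginx", "current_tag": "1.27", "available_tag": "1.27",
//       "image_id": "sha256:abc...", "current_digest": "sha256:def...",
//       "available_digest": "sha256:012..."
//     }]
//   }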
|
||||||
|
|
||||||
|
// GET /api/v1/docker/agent - Serve the Docker agent installation script
|
||||||
|
router.get("/agent", async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const fs = require("node:fs");
|
||||||
|
const path = require("node:path");
|
||||||
|
const agentPath = path.join(
|
||||||
|
__dirname,
|
||||||
|
"../../..",
|
||||||
|
"agents",
|
||||||
|
"patchmon-docker-agent.sh",
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check if file exists
|
||||||
|
if (!fs.existsSync(agentPath)) {
|
||||||
|
return res.status(404).json({ error: "Docker agent script not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read and serve the file
|
||||||
|
const agentScript = fs.readFileSync(agentPath, "utf8");
|
||||||
|
res.setHeader("Content-Type", "text/x-shellscript");
|
||||||
|
res.setHeader(
|
||||||
|
"Content-Disposition",
|
||||||
|
'inline; filename="patchmon-docker-agent.sh"',
|
||||||
|
);
|
||||||
|
res.send(agentScript);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error serving Docker agent:", error);
|
||||||
|
res.status(500).json({ error: "Failed to serve Docker agent script" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
236 backend/src/routes/gethomepageRoutes.js (new file)
@@ -0,0 +1,236 @@
|
|||||||
|
const express = require("express");
|
||||||
|
const { createPrismaClient } = require("../config/database");
|
||||||
|
const bcrypt = require("bcryptjs");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
const prisma = createPrismaClient();
|
||||||
|
|
||||||
|
// Middleware to authenticate API key
|
||||||
|
const authenticateApiKey = async (req, res, next) => {
|
||||||
|
try {
|
||||||
|
const authHeader = req.headers.authorization;
|
||||||
|
|
||||||
|
if (!authHeader || !authHeader.startsWith("Basic ")) {
|
||||||
|
return res
|
||||||
|
.status(401)
|
||||||
|
.json({ error: "Missing or invalid authorization header" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode base64 credentials
|
||||||
|
const base64Credentials = authHeader.split(" ")[1];
|
||||||
|
const credentials = Buffer.from(base64Credentials, "base64").toString(
|
||||||
|
"ascii",
|
||||||
|
);
|
||||||
|
const [apiKey, apiSecret] = credentials.split(":");
|
||||||
|
|
||||||
|
if (!apiKey || !apiSecret) {
|
||||||
|
return res.status(401).json({ error: "Invalid credentials format" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the token in database
|
||||||
|
const token = await prisma.auto_enrollment_tokens.findUnique({
|
||||||
|
where: { token_key: apiKey },
|
||||||
|
include: {
|
||||||
|
users: {
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
username: true,
|
||||||
|
role: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
console.log(`API key not found: ${apiKey}`);
|
||||||
|
return res.status(401).json({ error: "Invalid API key" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if token is active
|
||||||
|
if (!token.is_active) {
|
||||||
|
return res.status(401).json({ error: "API key is disabled" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if token has expired
|
||||||
|
if (token.expires_at && new Date(token.expires_at) < new Date()) {
|
||||||
|
return res.status(401).json({ error: "API key has expired" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if token is for gethomepage integration
|
||||||
|
if (token.metadata?.integration_type !== "gethomepage") {
|
||||||
|
return res.status(401).json({ error: "Invalid API key type" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the secret
|
||||||
|
const isValidSecret = await bcrypt.compare(apiSecret, token.token_secret);
|
||||||
|
if (!isValidSecret) {
|
||||||
|
return res.status(401).json({ error: "Invalid API secret" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check IP restrictions if any
|
||||||
|
if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
|
||||||
|
const clientIp = req.ip || req.connection.remoteAddress;
|
||||||
|
const forwardedFor = req.headers["x-forwarded-for"];
|
||||||
|
const realIp = req.headers["x-real-ip"];
|
||||||
|
|
||||||
|
// Get the actual client IP (considering proxies)
|
||||||
|
const actualClientIp = forwardedFor
|
||||||
|
? forwardedFor.split(",")[0].trim()
|
||||||
|
: realIp || clientIp;
|
||||||
|
|
||||||
|
const isAllowedIp = token.allowed_ip_ranges.some((range) => {
|
||||||
|
// Simple IP range check (can be enhanced for CIDR support)
|
||||||
|
return actualClientIp.startsWith(range) || actualClientIp === range;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!isAllowedIp) {
|
||||||
|
console.log(
|
||||||
|
`IP validation failed. Client IP: ${actualClientIp}, Allowed ranges: ${token.allowed_ip_ranges.join(", ")}`,
|
||||||
|
);
|
||||||
|
return res.status(403).json({ error: "IP address not allowed" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update last used timestamp
|
||||||
|
await prisma.auto_enrollment_tokens.update({
|
||||||
|
where: { id: token.id },
|
||||||
|
data: { last_used_at: new Date() },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Attach token info to request
|
||||||
|
req.apiToken = token;
|
||||||
|
next();
|
||||||
|
} catch (error) {
|
||||||
|
console.error("API key authentication error:", error);
|
||||||
|
res.status(500).json({ error: "Authentication failed" });
|
||||||
|
}
|
||||||
|
};
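// Client-side sketch (illustrative): the middleware above expects standard HTTP
// Basic auth where the username is the token key and the password is the token secret.
//
//   const header =
//     "Basic " + Buffer.from(`${tokenKey}:${tokenSecret}`).toString("base64");
//   const res = await fetch("http://localhost:3001/api/v1/gethomepage/stats", {
//     headers: { Authorization: header },
//   });
//   console.log(await res.json());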
|
||||||
|
|
||||||
|
// Get homepage widget statistics
|
||||||
|
router.get("/stats", authenticateApiKey, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
// Get total hosts count
|
||||||
|
const totalHosts = await prisma.hosts.count({
|
||||||
|
where: { status: "active" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get total outdated packages count
|
||||||
|
const totalOutdatedPackages = await prisma.host_packages.count({
|
||||||
|
where: { needs_update: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get total repositories count
|
||||||
|
const totalRepos = await prisma.repositories.count({
|
||||||
|
where: { is_active: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get hosts that need updates (have outdated packages)
|
||||||
|
const hostsNeedingUpdates = await prisma.hosts.count({
|
||||||
|
where: {
|
||||||
|
status: "active",
|
||||||
|
host_packages: {
|
||||||
|
some: {
|
||||||
|
needs_update: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get security updates count
|
||||||
|
const securityUpdates = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
needs_update: true,
|
||||||
|
is_security_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get hosts with security updates
|
||||||
|
const hostsWithSecurityUpdates = await prisma.hosts.count({
|
||||||
|
where: {
|
||||||
|
status: "active",
|
||||||
|
host_packages: {
|
||||||
|
some: {
|
||||||
|
needs_update: true,
|
||||||
|
is_security_update: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get up-to-date hosts count
|
||||||
|
const upToDateHosts = totalHosts - hostsNeedingUpdates;
|
||||||
|
|
||||||
|
// Get recent update activity (last 24 hours)
|
||||||
|
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
|
||||||
|
const recentUpdates = await prisma.update_history.count({
|
||||||
|
where: {
|
||||||
|
timestamp: {
|
||||||
|
gte: oneDayAgo,
|
||||||
|
},
|
||||||
|
status: "success",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get OS distribution
|
||||||
|
const osDistribution = await prisma.hosts.groupBy({
|
||||||
|
by: ["os_type"],
|
||||||
|
where: { status: "active" },
|
||||||
|
_count: {
|
||||||
|
id: true,
|
||||||
|
},
|
||||||
|
orderBy: {
|
||||||
|
_count: {
|
||||||
|
id: "desc",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Format OS distribution data
|
||||||
|
const osDistributionFormatted = osDistribution.map((os) => ({
|
||||||
|
name: os.os_type,
|
||||||
|
count: os._count.id,
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Extract top 3 OS types for flat display in widgets
|
||||||
|
const top_os_1 = osDistributionFormatted[0] || { name: "None", count: 0 };
|
||||||
|
const top_os_2 = osDistributionFormatted[1] || { name: "None", count: 0 };
|
||||||
|
const top_os_3 = osDistributionFormatted[2] || { name: "None", count: 0 };
|
||||||
|
|
||||||
|
// Prepare response data
|
||||||
|
const stats = {
|
||||||
|
total_hosts: totalHosts,
|
||||||
|
total_outdated_packages: totalOutdatedPackages,
|
||||||
|
total_repos: totalRepos,
|
||||||
|
hosts_needing_updates: hostsNeedingUpdates,
|
||||||
|
up_to_date_hosts: upToDateHosts,
|
||||||
|
security_updates: securityUpdates,
|
||||||
|
hosts_with_security_updates: hostsWithSecurityUpdates,
|
||||||
|
recent_updates_24h: recentUpdates,
|
||||||
|
os_distribution: osDistributionFormatted,
|
||||||
|
// Flattened OS data for easy widget display
|
||||||
|
top_os_1_name: top_os_1.name,
|
||||||
|
top_os_1_count: top_os_1.count,
|
||||||
|
top_os_2_name: top_os_2.name,
|
||||||
|
top_os_2_count: top_os_2.count,
|
||||||
|
top_os_3_name: top_os_3.name,
|
||||||
|
top_os_3_count: top_os_3.count,
|
||||||
|
last_updated: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
res.json(stats);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching homepage stats:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch statistics" });
|
||||||
|
}
|
||||||
|
});
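// Example response shape (illustrative values). The flattened top_os_* fields
// exist so simple widgets can display the top OS types without walking the
// os_distribution array:
//
//   {
//     "total_hosts": 12, "total_outdated_packages": 87, "total_repos": 5,
//     "hosts_needing_updates": 4, "up_to_date_hosts": 8,
//     "security_updates": 9, "hosts_with_security_updates": 3,
//     "recent_updates_24h": 2,
//     "os_distribution": [{ "name": "ubuntu", "count": 7 }],
//     "top_os_1_name": "ubuntu", "top_os_1_count": 7,
//     "top_os_2_name": "debian", "top_os_2_count": 4,
//     "top_os_3_name": "rocky", "top_os_3_count": 1,
//     "last_updated": "2025-01-01T00:00:00.000Z"
//   }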
|
||||||
|
|
||||||
|
// Health check endpoint for the API
|
||||||
|
router.get("/health", authenticateApiKey, async (req, res) => {
|
||||||
|
res.json({
|
||||||
|
status: "ok",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
api_key: req.apiToken.token_name,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
@@ -390,7 +390,8 @@ router.post(
 const totalPackages = packages.length;
 
 // Process everything in a single transaction to avoid race conditions
-await prisma.$transaction(async (tx) => {
+await prisma.$transaction(
+async (tx) => {
 // Update host data
 await tx.hosts.update({
 where: { id: host.id },
@@ -402,43 +403,74 @@ router.post(
 where: { host_id: host.id },
 });
 
-// Process each package
-for (const packageData of packages) {
-// Find or create package
-let pkg = await tx.packages.findUnique({
-where: { name: packageData.name },
+// Process packages in batches using createMany/updateMany
+const packagesToCreate = [];
+const packagesToUpdate = [];
+const hostPackagesToUpsert = [];
+
+// First pass: identify what needs to be created/updated
+const existingPackages = await tx.packages.findMany({
+where: {
+name: { in: packages.map((p) => p.name) },
+},
 });
 
-if (!pkg) {
-pkg = await tx.packages.create({
-data: {
+const existingPackageMap = new Map(
+existingPackages.map((p) => [p.name, p]),
+);
+
+for (const packageData of packages) {
+const existingPkg = existingPackageMap.get(packageData.name);
+
+if (!existingPkg) {
+// Package doesn't exist, create it
+const newPkg = {
 id: uuidv4(),
 name: packageData.name,
 description: packageData.description || null,
 category: packageData.category || null,
 latest_version:
 packageData.availableVersion || packageData.currentVersion,
+created_at: new Date(),
 updated_at: new Date(),
-},
-});
-} else {
-// Update package latest version if newer
-if (
+};
+packagesToCreate.push(newPkg);
+existingPackageMap.set(packageData.name, newPkg);
+} else if (
 packageData.availableVersion &&
-packageData.availableVersion !== pkg.latest_version
+packageData.availableVersion !== existingPkg.latest_version
 ) {
-await tx.packages.update({
-where: { id: pkg.id },
-data: {
+// Package exists but needs version update
+packagesToUpdate.push({
+id: existingPkg.id,
 latest_version: packageData.availableVersion,
-updated_at: new Date(),
-},
 });
 }
 }
 
-// Create host package relationship
-// Use upsert to handle potential duplicates gracefully
+// Batch create new packages
+if (packagesToCreate.length > 0) {
+await tx.packages.createMany({
+data: packagesToCreate,
+skipDuplicates: true,
+});
+}
+
+// Batch update existing packages
+for (const update of packagesToUpdate) {
+await tx.packages.update({
+where: { id: update.id },
+data: {
+latest_version: update.latest_version,
+updated_at: new Date(),
+},
+});
+}
+
+// Now process host_packages
+for (const packageData of packages) {
+const pkg = existingPackageMap.get(packageData.name);
+
 await tx.host_packages.upsert({
 where: {
 host_id_package_id: {
@@ -536,7 +568,12 @@ router.post(
 status: "success",
 },
 });
-});
+},
+{
+maxWait: 30000, // Wait up to 30s for a transaction slot
+timeout: 60000, // Allow transaction to run for up to 60s
+},
+);
 
 // Agent auto-update is now handled client-side by the agent itself
 
|
|||||||
@@ -14,13 +14,13 @@ const router = express.Router();
 function getCurrentVersion() {
 try {
 const packageJson = require("../../package.json");
-return packageJson?.version || "1.2.8";
+return packageJson?.version || "1.2.9";
 } catch (packageError) {
 console.warn(
 "Could not read version from package.json, using fallback:",
 packageError.message,
 );
-return "1.2.8";
+return "1.2.9";
 }
 }
 
@@ -292,11 +292,11 @@ router.get(
 ) {
 console.log("GitHub API rate limited, providing fallback data");
 latestRelease = {
-tagName: "1.2.8",
+tagName: "v1.2.8",
 version: "1.2.8",
 publishedAt: "2025-10-02T17:12:53Z",
 htmlUrl:
-"https://github.com/PatchMon/PatchMon/releases/tag/1.2.8",
+"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.8",
 };
 latestCommit = {
 sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
@@ -318,10 +318,9 @@ router.get(
 latestRelease = settings.latest_version
 ? {
 version: settings.latest_version,
-tagName: settings.latest_version,
+tagName: `v${settings.latest_version}`,
 publishedAt: null, // Only use date from GitHub API, not cached data
-// Note: URL may need 'v' prefix depending on actual tag format in repo
-htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/${settings.latest_version}`,
+htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/v${settings.latest_version}`,
 }
 : null;
 }
@@ -62,9 +62,13 @@ const versionRoutes = require("./routes/versionRoutes");
 const tfaRoutes = require("./routes/tfaRoutes");
 const searchRoutes = require("./routes/searchRoutes");
 const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
+const gethomepageRoutes = require("./routes/gethomepageRoutes");
+const automationRoutes = require("./routes/automationRoutes");
+const dockerRoutes = require("./routes/dockerRoutes");
 const updateScheduler = require("./services/updateScheduler");
 const { initSettings } = require("./services/settingsService");
 const { cleanup_expired_sessions } = require("./utils/session_manager");
+const { queueManager } = require("./services/automation");
 
 // Initialize Prisma client with optimized connection pooling for multiple instances
 const prisma = createPrismaClient();

@@ -422,6 +426,9 @@ app.use(
 authLimiter,
 autoEnrollmentRoutes,
 );
+app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
+app.use(`/api/${apiVersion}/automation`, automationRoutes);
+app.use(`/api/${apiVersion}/docker`, dockerRoutes);
 
 // Error handling middleware
 app.use((err, _req, res, _next) => {

@@ -448,6 +455,7 @@ process.on("SIGINT", async () => {
 clearInterval(app.locals.session_cleanup_interval);
 }
 updateScheduler.stop();
+await queueManager.shutdown();
 await disconnectPrisma(prisma);
 process.exit(0);
 });

@@ -460,6 +468,7 @@ process.on("SIGTERM", async () => {
 clearInterval(app.locals.session_cleanup_interval);
 }
 updateScheduler.stop();
+await queueManager.shutdown();
 await disconnectPrisma(prisma);
 process.exit(0);
 });

@@ -728,6 +737,12 @@ async function startServer() {
 // Initialize dashboard preferences for all users
 await initializeDashboardPreferences();
 
+// Initialize BullMQ queue manager
+await queueManager.initialize();
+
+// Schedule recurring jobs
+await queueManager.scheduleAllJobs();
+
 // Initial session cleanup
 await cleanup_expired_sessions();
 
|
|||||||
67 backend/src/services/automation/echoHello.js (new file)
@@ -0,0 +1,67 @@
|
|||||||
|
/**
|
||||||
|
* Echo Hello Automation
|
||||||
|
* Simple test automation task
|
||||||
|
*/
|
||||||
|
class EchoHello {
|
||||||
|
constructor(queueManager) {
|
||||||
|
this.queueManager = queueManager;
|
||||||
|
this.queueName = "echo-hello";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process echo hello job
|
||||||
|
*/
|
||||||
|
async process(job) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
console.log("👋 Starting echo hello task...");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Simple echo task
|
||||||
|
const message = job.data.message || "Hello from BullMQ!";
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
// Simulate some work
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||||
|
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.log(`✅ Echo hello completed in ${executionTime}ms: ${message}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
message,
|
||||||
|
timestamp,
|
||||||
|
executionTime,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.error(
|
||||||
|
`❌ Echo hello failed after ${executionTime}ms:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Echo hello is manual only - no scheduling
|
||||||
|
*/
|
||||||
|
async schedule() {
|
||||||
|
console.log("ℹ️ Echo hello is manual only - no scheduling needed");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger manual echo hello
|
||||||
|
*/
|
||||||
|
async triggerManual(message = "Hello from BullMQ!") {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"echo-hello-manual",
|
||||||
|
{ message },
|
||||||
|
{ priority: 1 },
|
||||||
|
);
|
||||||
|
console.log("✅ Manual echo hello triggered");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = EchoHello;
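// Usage sketch (illustrative; assumes the queueManager singleton exported from
// ./index.js has already been initialized, and runs inside an async function):
//
//   const { queueManager } = require("./index");
//   const echo = queueManager.automations["echo-hello"];
//   const job = await echo.triggerManual("Hello from a manual run");
//   console.log("queued job", job.id);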
|
||||||
153 backend/src/services/automation/githubUpdateCheck.js (new file)
@@ -0,0 +1,153 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
const { compareVersions, checkPublicRepo } = require("./shared/utils");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GitHub Update Check Automation
|
||||||
|
* Checks for new releases on GitHub using HTTPS API
|
||||||
|
*/
|
||||||
|
class GitHubUpdateCheck {
|
||||||
|
constructor(queueManager) {
|
||||||
|
this.queueManager = queueManager;
|
||||||
|
this.queueName = "github-update-check";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process GitHub update check job
|
||||||
|
*/
|
||||||
|
async process(job) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
console.log("🔍 Starting GitHub update check...");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get settings
|
||||||
|
const settings = await prisma.settings.findFirst();
|
||||||
|
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
|
||||||
|
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
|
||||||
|
let owner, repo;
|
||||||
|
|
||||||
|
// Parse GitHub repository URL (supports both HTTPS and SSH formats)
|
||||||
|
if (repoUrl.includes("git@github.com:")) {
|
||||||
|
const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
|
||||||
|
if (match) {
|
||||||
|
[, owner, repo] = match;
|
||||||
|
}
|
||||||
|
} else if (repoUrl.includes("github.com/")) {
|
||||||
|
const match = repoUrl.match(
|
||||||
|
/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
|
||||||
|
);
|
||||||
|
if (match) {
|
||||||
|
[, owner, repo] = match;
|
||||||
|
}
|
||||||
|
}
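// For example (illustrative): "git@github.com:PatchMon/PatchMon.git" and
// "https://github.com/PatchMon/PatchMon" both parse to owner "PatchMon" and
// repo "PatchMon"; any other shape falls through to the error below.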
|
||||||
|
|
||||||
|
if (!owner || !repo) {
|
||||||
|
throw new Error("Could not parse GitHub repository URL");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always use HTTPS GitHub API (simpler and more reliable)
|
||||||
|
const latestVersion = await checkPublicRepo(owner, repo);
|
||||||
|
|
||||||
|
if (!latestVersion) {
|
||||||
|
throw new Error("Could not determine latest version");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read version from package.json
|
||||||
|
let currentVersion = "1.2.7"; // fallback
|
||||||
|
try {
|
||||||
|
const packageJson = require("../../../package.json");
|
||||||
|
if (packageJson?.version) {
|
||||||
|
currentVersion = packageJson.version;
|
||||||
|
}
|
||||||
|
} catch (packageError) {
|
||||||
|
console.warn(
|
||||||
|
"Could not read version from package.json:",
|
||||||
|
packageError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const isUpdateAvailable =
|
||||||
|
compareVersions(latestVersion, currentVersion) > 0;
|
||||||
|
|
||||||
|
// Update settings with check results
|
||||||
|
await prisma.settings.update({
|
||||||
|
where: { id: settings.id },
|
||||||
|
data: {
|
||||||
|
last_update_check: new Date(),
|
||||||
|
update_available: isUpdateAvailable,
|
||||||
|
latest_version: latestVersion,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.log(
|
||||||
|
`✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
currentVersion,
|
||||||
|
latestVersion,
|
||||||
|
isUpdateAvailable,
|
||||||
|
executionTime,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.error(
|
||||||
|
`❌ GitHub update check failed after ${executionTime}ms:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Update last check time even on error
|
||||||
|
try {
|
||||||
|
const settings = await prisma.settings.findFirst();
|
||||||
|
if (settings) {
|
||||||
|
await prisma.settings.update({
|
||||||
|
where: { id: settings.id },
|
||||||
|
data: {
|
||||||
|
last_update_check: new Date(),
|
||||||
|
update_available: false,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (updateError) {
|
||||||
|
console.error(
|
||||||
|
"❌ Error updating last check time:",
|
||||||
|
updateError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule recurring GitHub update check (daily at midnight)
|
||||||
|
*/
|
||||||
|
async schedule() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"github-update-check",
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
repeat: { cron: "0 0 * * *" }, // Daily at midnight
|
||||||
|
jobId: "github-update-check-recurring",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
console.log("✅ GitHub update check scheduled");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger manual GitHub update check
|
||||||
|
*/
|
||||||
|
async triggerManual() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"github-update-check-manual",
|
||||||
|
{},
|
||||||
|
{ priority: 1 },
|
||||||
|
);
|
||||||
|
console.log("✅ Manual GitHub update check triggered");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = GitHubUpdateCheck;
|
||||||
283 backend/src/services/automation/index.js (new file)
@@ -0,0 +1,283 @@
|
|||||||
|
const { Queue, Worker } = require("bullmq");
|
||||||
|
const { redis, redisConnection } = require("./shared/redis");
|
||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
|
||||||
|
// Import automation classes
|
||||||
|
const GitHubUpdateCheck = require("./githubUpdateCheck");
|
||||||
|
const SessionCleanup = require("./sessionCleanup");
|
||||||
|
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
|
||||||
|
const EchoHello = require("./echoHello");
|
||||||
|
|
||||||
|
// Queue names
|
||||||
|
const QUEUE_NAMES = {
|
||||||
|
GITHUB_UPDATE_CHECK: "github-update-check",
|
||||||
|
SESSION_CLEANUP: "session-cleanup",
|
||||||
|
SYSTEM_MAINTENANCE: "system-maintenance",
|
||||||
|
ECHO_HELLO: "echo-hello",
|
||||||
|
ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main Queue Manager
|
||||||
|
* Manages all BullMQ queues and workers
|
||||||
|
*/
|
||||||
|
class QueueManager {
|
||||||
|
constructor() {
|
||||||
|
this.queues = {};
|
||||||
|
this.workers = {};
|
||||||
|
this.automations = {};
|
||||||
|
this.isInitialized = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize all queues, workers, and automations
|
||||||
|
*/
|
||||||
|
async initialize() {
|
||||||
|
try {
|
||||||
|
console.log("✅ Redis connection successful");
|
||||||
|
|
||||||
|
// Initialize queues
|
||||||
|
await this.initializeQueues();
|
||||||
|
|
||||||
|
// Initialize automation classes
|
||||||
|
await this.initializeAutomations();
|
||||||
|
|
||||||
|
// Initialize workers
|
||||||
|
await this.initializeWorkers();
|
||||||
|
|
||||||
|
// Setup event listeners
|
||||||
|
this.setupEventListeners();
|
||||||
|
|
||||||
|
this.isInitialized = true;
|
||||||
|
console.log("✅ Queue manager initialized successfully");
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Failed to initialize queue manager:", error.message);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize all queues
|
||||||
|
*/
|
||||||
|
async initializeQueues() {
|
||||||
|
for (const [key, queueName] of Object.entries(QUEUE_NAMES)) {
|
||||||
|
this.queues[queueName] = new Queue(queueName, {
|
||||||
|
connection: redisConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
removeOnComplete: 50, // Keep last 50 completed jobs
|
||||||
|
removeOnFail: 20, // Keep last 20 failed jobs
|
||||||
|
attempts: 3, // Retry failed jobs 3 times
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`✅ Queue '${queueName}' initialized`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize automation classes
|
||||||
|
*/
|
||||||
|
async initializeAutomations() {
|
||||||
|
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
|
||||||
|
this,
|
||||||
|
);
|
||||||
|
this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
|
||||||
|
new OrphanedRepoCleanup(this);
|
||||||
|
this.automations[QUEUE_NAMES.ECHO_HELLO] = new EchoHello(this);
|
||||||
|
|
||||||
|
console.log("✅ All automation classes initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize all workers
|
||||||
|
*/
|
||||||
|
async initializeWorkers() {
|
||||||
|
// GitHub Update Check Worker
|
||||||
|
this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
|
||||||
|
QUEUE_NAMES.GITHUB_UPDATE_CHECK,
|
||||||
|
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
|
||||||
|
),
|
||||||
|
{
|
||||||
|
connection: redisConnection,
|
||||||
|
concurrency: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Session Cleanup Worker
|
||||||
|
this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
|
||||||
|
QUEUE_NAMES.SESSION_CLEANUP,
|
||||||
|
this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.SESSION_CLEANUP],
|
||||||
|
),
|
||||||
|
{
|
||||||
|
connection: redisConnection,
|
||||||
|
concurrency: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Orphaned Repo Cleanup Worker
|
||||||
|
this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
|
||||||
|
QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
|
||||||
|
),
|
||||||
|
{
|
||||||
|
connection: redisConnection,
|
||||||
|
concurrency: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Echo Hello Worker
|
||||||
|
this.workers[QUEUE_NAMES.ECHO_HELLO] = new Worker(
|
||||||
|
QUEUE_NAMES.ECHO_HELLO,
|
||||||
|
this.automations[QUEUE_NAMES.ECHO_HELLO].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.ECHO_HELLO],
|
||||||
|
),
|
||||||
|
{
|
||||||
|
connection: redisConnection,
|
||||||
|
concurrency: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Add error handling for all workers
|
||||||
|
Object.values(this.workers).forEach((worker) => {
|
||||||
|
worker.on("error", (error) => {
|
||||||
|
console.error("Worker error:", error);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log("✅ All workers initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup event listeners for all queues
|
||||||
|
*/
|
||||||
|
setupEventListeners() {
|
||||||
|
for (const queueName of Object.values(QUEUE_NAMES)) {
|
||||||
|
const queue = this.queues[queueName];
|
||||||
|
queue.on("error", (error) => {
|
||||||
|
console.error(`❌ Queue '${queueName}' experienced an error:`, error);
|
||||||
|
});
|
||||||
|
queue.on("failed", (job, err) => {
|
||||||
|
console.error(
|
||||||
|
`❌ Job '${job.id}' in queue '${queueName}' failed:`,
|
||||||
|
err,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
queue.on("completed", (job) => {
|
||||||
|
console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
console.log("✅ Queue events initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule all recurring jobs
|
||||||
|
*/
|
||||||
|
async scheduleAllJobs() {
|
||||||
|
await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
|
||||||
|
await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
|
||||||
|
await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
|
||||||
|
await this.automations[QUEUE_NAMES.ECHO_HELLO].schedule();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Manual job triggers
|
||||||
|
*/
|
||||||
|
async triggerGitHubUpdateCheck() {
|
||||||
|
return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
async triggerSessionCleanup() {
|
||||||
|
return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
async triggerOrphanedRepoCleanup() {
|
||||||
|
return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
async triggerEchoHello(message = "Hello from BullMQ!") {
|
||||||
|
return this.automations[QUEUE_NAMES.ECHO_HELLO].triggerManual(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get queue statistics
|
||||||
|
*/
|
||||||
|
async getQueueStats(queueName) {
|
||||||
|
const queue = this.queues[queueName];
|
||||||
|
if (!queue) {
|
||||||
|
throw new Error(`Queue ${queueName} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||||
|
queue.getWaiting(),
|
||||||
|
queue.getActive(),
|
||||||
|
queue.getCompleted(),
|
||||||
|
queue.getFailed(),
|
||||||
|
queue.getDelayed(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
waiting: waiting.length,
|
||||||
|
active: active.length,
|
||||||
|
completed: completed.length,
|
||||||
|
failed: failed.length,
|
||||||
|
delayed: delayed.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all queue statistics
|
||||||
|
*/
|
||||||
|
async getAllQueueStats() {
|
||||||
|
const stats = {};
|
||||||
|
for (const queueName of Object.values(QUEUE_NAMES)) {
|
||||||
|
stats[queueName] = await this.getQueueStats(queueName);
|
||||||
|
}
|
||||||
|
return stats;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get recent jobs for a queue
|
||||||
|
*/
|
||||||
|
async getRecentJobs(queueName, limit = 10) {
|
||||||
|
const queue = this.queues[queueName];
|
||||||
|
if (!queue) {
|
||||||
|
throw new Error(`Queue ${queueName} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [completed, failed] = await Promise.all([
|
||||||
|
queue.getCompleted(0, limit - 1),
|
||||||
|
queue.getFailed(0, limit - 1),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return [...completed, ...failed]
|
||||||
|
.sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
|
||||||
|
.slice(0, limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Graceful shutdown
|
||||||
|
*/
|
||||||
|
async shutdown() {
|
||||||
|
console.log("🛑 Shutting down queue manager...");
|
||||||
|
|
||||||
|
for (const queueName of Object.keys(this.queues)) {
|
||||||
|
await this.queues[queueName].close();
|
||||||
|
await this.workers[queueName].close();
|
||||||
|
}
|
||||||
|
|
||||||
|
await redis.quit();
|
||||||
|
console.log("✅ Queue manager shutdown complete");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const queueManager = new QueueManager();
|
||||||
|
|
||||||
|
module.exports = { queueManager, QUEUE_NAMES };
|
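For orientation, a minimal sketch of how the exported queueManager would typically be wired into the backend entry point. The bootstrap file name and the SIGTERM hook below are assumptions for illustration, not part of this change; only initialize(), scheduleAllJobs(), and shutdown() come from the module above.

// server.js (hypothetical bootstrap, not part of this diff)
const { queueManager } = require("./services/automation");

async function startAutomation() {
  // Connects to Redis, creates queues and workers, and registers event listeners
  await queueManager.initialize();
  // Registers the recurring jobs (update check, session/repo cleanup, echo)
  await queueManager.scheduleAllJobs();
}

startAutomation().catch((error) => {
  console.error("Automation startup failed:", error.message);
});

// Close workers, queues, and the Redis connection on shutdown
process.on("SIGTERM", async () => {
  await queueManager.shutdown();
  process.exit(0);
});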
backend/src/services/automation/orphanedRepoCleanup.js (new file, 114 lines)
@@ -0,0 +1,114 @@
const { prisma } = require("./shared/prisma");

/**
 * Orphaned Repository Cleanup Automation
 * Removes repositories with no associated hosts
 */
class OrphanedRepoCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "orphaned-repo-cleanup";
  }

  /**
   * Process orphaned repository cleanup job
   */
  async process(job) {
    const startTime = Date.now();
    console.log("🧹 Starting orphaned repository cleanup...");

    try {
      // Find repositories with 0 hosts
      const orphanedRepos = await prisma.repositories.findMany({
        where: {
          host_repositories: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              host_repositories: true,
            },
          },
        },
      });

      let deletedCount = 0;
      const deletedRepos = [];

      // Delete orphaned repositories
      for (const repo of orphanedRepos) {
        try {
          await prisma.repositories.delete({
            where: { id: repo.id },
          });
          deletedCount++;
          deletedRepos.push({
            id: repo.id,
            name: repo.name,
            url: repo.url,
          });
          console.log(
            `🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete repository ${repo.id}:`,
            deleteError.message,
          );
        }
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
      );

      return {
        success: true,
        deletedCount,
        deletedRepos,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring orphaned repository cleanup (daily at 2 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "orphaned-repo-cleanup",
      {},
      {
        repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
        jobId: "orphaned-repo-cleanup-recurring",
      },
    );
    console.log("✅ Orphaned repository cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual orphaned repository cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "orphaned-repo-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual orphaned repository cleanup triggered");
    return job;
  }
}

module.exports = OrphanedRepoCleanup;
backend/src/services/automation/sessionCleanup.js (new file, 78 lines)
@@ -0,0 +1,78 @@
const { prisma } = require("./shared/prisma");
const { cleanup_expired_sessions } = require("../../utils/session_manager");

/**
 * Session Cleanup Automation
 * Cleans up expired user sessions
 */
class SessionCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "session-cleanup";
  }

  /**
   * Process session cleanup job
   */
  async process(job) {
    const startTime = Date.now();
    console.log("🧹 Starting session cleanup...");

    try {
      const result = await prisma.user_sessions.deleteMany({
        where: {
          OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
        },
      });

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
      );

      return {
        success: true,
        sessionsCleaned: result.count,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Session cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring session cleanup (every hour)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "session-cleanup",
      {},
      {
        repeat: { cron: "0 * * * *" }, // Every hour
        jobId: "session-cleanup-recurring",
      },
    );
    console.log("✅ Session cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual session cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "session-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual session cleanup triggered");
    return job;
  }
}

module.exports = SessionCleanup;
backend/src/services/automation/shared/prisma.js (new file, 5 lines)
@@ -0,0 +1,5 @@
const { PrismaClient } = require("@prisma/client");

const prisma = new PrismaClient();

module.exports = { prisma };
backend/src/services/automation/shared/redis.js (new file, 16 lines)
@@ -0,0 +1,16 @@
const IORedis = require("ioredis");

// Redis connection configuration
const redisConnection = {
  host: process.env.REDIS_HOST || "localhost",
  port: parseInt(process.env.REDIS_PORT) || 6379,
  password: process.env.REDIS_PASSWORD || undefined,
  db: parseInt(process.env.REDIS_DB) || 0,
  retryDelayOnFailover: 100,
  maxRetriesPerRequest: null, // BullMQ requires this to be null
};

// Create Redis connection
const redis = new IORedis(redisConnection);

module.exports = { redis, redisConnection };
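The connection settings above are driven entirely by REDIS_HOST, REDIS_PORT, REDIS_PASSWORD, and REDIS_DB. A minimal sketch of how those values resolve (the override values are hypothetical, not taken from this change; the require path assumes the caller sits in the automation directory):

// With no REDIS_* variables set, the exported connection resolves to
//   { host: "localhost", port: 6379, password: undefined, db: 0 }
process.env.REDIS_HOST = "redis.internal"; // example override, not from this diff
process.env.REDIS_PORT = "6380";

const { redisConnection } = require("./shared/redis");
console.log(redisConnection.host, redisConnection.port); // "redis.internal" 6380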
backend/src/services/automation/shared/utils.js (new file, 82 lines)
@@ -0,0 +1,82 @@
// Common utilities for automation jobs

/**
 * Compare two semantic versions
 * @param {string} version1 - First version
 * @param {string} version2 - Second version
 * @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
 */
function compareVersions(version1, version2) {
  const v1parts = version1.split(".").map(Number);
  const v2parts = version2.split(".").map(Number);

  const maxLength = Math.max(v1parts.length, v2parts.length);

  for (let i = 0; i < maxLength; i++) {
    const v1part = v1parts[i] || 0;
    const v2part = v2parts[i] || 0;

    if (v1part > v2part) return 1;
    if (v1part < v2part) return -1;
  }

  return 0;
}

/**
 * Check public GitHub repository for latest release
 * @param {string} owner - Repository owner
 * @param {string} repo - Repository name
 * @returns {Promise<string|null>} - Latest version or null
 */
async function checkPublicRepo(owner, repo) {
  try {
    const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

    let currentVersion = "1.2.7"; // fallback
    try {
      const packageJson = require("../../../package.json");
      if (packageJson?.version) {
        currentVersion = packageJson.version;
      }
    } catch (packageError) {
      console.warn(
        "Could not read version from package.json for User-Agent, using fallback:",
        packageError.message,
      );
    }

    const response = await fetch(httpsRepoUrl, {
      method: "GET",
      headers: {
        Accept: "application/vnd.github.v3+json",
        "User-Agent": `PatchMon-Server/${currentVersion}`,
      },
    });

    if (!response.ok) {
      const errorText = await response.text();
      if (
        errorText.includes("rate limit") ||
        errorText.includes("API rate limit")
      ) {
        console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
        return null;
      }
      throw new Error(
        `GitHub API error: ${response.status} ${response.statusText}`,
      );
    }

    const releaseData = await response.json();
    return releaseData.tag_name.replace("v", "");
  } catch (error) {
    console.error("GitHub API error:", error.message);
    throw error;
  }
}

module.exports = {
  compareVersions,
  checkPublicRepo,
};
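As a quick illustration of how these two helpers behave (the owner and repo values below are placeholders, not taken from this change):

const { compareVersions, checkPublicRepo } = require("./shared/utils");

compareVersions("1.2.9", "1.2.8");  // 1  (first version is newer)
compareVersions("1.2.8", "1.2.8");  // 0  (versions are equal)
compareVersions("1.2.8", "1.2.10"); // -1 (first version is older)

(async () => {
  // Resolves to the latest release tag without the leading "v" (e.g. "1.2.9"),
  // or null when the GitHub API reports rate limiting.
  const latest = await checkPublicRepo("some-owner", "some-repo");
  console.log(latest);
})();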
@@ -104,7 +104,7 @@ class UpdateScheduler {
     }
   }

   // Read version from package.json dynamically
-  let currentVersion = "1.2.8"; // fallback
+  let currentVersion = "1.2.9"; // fallback
   try {
     const packageJson = require("../../package.json");
     if (packageJson?.version) {
@@ -214,7 +214,7 @@ class UpdateScheduler {
     const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

     // Get current version for User-Agent
-    let currentVersion = "1.2.8"; // fallback
+    let currentVersion = "1.2.9"; // fallback
     try {
       const packageJson = require("../../package.json");
       if (packageJson?.version) {
@@ -1,7 +1,7 @@
 {
   "name": "patchmon-frontend",
   "private": true,
-  "version": "1.2.8",
+  "version": "1.2.9",
   "license": "AGPL-3.0",
   "type": "module",
   "scripts": {
@@ -20,7 +20,7 @@
     "clsx": "^2.1.1",
     "cors": "^2.8.5",
     "date-fns": "^4.1.0",
-    "express": "^4.21.2",
+    "express": "^5.0.0",
     "http-proxy-middleware": "^3.0.3",
     "lucide-react": "^0.468.0",
     "react": "^18.3.1",
@@ -18,9 +18,15 @@ const Login = lazy(() => import("./pages/Login"));
 const PackageDetail = lazy(() => import("./pages/PackageDetail"));
 const Packages = lazy(() => import("./pages/Packages"));
 const Profile = lazy(() => import("./pages/Profile"));
-const Queue = lazy(() => import("./pages/Queue"));
+const Automation = lazy(() => import("./pages/Automation"));
 const Repositories = lazy(() => import("./pages/Repositories"));
 const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
+const Docker = lazy(() => import("./pages/Docker"));
+const DockerContainerDetail = lazy(
+  () => import("./pages/docker/ContainerDetail"),
+);
+const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
+const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
 const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
 const Integrations = lazy(() => import("./pages/settings/Integrations"));
 const Notifications = lazy(() => import("./pages/settings/Notifications"));
@@ -137,11 +143,51 @@ function AppRoutes() {
           }
         />
         <Route
-          path="/queue"
+          path="/automation"
           element={
             <ProtectedRoute requirePermission="can_view_hosts">
               <Layout>
-                <Queue />
+                <Automation />
+              </Layout>
+            </ProtectedRoute>
+          }
+        />
+        <Route
+          path="/docker"
+          element={
+            <ProtectedRoute requirePermission="can_view_reports">
+              <Layout>
+                <Docker />
+              </Layout>
+            </ProtectedRoute>
+          }
+        />
+        <Route
+          path="/docker/containers/:id"
+          element={
+            <ProtectedRoute requirePermission="can_view_reports">
+              <Layout>
+                <DockerContainerDetail />
+              </Layout>
+            </ProtectedRoute>
+          }
+        />
+        <Route
+          path="/docker/images/:id"
+          element={
+            <ProtectedRoute requirePermission="can_view_reports">
+              <Layout>
+                <DockerImageDetail />
+              </Layout>
+            </ProtectedRoute>
+          }
+        />
+        <Route
+          path="/docker/hosts/:id"
+          element={
+            <ProtectedRoute requirePermission="can_view_reports">
+              <Layout>
+                <DockerHostDetail />
               </Layout>
             </ProtectedRoute>
           }
@@ -11,7 +11,6 @@ import {
   Github,
   Globe,
   Home,
-  List,
   LogOut,
   Mail,
   Menu,
@@ -113,18 +112,26 @@ const Layout = ({ children }) => {
       });
     }

+    // Add Automation item (available to all users with inventory access)
+    inventoryItems.push({
+      name: "Automation",
+      href: "/automation",
+      icon: RefreshCw,
+      beta: true,
+    });
+
     if (canViewReports()) {
       inventoryItems.push(
-        {
-          name: "Services",
-          href: "/services",
-          icon: Activity,
-          comingSoon: true,
-        },
         {
           name: "Docker",
           href: "/docker",
           icon: Container,
+          beta: true,
+        },
+        {
+          name: "Services",
+          href: "/services",
+          icon: Activity,
           comingSoon: true,
         },
         {
@@ -136,21 +143,13 @@ const Layout = ({ children }) => {
       );
     }

-    // Add Pro-Action and Queue items (available to all users with inventory access)
-    inventoryItems.push(
-      {
-        name: "Pro-Action",
-        href: "/pro-action",
-        icon: Zap,
-        comingSoon: true,
-      },
-      {
-        name: "Queue",
-        href: "/queue",
-        icon: List,
-        comingSoon: true,
-      },
-    );
+    // Add Pro-Action item (available to all users with inventory access)
+    inventoryItems.push({
+      name: "Pro-Action",
+      href: "/pro-action",
+      icon: Zap,
+      comingSoon: true,
+    });

     if (inventoryItems.length > 0) {
       nav.push({
@@ -210,7 +209,7 @@ const Layout = ({ children }) => {
     if (path === "/services") return "Services";
     if (path === "/docker") return "Docker";
     if (path === "/pro-action") return "Pro-Action";
-    if (path === "/queue") return "Queue";
+    if (path === "/automation") return "Automation";
     if (path === "/users") return "Users";
     if (path === "/permissions") return "Permissions";
     if (path === "/settings") return "Settings";
@@ -436,6 +435,11 @@ const Layout = ({ children }) => {
                     Soon
                   </span>
                 )}
+                {subItem.beta && (
+                  <span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
+                    Beta
+                  </span>
+                )}
               </span>
             </Link>
           )}
@@ -707,6 +711,11 @@ const Layout = ({ children }) => {
                     Soon
                   </span>
                 )}
+                {subItem.beta && (
+                  <span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
+                    Beta
+                  </span>
+                )}
                 {subItem.showUpgradeIcon && (
                   <UpgradeNotificationIcon className="h-3 w-3" />
                 )}
@@ -929,11 +938,17 @@ const Layout = ({ children }) => {
           <div className="h-6 w-px bg-secondary-200 dark:bg-secondary-600 lg:hidden" />

           <div className="flex flex-1 gap-x-4 self-stretch lg:gap-x-6">
-            {/* Page title - hidden on dashboard, hosts, repositories, packages, and host details to give more space to search */}
-            {!["/", "/hosts", "/repositories", "/packages"].includes(
-              location.pathname,
-            ) &&
-              !location.pathname.startsWith("/hosts/") && (
+            {/* Page title - hidden on dashboard, hosts, repositories, packages, automation, docker, and host details to give more space to search */}
+            {![
+              "/",
+              "/hosts",
+              "/repositories",
+              "/packages",
+              "/automation",
+              "/docker",
+            ].includes(location.pathname) &&
+              !location.pathname.startsWith("/hosts/") &&
+              !location.pathname.startsWith("/docker/") && (
               <div className="relative flex items-center">
                 <h2 className="text-lg font-semibold text-secondary-900 dark:text-secondary-100 whitespace-nowrap">
                   {getPageTitle()}
@@ -943,7 +958,7 @@ const Layout = ({ children }) => {

             {/* Global Search Bar */}
             <div
-              className={`flex items-center ${["/", "/hosts", "/repositories", "/packages"].includes(location.pathname) || location.pathname.startsWith("/hosts/") ? "flex-1 max-w-none" : "max-w-sm"}`}
+              className={`flex items-center ${["/", "/hosts", "/repositories", "/packages", "/automation", "/docker"].includes(location.pathname) || location.pathname.startsWith("/hosts/") || location.pathname.startsWith("/docker/") ? "flex-1 max-w-none" : "max-w-sm"}`}
             >
               <GlobalSearch />
             </div>
frontend/src/pages/Automation.jsx (new file, 581 lines)
@@ -0,0 +1,581 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
Activity,
|
||||||
|
AlertCircle,
|
||||||
|
ArrowDown,
|
||||||
|
ArrowUp,
|
||||||
|
ArrowUpDown,
|
||||||
|
Bot,
|
||||||
|
CheckCircle,
|
||||||
|
Clock,
|
||||||
|
Play,
|
||||||
|
RefreshCw,
|
||||||
|
Settings,
|
||||||
|
XCircle,
|
||||||
|
Zap,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { useEffect, useState } from "react";
|
||||||
|
import api from "../utils/api";
|
||||||
|
|
||||||
|
const Automation = () => {
|
||||||
|
const [activeTab, setActiveTab] = useState("overview");
|
||||||
|
const [sortField, setSortField] = useState("nextRunTimestamp");
|
||||||
|
const [sortDirection, setSortDirection] = useState("asc");
|
||||||
|
|
||||||
|
// Fetch automation overview data
|
||||||
|
const { data: overview, isLoading: overviewLoading } = useQuery({
|
||||||
|
queryKey: ["automation-overview"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get("/automation/overview");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000, // Refresh every 30 seconds
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch queue statistics
|
||||||
|
const { data: queueStats, isLoading: statsLoading } = useQuery({
|
||||||
|
queryKey: ["automation-stats"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get("/automation/stats");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch recent jobs
|
||||||
|
const { data: recentJobs, isLoading: jobsLoading } = useQuery({
|
||||||
|
queryKey: ["automation-jobs"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const jobs = await Promise.all([
|
||||||
|
api
|
||||||
|
.get("/automation/jobs/github-update-check?limit=5")
|
||||||
|
.then((r) => r.data.data || []),
|
||||||
|
api
|
||||||
|
.get("/automation/jobs/session-cleanup?limit=5")
|
||||||
|
.then((r) => r.data.data || []),
|
||||||
|
]);
|
||||||
|
return {
|
||||||
|
githubUpdate: jobs[0],
|
||||||
|
sessionCleanup: jobs[1],
|
||||||
|
};
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const getStatusIcon = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return <CheckCircle className="h-4 w-4 text-green-500" />;
|
||||||
|
case "failed":
|
||||||
|
return <XCircle className="h-4 w-4 text-red-500" />;
|
||||||
|
case "active":
|
||||||
|
return <Activity className="h-4 w-4 text-blue-500 animate-pulse" />;
|
||||||
|
default:
|
||||||
|
return <Clock className="h-4 w-4 text-gray-500" />;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusColor = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return "bg-green-100 text-green-800";
|
||||||
|
case "failed":
|
||||||
|
return "bg-red-100 text-red-800";
|
||||||
|
case "active":
|
||||||
|
return "bg-blue-100 text-blue-800";
|
||||||
|
default:
|
||||||
|
return "bg-gray-100 text-gray-800";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const formatDate = (dateString) => {
|
||||||
|
if (!dateString) return "N/A";
|
||||||
|
return new Date(dateString).toLocaleString();
|
||||||
|
};
|
||||||
|
|
||||||
|
const formatDuration = (ms) => {
|
||||||
|
if (!ms) return "N/A";
|
||||||
|
return `${ms}ms`;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusBadge = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "Success":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-green-100 text-green-800">
|
||||||
|
Success
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
case "Failed":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-red-100 text-red-800">
|
||||||
|
Failed
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
case "Never run":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
|
||||||
|
Never run
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getNextRunTime = (schedule, lastRun) => {
|
||||||
|
if (schedule === "Manual only") return "Manual trigger only";
|
||||||
|
if (schedule === "Daily at midnight") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(0, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 2 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(2, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Every hour") {
|
||||||
|
const now = new Date();
|
||||||
|
const nextHour = new Date(now);
|
||||||
|
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
|
||||||
|
return nextHour.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return "Unknown";
|
||||||
|
};
|
||||||
|
|
||||||
|
const getNextRunTimestamp = (schedule) => {
|
||||||
|
if (schedule === "Manual only") return Number.MAX_SAFE_INTEGER; // Manual tasks go to bottom
|
||||||
|
if (schedule === "Daily at midnight") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(0, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 2 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(2, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Every hour") {
|
||||||
|
const now = new Date();
|
||||||
|
const nextHour = new Date(now);
|
||||||
|
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
|
||||||
|
return nextHour.getTime();
|
||||||
|
}
|
||||||
|
return Number.MAX_SAFE_INTEGER; // Unknown schedules go to bottom
|
||||||
|
};
|
||||||
|
|
||||||
|
const triggerManualJob = async (jobType, data = {}) => {
|
||||||
|
try {
|
||||||
|
let endpoint;
|
||||||
|
|
||||||
|
if (jobType === "github") {
|
||||||
|
endpoint = "/automation/trigger/github-update";
|
||||||
|
} else if (jobType === "sessions") {
|
||||||
|
endpoint = "/automation/trigger/session-cleanup";
|
||||||
|
} else if (jobType === "echo") {
|
||||||
|
endpoint = "/automation/trigger/echo-hello";
|
||||||
|
} else if (jobType === "orphaned-repos") {
|
||||||
|
endpoint = "/automation/trigger/orphaned-repo-cleanup";
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await api.post(endpoint, data);
|
||||||
|
|
||||||
|
// Refresh data
|
||||||
|
window.location.reload();
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering job:", error);
|
||||||
|
alert(
|
||||||
|
"Failed to trigger job: " +
|
||||||
|
(error.response?.data?.error || error.message),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSort = (field) => {
|
||||||
|
if (sortField === field) {
|
||||||
|
setSortDirection(sortDirection === "asc" ? "desc" : "asc");
|
||||||
|
} else {
|
||||||
|
setSortField(field);
|
||||||
|
setSortDirection("asc");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getSortIcon = (field) => {
|
||||||
|
if (sortField !== field) return <ArrowUpDown className="h-4 w-4" />;
|
||||||
|
return sortDirection === "asc" ? (
|
||||||
|
<ArrowUp className="h-4 w-4" />
|
||||||
|
) : (
|
||||||
|
<ArrowDown className="h-4 w-4" />
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Sort automations based on current sort settings
|
||||||
|
const sortedAutomations = overview?.automations
|
||||||
|
? [...overview.automations].sort((a, b) => {
|
||||||
|
let aValue, bValue;
|
||||||
|
|
||||||
|
switch (sortField) {
|
||||||
|
case "name":
|
||||||
|
aValue = a.name.toLowerCase();
|
||||||
|
bValue = b.name.toLowerCase();
|
||||||
|
break;
|
||||||
|
case "schedule":
|
||||||
|
aValue = a.schedule.toLowerCase();
|
||||||
|
bValue = b.schedule.toLowerCase();
|
||||||
|
break;
|
||||||
|
case "lastRun":
|
||||||
|
// Convert "Never" to empty string for proper sorting
|
||||||
|
aValue = a.lastRun === "Never" ? "" : a.lastRun;
|
||||||
|
bValue = b.lastRun === "Never" ? "" : b.lastRun;
|
||||||
|
break;
|
||||||
|
case "lastRunTimestamp":
|
||||||
|
aValue = a.lastRunTimestamp || 0;
|
||||||
|
bValue = b.lastRunTimestamp || 0;
|
||||||
|
break;
|
||||||
|
case "nextRunTimestamp":
|
||||||
|
aValue = getNextRunTimestamp(a.schedule);
|
||||||
|
bValue = getNextRunTimestamp(b.schedule);
|
||||||
|
break;
|
||||||
|
case "status":
|
||||||
|
aValue = a.status.toLowerCase();
|
||||||
|
bValue = b.status.toLowerCase();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
aValue = a[sortField];
|
||||||
|
bValue = b[sortField];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (aValue < bValue) return sortDirection === "asc" ? -1 : 1;
|
||||||
|
if (aValue > bValue) return sortDirection === "asc" ? 1 : -1;
|
||||||
|
return 0;
|
||||||
|
})
|
||||||
|
: [];
|
||||||
|
|
||||||
|
const tabs = [{ id: "overview", name: "Overview", icon: Settings }];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Page Header */}
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
Automation Management
|
||||||
|
</h1>
|
||||||
|
<p className="text-sm text-secondary-600 dark:text-secondary-400 mt-1">
|
||||||
|
Monitor and manage automated server operations, agent
|
||||||
|
communications, and patch deployments
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => triggerManualJob("github")}
|
||||||
|
className="btn-outline flex items-center gap-2"
|
||||||
|
title="Trigger manual GitHub update check"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4" />
|
||||||
|
Check Updates
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => triggerManualJob("sessions")}
|
||||||
|
className="btn-outline flex items-center gap-2"
|
||||||
|
title="Trigger manual session cleanup"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4" />
|
||||||
|
Clean Sessions
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() =>
|
||||||
|
triggerManualJob("echo", {
|
||||||
|
message: "Hello from Automation Page!",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
className="btn-outline flex items-center gap-2"
|
||||||
|
title="Trigger echo hello task"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4" />
|
||||||
|
Echo Hello
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Stats Cards */}
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||||
|
{/* Scheduled Tasks Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Clock className="h-5 w-5 text-warning-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Scheduled Tasks
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.scheduledTasks || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Running Tasks Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Play className="h-5 w-5 text-success-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Running Tasks
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.runningTasks || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Failed Tasks Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<XCircle className="h-5 w-5 text-red-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Failed Tasks
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.failedTasks || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Total Task Runs Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Zap className="h-5 w-5 text-secondary-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Total Task Runs
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.totalAutomations || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tabs */}
|
||||||
|
<div className="mb-6">
|
||||||
|
<div className="border-b border-gray-200 dark:border-gray-700">
|
||||||
|
<nav className="-mb-px flex space-x-8">
|
||||||
|
{tabs.map((tab) => (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
key={tab.id}
|
||||||
|
onClick={() => setActiveTab(tab.id)}
|
||||||
|
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
|
||||||
|
activeTab === tab.id
|
||||||
|
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||||
|
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<tab.icon className="h-4 w-4" />
|
||||||
|
{tab.name}
|
||||||
|
</button>
|
||||||
|
))}
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tab Content */}
|
||||||
|
{activeTab === "overview" && (
|
||||||
|
<div className="card p-6">
|
||||||
|
{overviewLoading ? (
|
||||||
|
<div className="text-center py-8">
|
||||||
|
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600 mx-auto"></div>
|
||||||
|
<p className="mt-2 text-sm text-secondary-500">
|
||||||
|
Loading automations...
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-700">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Run
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("name")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Task
|
||||||
|
{getSortIcon("name")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("schedule")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Frequency
|
||||||
|
{getSortIcon("schedule")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("lastRunTimestamp")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Last Run
|
||||||
|
{getSortIcon("lastRunTimestamp")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("nextRunTimestamp")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Next Run
|
||||||
|
{getSortIcon("nextRunTimestamp")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("status")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Status
|
||||||
|
{getSortIcon("status")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
{sortedAutomations.map((automation) => (
|
||||||
|
<tr
|
||||||
|
key={automation.queue}
|
||||||
|
className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
||||||
|
>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
{automation.schedule !== "Manual only" ? (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
if (automation.queue.includes("github")) {
|
||||||
|
triggerManualJob("github");
|
||||||
|
} else if (automation.queue.includes("session")) {
|
||||||
|
triggerManualJob("sessions");
|
||||||
|
} else if (automation.queue.includes("echo")) {
|
||||||
|
triggerManualJob("echo", {
|
||||||
|
message: "Manual trigger from table",
|
||||||
|
});
|
||||||
|
} else if (
|
||||||
|
automation.queue.includes("orphaned-repo")
|
||||||
|
) {
|
||||||
|
triggerManualJob("orphaned-repos");
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
|
||||||
|
title="Run Now"
|
||||||
|
>
|
||||||
|
<Play className="h-3 w-3" />
|
||||||
|
</button>
|
||||||
|
) : (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
if (automation.queue.includes("echo")) {
|
||||||
|
triggerManualJob("echo", {
|
||||||
|
message: "Manual trigger from table",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
|
||||||
|
title="Trigger"
|
||||||
|
>
|
||||||
|
<Play className="h-3 w-3" />
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
<div>
|
||||||
|
<div className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{automation.name}
|
||||||
|
</div>
|
||||||
|
<div className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
{automation.description}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
|
||||||
|
{automation.schedule}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
|
||||||
|
{automation.lastRun}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
|
||||||
|
{getNextRunTime(
|
||||||
|
automation.schedule,
|
||||||
|
automation.lastRun,
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
{getStatusBadge(automation.status)}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default Automation;
|
frontend/src/pages/Docker.jsx (new file, 1003 lines)
File diff suppressed because it is too large
@@ -1,699 +0,0 @@
|
|||||||
import {
|
|
||||||
Activity,
|
|
||||||
AlertCircle,
|
|
||||||
CheckCircle,
|
|
||||||
Clock,
|
|
||||||
Download,
|
|
||||||
Eye,
|
|
||||||
Filter,
|
|
||||||
Package,
|
|
||||||
Pause,
|
|
||||||
Play,
|
|
||||||
RefreshCw,
|
|
||||||
Search,
|
|
||||||
Server,
|
|
||||||
XCircle,
|
|
||||||
} from "lucide-react";
|
|
||||||
import { useState } from "react";
|
|
||||||
|
|
||||||
const Queue = () => {
|
|
||||||
const [activeTab, setActiveTab] = useState("server");
|
|
||||||
const [filterStatus, setFilterStatus] = useState("all");
|
|
||||||
const [searchQuery, setSearchQuery] = useState("");
|
|
||||||
|
|
||||||
// Mock data for demonstration
|
|
||||||
const serverQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
type: "Server Update Check",
|
|
||||||
description: "Check for server updates from GitHub",
|
|
||||||
status: "running",
|
|
||||||
priority: "high",
|
|
||||||
createdAt: "2024-01-15 10:30:00",
|
|
||||||
estimatedCompletion: "2024-01-15 10:35:00",
|
|
||||||
progress: 75,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
type: "Session Cleanup",
|
|
||||||
description: "Clear expired login sessions",
|
|
||||||
status: "pending",
|
|
||||||
priority: "medium",
|
|
||||||
createdAt: "2024-01-15 10:25:00",
|
|
||||||
estimatedCompletion: "2024-01-15 10:40:00",
|
|
||||||
progress: 0,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
type: "Database Optimization",
|
|
||||||
description: "Optimize database indexes and cleanup old records",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
createdAt: "2024-01-15 09:00:00",
|
|
||||||
completedAt: "2024-01-15 09:45:00",
|
|
||||||
progress: 100,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 1,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
type: "Backup Creation",
|
|
||||||
description: "Create system backup",
|
|
||||||
status: "failed",
|
|
||||||
priority: "high",
|
|
||||||
createdAt: "2024-01-15 08:00:00",
|
|
||||||
errorMessage: "Insufficient disk space",
|
|
||||||
progress: 45,
|
|
||||||
retryCount: 2,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const agentQueueData = [
  {
    id: 1,
    hostname: "web-server-01",
    ip: "192.168.1.100",
    type: "Agent Update Collection",
    description: "Agent v1.2.7 → v1.2.8",
    status: "pending",
    priority: "medium",
    lastCommunication: "2024-01-15 10:00:00",
    nextExpectedCommunication: "2024-01-15 11:00:00",
    currentVersion: "1.2.7",
    targetVersion: "1.2.8",
    retryCount: 0,
    maxRetries: 5,
  },
  {
    id: 2,
    hostname: "db-server-02",
    ip: "192.168.1.101",
    type: "Data Collection",
    description: "Collect package and system information",
    status: "running",
    priority: "high",
    lastCommunication: "2024-01-15 10:15:00",
    nextExpectedCommunication: "2024-01-15 11:15:00",
    currentVersion: "1.2.8",
    targetVersion: "1.2.8",
    retryCount: 0,
    maxRetries: 3,
  },
  {
    id: 3,
    hostname: "app-server-03",
    ip: "192.168.1.102",
    type: "Agent Update Collection",
    description: "Agent v1.2.6 → v1.2.8",
    status: "completed",
    priority: "low",
    lastCommunication: "2024-01-15 09:30:00",
    completedAt: "2024-01-15 09:45:00",
    currentVersion: "1.2.8",
    targetVersion: "1.2.8",
    retryCount: 0,
    maxRetries: 5,
  },
  {
    id: 4,
    hostname: "test-server-04",
    ip: "192.168.1.103",
    type: "Data Collection",
    description: "Collect package and system information",
    status: "failed",
    priority: "medium",
    lastCommunication: "2024-01-15 08:00:00",
    errorMessage: "Connection timeout",
    retryCount: 3,
    maxRetries: 3,
  },
];
|
|
||||||
const patchQueueData = [
  {
    id: 1,
    hostname: "web-server-01",
    ip: "192.168.1.100",
    packages: ["nginx", "openssl", "curl"],
    type: "Security Updates",
    description: "Apply critical security patches",
    status: "pending",
    priority: "high",
    scheduledFor: "2024-01-15 19:00:00",
    lastCommunication: "2024-01-15 18:00:00",
    nextExpectedCommunication: "2024-01-15 19:00:00",
    retryCount: 0,
    maxRetries: 3,
  },
  {
    id: 2,
    hostname: "db-server-02",
    ip: "192.168.1.101",
    packages: ["postgresql", "python3"],
    type: "Feature Updates",
    description: "Update database and Python packages",
    status: "running",
    priority: "medium",
    scheduledFor: "2024-01-15 20:00:00",
    lastCommunication: "2024-01-15 19:15:00",
    nextExpectedCommunication: "2024-01-15 20:15:00",
    retryCount: 0,
    maxRetries: 2,
  },
  {
    id: 3,
    hostname: "app-server-03",
    ip: "192.168.1.102",
    packages: ["nodejs", "npm"],
    type: "Maintenance Updates",
    description: "Update Node.js and npm packages",
    status: "completed",
    priority: "low",
    scheduledFor: "2024-01-15 18:30:00",
    completedAt: "2024-01-15 18:45:00",
    retryCount: 0,
    maxRetries: 2,
  },
  {
    id: 4,
    hostname: "test-server-04",
    ip: "192.168.1.103",
    packages: ["docker", "docker-compose"],
    type: "Security Updates",
    description: "Update Docker components",
    status: "failed",
    priority: "high",
    scheduledFor: "2024-01-15 17:00:00",
    errorMessage: "Package conflicts detected",
    retryCount: 2,
    maxRetries: 3,
  },
];
|
|
||||||
// Map a queue item status to its indicator icon
const getStatusIcon = (status) => {
  switch (status) {
    case "running":
      return <RefreshCw className="h-4 w-4 text-blue-500 animate-spin" />;
    case "completed":
      return <CheckCircle className="h-4 w-4 text-green-500" />;
    case "failed":
      return <XCircle className="h-4 w-4 text-red-500" />;
    case "pending":
      return <Clock className="h-4 w-4 text-yellow-500" />;
    case "paused":
      return <Pause className="h-4 w-4 text-gray-500" />;
    default:
      return <AlertCircle className="h-4 w-4 text-gray-500" />;
  }
};

// Badge colour classes for each status
const getStatusColor = (status) => {
  switch (status) {
    case "running":
      return "bg-blue-100 text-blue-800";
    case "completed":
      return "bg-green-100 text-green-800";
    case "failed":
      return "bg-red-100 text-red-800";
    case "pending":
      return "bg-yellow-100 text-yellow-800";
    case "paused":
      return "bg-gray-100 text-gray-800";
    default:
      return "bg-gray-100 text-gray-800";
  }
};

// Badge colour classes for each priority level
const getPriorityColor = (priority) => {
  switch (priority) {
    case "high":
      return "bg-red-100 text-red-800";
    case "medium":
      return "bg-yellow-100 text-yellow-800";
    case "low":
      return "bg-green-100 text-green-800";
    default:
      return "bg-gray-100 text-gray-800";
  }
};

// Apply the status filter and free-text search to the active tab's data
const filteredData = (data) => {
  let filtered = data;

  if (filterStatus !== "all") {
    filtered = filtered.filter((item) => item.status === filterStatus);
  }

  if (searchQuery) {
    filtered = filtered.filter(
      (item) =>
        item.hostname?.toLowerCase().includes(searchQuery.toLowerCase()) ||
        item.type?.toLowerCase().includes(searchQuery.toLowerCase()) ||
        item.description?.toLowerCase().includes(searchQuery.toLowerCase()),
    );
  }

  return filtered;
};
|
|
||||||
const tabs = [
  {
    id: "server",
    name: "Server Queue",
    icon: Server,
    data: serverQueueData,
    count: serverQueueData.length,
  },
  {
    id: "agent",
    name: "Agent Queue",
    icon: Download,
    data: agentQueueData,
    count: agentQueueData.length,
  },
  {
    id: "patch",
    name: "Patch Management",
    icon: Package,
    data: patchQueueData,
    count: patchQueueData.length,
  },
];
|
|
||||||
const renderServerQueueItem = (item) => (
  <div
    key={item.id}
    className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
  >
    <div className="flex items-start justify-between">
      <div className="flex-1">
        <div className="flex items-center gap-3 mb-2">
          {getStatusIcon(item.status)}
          <h3 className="font-medium text-gray-900 dark:text-white">
            {item.type}
          </h3>
          <span
            className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
          >
            {item.status}
          </span>
          <span
            className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
          >
            {item.priority}
          </span>
        </div>
        <p className="text-sm text-gray-600 dark:text-gray-400 mb-3">
          {item.description}
        </p>

        {item.status === "running" && (
          <div className="mb-3">
            <div className="flex justify-between text-xs text-gray-500 mb-1">
              <span>Progress</span>
              <span>{item.progress}%</span>
            </div>
            <div className="w-full bg-gray-200 rounded-full h-2">
              <div
                className="bg-blue-600 h-2 rounded-full transition-all duration-300"
                style={{ width: `${item.progress}%` }}
              ></div>
            </div>
          </div>
        )}

        <div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
          <div>
            <span className="font-medium">Created:</span> {item.createdAt}
          </div>
          {item.status === "running" && (
            <div>
              <span className="font-medium">ETA:</span>{" "}
              {item.estimatedCompletion}
            </div>
          )}
          {item.status === "completed" && (
            <div>
              <span className="font-medium">Completed:</span>{" "}
              {item.completedAt}
            </div>
          )}
          {item.status === "failed" && (
            <div className="col-span-2">
              <span className="font-medium">Error:</span> {item.errorMessage}
            </div>
          )}
        </div>

        {item.retryCount > 0 && (
          <div className="mt-2 text-xs text-orange-600">
            Retries: {item.retryCount}/{item.maxRetries}
          </div>
        )}
      </div>

      <div className="flex gap-2 ml-4">
        {item.status === "running" && (
          <button
            type="button"
            className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
          >
            <Pause className="h-4 w-4" />
          </button>
        )}
        {item.status === "paused" && (
          <button
            type="button"
            className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
          >
            <Play className="h-4 w-4" />
          </button>
        )}
        {item.status === "failed" && (
          <button
            type="button"
            className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
          >
            <RefreshCw className="h-4 w-4" />
          </button>
        )}
        <button
          type="button"
          className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
        >
          <Eye className="h-4 w-4" />
        </button>
      </div>
    </div>
  </div>
);
|
|
||||||
const renderAgentQueueItem = (item) => (
  <div
    key={item.id}
    className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
  >
    <div className="flex items-start justify-between">
      <div className="flex-1">
        <div className="flex items-center gap-3 mb-2">
          {getStatusIcon(item.status)}
          <h3 className="font-medium text-gray-900 dark:text-white">
            {item.hostname}
          </h3>
          <span
            className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
          >
            {item.status}
          </span>
          <span
            className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
          >
            {item.priority}
          </span>
        </div>
        <p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
          {item.type}
        </p>
        <p className="text-sm text-gray-500 mb-3">{item.description}</p>

        {item.type === "Agent Update Collection" && (
          <div className="mb-3 p-2 bg-gray-50 dark:bg-gray-700 rounded">
            <div className="text-xs text-gray-600 dark:text-gray-400">
              <span className="font-medium">Version:</span>{" "}
              {item.currentVersion} → {item.targetVersion}
            </div>
          </div>
        )}

        <div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
          <div>
            <span className="font-medium">IP:</span> {item.ip}
          </div>
          <div>
            <span className="font-medium">Last Comm:</span>{" "}
            {item.lastCommunication}
          </div>
          <div>
            <span className="font-medium">Next Expected:</span>{" "}
            {item.nextExpectedCommunication}
          </div>
          {item.status === "completed" && (
            <div>
              <span className="font-medium">Completed:</span>{" "}
              {item.completedAt}
            </div>
          )}
          {item.status === "failed" && (
            <div className="col-span-2">
              <span className="font-medium">Error:</span> {item.errorMessage}
            </div>
          )}
        </div>

        {item.retryCount > 0 && (
          <div className="mt-2 text-xs text-orange-600">
            Retries: {item.retryCount}/{item.maxRetries}
          </div>
        )}
      </div>

      <div className="flex gap-2 ml-4">
        {item.status === "failed" && (
          <button
            type="button"
            className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
          >
            <RefreshCw className="h-4 w-4" />
          </button>
        )}
        <button
          type="button"
          className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
        >
          <Eye className="h-4 w-4" />
        </button>
      </div>
    </div>
  </div>
);
|
|
||||||
const renderPatchQueueItem = (item) => (
  <div
    key={item.id}
    className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
  >
    <div className="flex items-start justify-between">
      <div className="flex-1">
        <div className="flex items-center gap-3 mb-2">
          {getStatusIcon(item.status)}
          <h3 className="font-medium text-gray-900 dark:text-white">
            {item.hostname}
          </h3>
          <span
            className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
          >
            {item.status}
          </span>
          <span
            className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
          >
            {item.priority}
          </span>
        </div>
        <p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
          {item.type}
        </p>
        <p className="text-sm text-gray-500 mb-3">{item.description}</p>

        <div className="mb-3">
          <div className="text-xs text-gray-600 dark:text-gray-400 mb-1">
            <span className="font-medium">Packages:</span>
          </div>
          <div className="flex flex-wrap gap-1">
            {item.packages.map((pkg) => (
              <span
                key={pkg}
                className="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded"
              >
                {pkg}
              </span>
            ))}
          </div>
        </div>

        <div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
          <div>
            <span className="font-medium">IP:</span> {item.ip}
          </div>
          <div>
            <span className="font-medium">Scheduled:</span>{" "}
            {item.scheduledFor}
          </div>
          <div>
            <span className="font-medium">Last Comm:</span>{" "}
            {item.lastCommunication}
          </div>
          <div>
            <span className="font-medium">Next Expected:</span>{" "}
            {item.nextExpectedCommunication}
          </div>
          {item.status === "completed" && (
            <div>
              <span className="font-medium">Completed:</span>{" "}
              {item.completedAt}
            </div>
          )}
          {item.status === "failed" && (
            <div className="col-span-2">
              <span className="font-medium">Error:</span> {item.errorMessage}
            </div>
          )}
        </div>

        {item.retryCount > 0 && (
          <div className="mt-2 text-xs text-orange-600">
            Retries: {item.retryCount}/{item.maxRetries}
          </div>
        )}
      </div>

      <div className="flex gap-2 ml-4">
        {item.status === "failed" && (
          <button
            type="button"
            className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
          >
            <RefreshCw className="h-4 w-4" />
          </button>
        )}
        <button
          type="button"
          className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
        >
          <Eye className="h-4 w-4" />
        </button>
      </div>
    </div>
  </div>
);
|
|
||||||
const currentTab = tabs.find((tab) => tab.id === activeTab);
const filteredItems = filteredData(currentTab?.data || []);

return (
  <div className="min-h-screen bg-gray-50 dark:bg-gray-900">
    <div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
      {/* Header */}
      <div className="mb-8">
        <h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">
          Queue Management
        </h1>
        <p className="text-gray-600 dark:text-gray-400">
          Monitor and manage server operations, agent communications, and
          patch deployments
        </p>
      </div>

      {/* Tabs */}
      <div className="mb-6">
        <div className="border-b border-gray-200 dark:border-gray-700">
          <nav className="-mb-px flex space-x-8">
            {tabs.map((tab) => (
              <button
                type="button"
                key={tab.id}
                onClick={() => setActiveTab(tab.id)}
                className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
                  activeTab === tab.id
                    ? "border-blue-500 text-blue-600 dark:text-blue-400"
                    : "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
                }`}
              >
                <tab.icon className="h-4 w-4" />
                {tab.name}
                <span className="bg-gray-100 dark:bg-gray-700 text-gray-600 dark:text-gray-300 px-2 py-0.5 rounded-full text-xs">
                  {tab.count}
                </span>
              </button>
            ))}
          </nav>
        </div>
      </div>

      {/* Filters and Search */}
      <div className="mb-6 flex flex-col sm:flex-row gap-4">
        <div className="flex-1">
          <div className="relative">
            <Search className="absolute left-3 top-1/2 transform -translate-y-1/2 text-gray-400 h-4 w-4" />
            <input
              type="text"
              placeholder="Search queues..."
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              className="w-full pl-10 pr-4 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
            />
          </div>
        </div>
        <div className="flex gap-2">
          <select
            value={filterStatus}
            onChange={(e) => setFilterStatus(e.target.value)}
            className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
          >
            <option value="all">All Status</option>
            <option value="pending">Pending</option>
            <option value="running">Running</option>
            <option value="completed">Completed</option>
            <option value="failed">Failed</option>
            <option value="paused">Paused</option>
          </select>
          <button
            type="button"
            className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white hover:bg-gray-50 dark:hover:bg-gray-700 flex items-center gap-2"
          >
            <Filter className="h-4 w-4" />
            More Filters
          </button>
        </div>
      </div>

      {/* Queue Items */}
      <div className="space-y-4">
        {filteredItems.length === 0 ? (
          <div className="text-center py-12">
            <Activity className="mx-auto h-12 w-12 text-gray-400" />
            <h3 className="mt-2 text-sm font-medium text-gray-900 dark:text-white">
              No queue items found
            </h3>
            <p className="mt-1 text-sm text-gray-500 dark:text-gray-400">
              {searchQuery
                ? "Try adjusting your search criteria"
                : "No items match the current filters"}
            </p>
          </div>
        ) : (
          filteredItems.map((item) => {
            switch (activeTab) {
              case "server":
                return renderServerQueueItem(item);
              case "agent":
                return renderAgentQueueItem(item);
              case "patch":
                return renderPatchQueueItem(item);
              default:
                return null;
            }
          })
        )}
      </div>
    </div>
  </div>
);
};

export default Queue;
frontend/src/pages/docker/ContainerDetail.jsx (new file, 389 lines)
@@ -0,0 +1,389 @@
import { useQuery } from "@tanstack/react-query";
import {
  AlertTriangle,
  ArrowLeft,
  CheckCircle,
  Container,
  ExternalLink,
  RefreshCw,
  Server,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";

const ContainerDetail = () => {
  const { id } = useParams();

  const { data, isLoading, error } = useQuery({
    queryKey: ["docker", "container", id],
    queryFn: async () => {
      const response = await api.get(`/docker/containers/${id}`);
      return response.data;
    },
    refetchInterval: 30000,
  });

  const container = data?.container;
  const similarContainers = data?.similarContainers || [];
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !container) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Container not found
|
||||||
|
</h3>
|
||||||
|
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
The container you're looking for doesn't exist or has been
|
||||||
|
removed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const getStatusBadge = (status) => {
|
||||||
|
const statusClasses = {
|
||||||
|
running:
|
||||||
|
"bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
|
||||||
|
exited: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
|
||||||
|
paused:
|
||||||
|
"bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200",
|
||||||
|
restarting:
|
||||||
|
"bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200",
|
||||||
|
};
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-3 py-1 rounded-full text-sm font-medium ${
|
||||||
|
statusClasses[status] ||
|
||||||
|
"bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Container className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{container.name}
|
||||||
|
</h1>
|
||||||
|
{getStatusBadge(container.status)}
|
||||||
|
</div>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Container ID: {container.container_id.substring(0, 12)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
{/* Update Status Card */}
|
||||||
|
{container.docker_images?.docker_image_updates &&
|
||||||
|
container.docker_images.docker_image_updates.length > 0 ? (
|
||||||
|
<div className="card p-4 bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-yellow-600 dark:text-yellow-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-yellow-200">
|
||||||
|
Update Available
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-yellow-100 truncate">
|
||||||
|
{
|
||||||
|
container.docker_images.docker_image_updates[0]
|
||||||
|
.available_tag
|
||||||
|
}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="card p-4 bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-green-200">
|
||||||
|
Update Status
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-green-100">
|
||||||
|
Up to date
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Server className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">Host</p>
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${container.host?.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 truncate block"
|
||||||
|
>
|
||||||
|
{container.host?.friendly_name || container.host?.hostname}
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
State
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{container.state || container.status}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Last Checked
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.last_checked)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Container and Image Information - Side by Side */}
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
|
{/* Container Details */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Container Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Container ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{container.container_id}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.image_tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{container.started_at && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Started
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.started_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{container.ports && Object.keys(container.ports).length > 0 && (
|
||||||
|
<div className="sm:col-span-2">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Port Mappings
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{Object.entries(container.ports).map(([key, value]) => (
|
||||||
|
<span
|
||||||
|
key={key}
|
||||||
|
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200"
|
||||||
|
>
|
||||||
|
{key} → {value}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Image Information */}
|
||||||
|
{container.docker_images && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Image Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Repository
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${container.docker_images.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
{container.docker_images.repository}
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.docker_images.tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Source
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.docker_images.source}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{container.docker_images.size_bytes && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Size
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{(
|
||||||
|
Number(container.docker_images.size_bytes) /
|
||||||
|
1024 /
|
||||||
|
1024
|
||||||
|
).toFixed(2)}{" "}
|
||||||
|
MB
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-xs text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{container.docker_images.image_id?.substring(0, 12)}...
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.docker_images.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Similar Containers */}
|
||||||
|
{similarContainers.length > 0 && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Similar Containers (Same Image)
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<ul className="divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{similarContainers.map((similar) => (
|
||||||
|
<li
|
||||||
|
key={similar.id}
|
||||||
|
className="py-4 flex items-center justify-between"
|
||||||
|
>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Container className="h-5 w-5 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${similar.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{similar.name}
|
||||||
|
</Link>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
|
{similar.status}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default ContainerDetail;
|
||||||
frontend/src/pages/docker/HostDetail.jsx (new file, 354 lines)
@@ -0,0 +1,354 @@
import { useQuery } from "@tanstack/react-query";
import {
  AlertTriangle,
  ArrowLeft,
  Container,
  ExternalLink,
  Package,
  RefreshCw,
  Server,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api from "../../utils/api";

const HostDetail = () => {
  const { id } = useParams();

  const { data, isLoading, error } = useQuery({
    queryKey: ["docker", "host", id],
    queryFn: async () => {
      const response = await api.get(`/docker/hosts/${id}`);
      return response.data;
    },
    refetchInterval: 30000,
  });

  const host = data?.host;
  const containers = data?.containers || [];
  const images = data?.images || [];
  const stats = data?.stats;
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !host) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Host not found
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Server className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{host.friendly_name || host.hostname}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
{host.ip}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${id}`}
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
View Full Host Details
|
||||||
|
<ExternalLink className="ml-2 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Total Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.totalContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Running
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.runningContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-red-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Stopped
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.stoppedContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Package className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Images
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.totalImages || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Host Information */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Host Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5 space-y-6">
|
||||||
|
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Friendly Name
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.friendly_name}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Hostname
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.hostname}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
IP Address
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.ip}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
OS
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.os_type} {host.os_version}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Containers */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Containers ({containers.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Container Name
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Image
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Status
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Actions
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{containers.map((container) => (
|
||||||
|
<tr key={container.id}>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${container.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{container.name}
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
|
||||||
|
{container.image_name}:{container.image_tag}
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
|
||||||
|
{container.status}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${container.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
View
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Images */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Images ({images.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Repository
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Tag
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Source
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Actions
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{images.map((image) => (
|
||||||
|
<tr key={image.id}>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${image.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{image.repository}
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
|
||||||
|
{image.tag}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
|
||||||
|
{image.source}
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${image.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
View
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default HostDetail;
|
||||||
frontend/src/pages/docker/ImageDetail.jsx (new file, 439 lines)
@@ -0,0 +1,439 @@
import { useQuery } from "@tanstack/react-query";
import {
  AlertTriangle,
  ArrowLeft,
  Container,
  ExternalLink,
  Package,
  RefreshCw,
  Server,
  Shield,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";

const ImageDetail = () => {
  const { id } = useParams();

  const { data, isLoading, error } = useQuery({
    queryKey: ["docker", "image", id],
    queryFn: async () => {
      const response = await api.get(`/docker/images/${id}`);
      return response.data;
    },
    refetchInterval: 30000,
  });

  const image = data?.image;
  const hosts = data?.hosts || [];
  const containers = image?.docker_containers || [];
  const updates = image?.docker_image_updates || [];
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !image) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Image not found
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Package className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{image.repository}:{image.tag}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Image ID: {image.image_id.substring(0, 12)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{containers.length}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Server className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Hosts
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{hosts.length}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Package className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">Size</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{image.size_bytes ? (
|
||||||
|
<>{(Number(image.size_bytes) / 1024 / 1024).toFixed(0)} MB</>
|
||||||
|
								) : (
									"N/A"
								)}
							</p>
						</div>
					</div>
				</div>

				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<AlertTriangle className="h-5 w-5 text-warning-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Updates
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{updates.length}
							</p>
						</div>
					</div>
				</div>
			</div>

			{/* Available Updates with Digest Comparison */}
			{updates.length > 0 && (
				<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
					<div className="flex">
						<AlertTriangle className="h-5 w-5 text-yellow-400" />
						<div className="ml-3 flex-1">
							<h3 className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
								Updates Available
							</h3>
							<div className="mt-2 space-y-3">
								{updates.map((update) => {
									let digestInfo = null;
									try {
										if (update.changelog_url) {
											digestInfo = JSON.parse(update.changelog_url);
										}
									} catch (_e) {
										// Ignore parse errors
									}

									return (
										<div
											key={update.id}
											className="bg-white dark:bg-secondary-800 rounded-lg p-3 border border-yellow-200 dark:border-yellow-700"
										>
											<div className="flex items-center justify-between mb-2">
												<div className="flex items-center gap-2">
													{update.is_security_update && (
														<Shield className="h-4 w-4 text-red-500" />
													)}
													<span className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
														New version available:{" "}
														<span className="font-semibold">
															{update.available_tag}
														</span>
													</span>
												</div>
												{update.is_security_update && (
													<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200">
														Security
													</span>
												)}
											</div>
											{digestInfo &&
												digestInfo.method === "digest_comparison" && (
													<div className="mt-2 pt-2 border-t border-yellow-200 dark:border-yellow-700">
														<p className="text-xs text-secondary-600 dark:text-secondary-400 mb-1">
															Detected via digest comparison:
														</p>
														<div className="font-mono text-xs space-y-1">
															<div className="text-red-600 dark:text-red-400">
																<span className="font-bold">- Current: </span>
																{digestInfo.current_digest}
															</div>
															<div className="text-green-600 dark:text-green-400">
																<span className="font-bold">+ Available: </span>
																{digestInfo.available_digest}
															</div>
														</div>
													</div>
												)}
										</div>
									);
								})}
							</div>
						</div>
					</div>
				</div>
			)}

			{/* Image Information */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Image Information
					</h3>
				</div>
				<div className="px-6 py-5 space-y-6">
					<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Repository
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.repository}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Tag
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.tag}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Source
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.source}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Created
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.created_at
									? formatRelativeTime(image.created_at)
									: "Unknown"}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Image ID
							</dt>
							<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white">
								{image.image_id}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Last Checked
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.last_checked
									? formatRelativeTime(image.last_checked)
									: "Never"}
							</dd>
						</div>
						{image.digest && (
							<div className="sm:col-span-2">
								<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
									Digest
								</dt>
								<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white break-all">
									{image.digest}
								</dd>
							</div>
						)}
					</div>
				</div>
			</div>

			{/* Containers using this image */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Containers ({containers.length})
					</h3>
				</div>
				<div className="overflow-x-auto">
					<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
						<thead className="bg-secondary-50 dark:bg-secondary-900">
							<tr>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Container Name
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Status
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Host
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Actions
								</th>
							</tr>
						</thead>
						<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
							{containers.map((container) => (
								<tr key={container.id}>
									<td className="px-6 py-4 whitespace-nowrap">
										<Link
											to={`/docker/containers/${container.id}`}
											className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
										>
											{container.name}
										</Link>
									</td>
									<td className="px-6 py-4 whitespace-nowrap">
										<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
											{container.status}
										</span>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
										{container.host_id}
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
										<Link
											to={`/docker/containers/${container.id}`}
											className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
										>
											View
											<ExternalLink className="ml-1 h-4 w-4" />
										</Link>
									</td>
								</tr>
							))}
						</tbody>
					</table>
				</div>
			</div>

			{/* Hosts using this image */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Hosts ({hosts.length})
					</h3>
				</div>
				<div className="overflow-x-auto">
					<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
						<thead className="bg-secondary-50 dark:bg-secondary-900">
							<tr>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Host Name
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									IP Address
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Actions
								</th>
							</tr>
						</thead>
						<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
							{hosts.map((host) => (
								<tr key={host.id}>
									<td className="px-6 py-4 whitespace-nowrap">
										<Link
											to={`/hosts/${host.id}`}
											className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
										>
											{host.friendly_name || host.hostname}
										</Link>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
										{host.ip}
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
										<Link
											to={`/hosts/${host.id}`}
											className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
										>
											View
											<ExternalLink className="ml-1 h-4 w-4" />
										</Link>
									</td>
								</tr>
							))}
						</tbody>
					</table>
				</div>
			</div>
		</div>
	);
};

export default ImageDetail;
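Note on the digest-comparison block above: the component reads its data from a JSON string that the backend is assumed to store in the update's changelog_url field. A minimal sketch of that assumed payload and the same tolerant parsing the component uses (field names come from the JSX above; the concrete values are hypothetical):

// Hypothetical update record; values are illustrative only.
const update = {
	id: "example-update",
	available_tag: "1.2.9",
	is_security_update: false,
	changelog_url: JSON.stringify({
		method: "digest_comparison",
		current_digest: "sha256:1111111111111111",
		available_digest: "sha256:2222222222222222",
	}),
};

// Same parse-and-ignore-errors approach as ImageDetail.
let digestInfo = null;
try {
	if (update.changelog_url) {
		digestInfo = JSON.parse(update.changelog_url);
	}
} catch (_e) {
	// Ignore parse errors; non-JSON values simply hide the digest section.
}

if (digestInfo && digestInfo.method === "digest_comparison") {
	console.log(`- Current:   ${digestInfo.current_digest}`);
	console.log(`+ Available: ${digestInfo.available_digest}`);
}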
907	package-lock.json (generated)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
 	"name": "patchmon",
-	"version": "1.2.8",
+	"version": "1.2.9",
 	"description": "Linux Patch Monitoring System",
 	"license": "AGPL-3.0",
 	"private": true,
14	setup.sh
@@ -34,7 +34,7 @@ BLUE='\033[0;34m'
 NC='\033[0m' # No Color
 
 # Global variables
-SCRIPT_VERSION="self-hosting-install.sh v1.2.8-selfhost-2025-10-10-6"
+SCRIPT_VERSION="self-hosting-install.sh v1.2.9-selfhost-2025-10-11-1"
 DEFAULT_GITHUB_REPO="https://github.com/PatchMon/PatchMon.git"
 FQDN=""
 CUSTOM_FQDN=""
@@ -867,7 +867,7 @@ EOF
 	cat > frontend/.env << EOF
 VITE_API_URL=$SERVER_PROTOCOL_SEL://$FQDN/api/v1
 VITE_APP_NAME=PatchMon
-VITE_APP_VERSION=1.2.8
+VITE_APP_VERSION=1.2.9
 EOF
 
 	print_status "Environment files created"
@@ -1239,7 +1239,7 @@ create_agent_version() {
 
 	# Priority 2: Use fallback version if not found
 	if [ "$current_version" = "N/A" ] || [ -z "$current_version" ]; then
-		current_version="1.2.8"
+		current_version="1.2.9"
 		print_warning "Could not determine version, using fallback: $current_version"
 	fi
 
@@ -1769,6 +1769,14 @@ update_installation() {
 	print_info "Pulling latest code from branch: $DEPLOYMENT_BRANCH"
 	cd "$instance_dir"
 
+	# Clean up any untracked files that might conflict with incoming changes
+	print_info "Cleaning up untracked files to prevent merge conflicts..."
+	git clean -fd
+
+	# Reset any local changes to ensure clean state
+	print_info "Resetting local changes to ensure clean state..."
+	git reset --hard HEAD
+
 	# Fetch latest changes
 	git fetch origin
 