mirror of
https://github.com/9technologygroup/patchmon.net.git
synced 2025-11-01 04:23:48 +00:00
Compare commits
30 Commits
renovate/p
...
fb0f6ba028
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fb0f6ba028 | ||
|
|
c328123bd3 | ||
|
|
46eb797ac3 | ||
|
|
c43afeb127 | ||
|
|
5b77a1328d | ||
|
|
9a40d5e6ee | ||
|
|
fdd0cfd619 | ||
|
|
de236f9ae2 | ||
|
|
4d5040e0e9 | ||
|
|
28c5310b99 | ||
|
|
a2e9743da6 | ||
|
|
3863d641fa | ||
|
|
cc8f77a946 | ||
|
|
36455e2bfd | ||
|
|
af65d38cad | ||
|
|
29266b6d77 | ||
|
|
f96e468482 | ||
|
|
9f8c88badf | ||
|
|
7985a225d7 | ||
|
|
8c538bd99c | ||
|
|
623bf5e2c8 | ||
|
|
ed8cc81b89 | ||
|
|
5c4353a688 | ||
|
|
6ebcdd57d5 | ||
|
|
a3d0dfd665 | ||
|
|
d99ded6d65 | ||
|
|
1ea96b6172 | ||
|
|
1e5ee66825 | ||
|
|
88130797e4 | ||
|
|
0ad1a96871 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -139,6 +139,7 @@ playwright-report/
|
|||||||
test-results.xml
|
test-results.xml
|
||||||
test_*.sh
|
test_*.sh
|
||||||
test-*.sh
|
test-*.sh
|
||||||
|
*.code-workspace
|
||||||
|
|
||||||
# Package manager lock files (uncomment if you want to ignore them)
|
# Package manager lock files (uncomment if you want to ignore them)
|
||||||
# package-lock.json
|
# package-lock.json
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ PatchMon provides centralized patch management across diverse server environment
|
|||||||
|
|
||||||
### API & Integrations
|
### API & Integrations
|
||||||
- REST API under `/api/v1` with JWT auth
|
- REST API under `/api/v1` with JWT auth
|
||||||
- **Proxmox LXC Auto-Enrollment** - Automatically discover and enroll LXC containers from Proxmox hosts ([Documentation](PROXMOX_AUTO_ENROLLMENT.md))
|
- Proxmox LXC Auto-Enrollment - Automatically discover and enroll LXC containers from Proxmox hosts
|
||||||
|
|
||||||
### Security
|
### Security
|
||||||
- Rate limiting for general, auth, and agent endpoints
|
- Rate limiting for general, auth, and agent endpoints
|
||||||
@@ -85,11 +85,16 @@ apt-get upgrade -y
|
|||||||
apt install curl -y
|
apt install curl -y
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Script
|
#### Install Script
|
||||||
```bash
|
```bash
|
||||||
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh
|
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Update Script (--update flag)
|
||||||
|
```bash
|
||||||
|
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh --update
|
||||||
|
```
|
||||||
|
|
||||||
#### Minimum specs for building : #####
|
#### Minimum specs for building : #####
|
||||||
CPU : 2 vCPU
|
CPU : 2 vCPU
|
||||||
RAM : 2GB
|
RAM : 2GB
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
# PatchMon Agent Script v1.2.8
|
# PatchMon Agent Script v1.2.9
|
||||||
# This script sends package update information to the PatchMon server using API credentials
|
# This script sends package update information to the PatchMon server using API credentials
|
||||||
|
|
||||||
# Configuration
|
# Configuration
|
||||||
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
|
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
|
||||||
API_VERSION="v1"
|
API_VERSION="v1"
|
||||||
AGENT_VERSION="1.2.8"
|
AGENT_VERSION="1.2.9"
|
||||||
CONFIG_FILE="/etc/patchmon/agent.conf"
|
CONFIG_FILE="/etc/patchmon/agent.conf"
|
||||||
CREDENTIALS_FILE="/etc/patchmon/credentials"
|
CREDENTIALS_FILE="/etc/patchmon/credentials"
|
||||||
LOG_FILE="/var/log/patchmon-agent.log"
|
LOG_FILE="/var/log/patchmon-agent.log"
|
||||||
@@ -38,21 +38,21 @@ error() {
|
|||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
# Info logging (cleaner output - only stdout, no duplicate logging)
|
# Info logging (cleaner output - only stderr, no duplicate logging)
|
||||||
info() {
|
info() {
|
||||||
echo -e "${BLUE}ℹ️ $1${NC}"
|
echo -e "${BLUE}ℹ️ $1${NC}" >&2
|
||||||
log "INFO: $1"
|
log "INFO: $1"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Success logging (cleaner output - only stdout, no duplicate logging)
|
# Success logging (cleaner output - only stderr, no duplicate logging)
|
||||||
success() {
|
success() {
|
||||||
echo -e "${GREEN}✅ $1${NC}"
|
echo -e "${GREEN}✅ $1${NC}" >&2
|
||||||
log "SUCCESS: $1"
|
log "SUCCESS: $1"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Warning logging (cleaner output - only stdout, no duplicate logging)
|
# Warning logging (cleaner output - only stderr, no duplicate logging)
|
||||||
warning() {
|
warning() {
|
||||||
echo -e "${YELLOW}⚠️ $1${NC}"
|
echo -e "${YELLOW}⚠️ $1${NC}" >&2
|
||||||
log "WARNING: $1"
|
log "WARNING: $1"
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -709,6 +709,135 @@ get_package_info() {
|
|||||||
echo "$packages_json"
|
echo "$packages_json"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Check and handle APT locks
|
||||||
|
handle_apt_locks() {
|
||||||
|
local interactive=${1:-false} # First parameter indicates if running interactively
|
||||||
|
|
||||||
|
local lock_files=(
|
||||||
|
"/var/lib/dpkg/lock"
|
||||||
|
"/var/lib/dpkg/lock-frontend"
|
||||||
|
"/var/lib/apt/lists/lock"
|
||||||
|
"/var/cache/apt/archives/lock"
|
||||||
|
)
|
||||||
|
|
||||||
|
local processes_found=false
|
||||||
|
local hung_processes=()
|
||||||
|
|
||||||
|
# Check for running APT processes
|
||||||
|
if pgrep -x "apt-get|apt|aptitude|dpkg|unattended-upgr" > /dev/null 2>&1; then
|
||||||
|
processes_found=true
|
||||||
|
info "Found running package management processes:"
|
||||||
|
echo "" >&2
|
||||||
|
|
||||||
|
# Get process info with ACTUAL elapsed time (not CPU time)
|
||||||
|
# Using ps -eo format to get real elapsed time
|
||||||
|
while IFS= read -r line; do
|
||||||
|
[[ -z "$line" ]] && continue
|
||||||
|
|
||||||
|
local pid=$(echo "$line" | awk '{print $1}')
|
||||||
|
local elapsed=$(echo "$line" | awk '{print $2}')
|
||||||
|
local cmd=$(echo "$line" | awk '{for(i=3;i<=NF;i++) printf "%s ", $i; print ""}')
|
||||||
|
|
||||||
|
# Display process info
|
||||||
|
echo " PID $pid: $cmd (running for $elapsed)" >&2
|
||||||
|
|
||||||
|
# Parse elapsed time and convert to seconds
|
||||||
|
# Format can be: MM:SS, HH:MM:SS, DD-HH:MM:SS, or just SS
|
||||||
|
# Use 10# prefix to force base-10 (avoid octal interpretation of leading zeros)
|
||||||
|
local runtime_seconds=0
|
||||||
|
if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then
|
||||||
|
# Format: DD-HH:MM:SS
|
||||||
|
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
|
||||||
|
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then
|
||||||
|
# Format: HH:MM:SS
|
||||||
|
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
|
||||||
|
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then
|
||||||
|
# Format: MM:SS
|
||||||
|
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
|
||||||
|
elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then
|
||||||
|
# Format: just seconds
|
||||||
|
runtime_seconds=$((10#${BASH_REMATCH[1]}))
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Consider process hung if running for more than 5 minutes
|
||||||
|
if [[ $runtime_seconds -gt 300 ]]; then
|
||||||
|
hung_processes+=("$pid:$elapsed:$cmd")
|
||||||
|
fi
|
||||||
|
done < <(ps -eo pid,etime,cmd | grep -E "apt-get|apt[^-]|aptitude|dpkg|unattended-upgr" | grep -v grep | grep -v "ps -eo")
|
||||||
|
|
||||||
|
echo "" >&2
|
||||||
|
|
||||||
|
info "Detected ${#hung_processes[@]} hung process(es), interactive=$interactive"
|
||||||
|
|
||||||
|
# If hung processes found and running interactively, offer to kill them
|
||||||
|
if [[ ${#hung_processes[@]} -gt 0 && "$interactive" == "true" ]]; then
|
||||||
|
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
|
||||||
|
echo "" >&2
|
||||||
|
|
||||||
|
for process_info in "${hung_processes[@]}"; do
|
||||||
|
IFS=':' read -r pid elapsed cmd <<< "$process_info"
|
||||||
|
echo " PID $pid: $cmd (hung for $elapsed)" >&2
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "" >&2
|
||||||
|
read -p "$(echo -e "${YELLOW}⚠️ Do you want to kill these processes? [y/N]:${NC} ")" -n 1 -r >&2
|
||||||
|
echo "" >&2
|
||||||
|
|
||||||
|
if [[ $REPLY =~ ^[Yy]$ ]]; then
|
||||||
|
for process_info in "${hung_processes[@]}"; do
|
||||||
|
IFS=':' read -r pid elapsed cmd <<< "$process_info"
|
||||||
|
info "Killing process $pid..."
|
||||||
|
if kill "$pid" 2>/dev/null; then
|
||||||
|
success "Killed process $pid"
|
||||||
|
sleep 1
|
||||||
|
# Check if process is still running
|
||||||
|
if kill -0 "$pid" 2>/dev/null; then
|
||||||
|
warning "Process $pid still running, using SIGKILL..."
|
||||||
|
kill -9 "$pid" 2>/dev/null
|
||||||
|
success "Force killed process $pid"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
warning "Could not kill process $pid (may require sudo)"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Wait a moment for locks to clear
|
||||||
|
sleep 2
|
||||||
|
else
|
||||||
|
info "Skipping process termination"
|
||||||
|
fi
|
||||||
|
elif [[ ${#hung_processes[@]} -gt 0 ]]; then
|
||||||
|
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
|
||||||
|
info "Run this command with sudo and interactively to kill hung processes"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for stale lock files (files that exist but no process is holding them)
|
||||||
|
for lock_file in "${lock_files[@]}"; do
|
||||||
|
if [[ -f "$lock_file" ]]; then
|
||||||
|
# Try to get the PID from the lock file if it exists
|
||||||
|
if lsof "$lock_file" > /dev/null 2>&1; then
|
||||||
|
info "Lock file $lock_file is held by an active process"
|
||||||
|
else
|
||||||
|
warning "Found stale lock file: $lock_file"
|
||||||
|
info "Attempting to remove stale lock..."
|
||||||
|
if rm -f "$lock_file" 2>/dev/null; then
|
||||||
|
success "Removed stale lock: $lock_file"
|
||||||
|
else
|
||||||
|
warning "Could not remove lock (insufficient permissions): $lock_file"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# If processes were found, return failure so caller can wait
|
||||||
|
if [[ "$processes_found" == true ]]; then
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
# Get package info for APT-based systems
|
# Get package info for APT-based systems
|
||||||
get_apt_packages() {
|
get_apt_packages() {
|
||||||
local -n packages_ref=$1
|
local -n packages_ref=$1
|
||||||
@@ -725,10 +854,25 @@ get_apt_packages() {
|
|||||||
else
|
else
|
||||||
retry_count=$((retry_count + 1))
|
retry_count=$((retry_count + 1))
|
||||||
if [[ $retry_count -lt $max_retries ]]; then
|
if [[ $retry_count -lt $max_retries ]]; then
|
||||||
warning "APT lock detected, retrying in ${retry_delay} seconds... (attempt $retry_count/$max_retries)"
|
warning "APT lock detected (attempt $retry_count/$max_retries)"
|
||||||
|
|
||||||
|
# On first retry, try to handle locks
|
||||||
|
if [[ $retry_count -eq 1 ]]; then
|
||||||
|
info "Checking for stale APT locks..."
|
||||||
|
# Check if running interactively (stdin is a terminal OR stdout is a terminal)
|
||||||
|
local is_interactive=false
|
||||||
|
if [[ -t 0 ]] || [[ -t 1 ]]; then
|
||||||
|
is_interactive=true
|
||||||
|
fi
|
||||||
|
info "Interactive mode: $is_interactive"
|
||||||
|
handle_apt_locks "$is_interactive"
|
||||||
|
fi
|
||||||
|
|
||||||
|
info "Waiting ${retry_delay} seconds before retry..."
|
||||||
sleep $retry_delay
|
sleep $retry_delay
|
||||||
else
|
else
|
||||||
warning "APT lock persists after $max_retries attempts, continuing without update..."
|
warning "APT lock persists after $max_retries attempts"
|
||||||
|
warning "Continuing without updating package lists (will use cached data)"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
@@ -1564,9 +1708,21 @@ main() {
|
|||||||
"diagnostics")
|
"diagnostics")
|
||||||
show_diagnostics
|
show_diagnostics
|
||||||
;;
|
;;
|
||||||
|
"clear-locks"|"unlock")
|
||||||
|
check_root
|
||||||
|
info "Checking APT locks and hung processes..."
|
||||||
|
echo ""
|
||||||
|
handle_apt_locks true
|
||||||
|
echo ""
|
||||||
|
if [[ $? -eq 0 ]]; then
|
||||||
|
success "No APT locks or processes blocking package management"
|
||||||
|
else
|
||||||
|
info "APT processes are still running - they may be legitimate operations"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
*)
|
*)
|
||||||
echo "PatchMon Agent v$AGENT_VERSION - API Credential Based"
|
echo "PatchMon Agent v$AGENT_VERSION - API Credential Based"
|
||||||
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|diagnostics}"
|
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|clear-locks|diagnostics}"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Commands:"
|
echo "Commands:"
|
||||||
echo " configure <API_ID> <API_KEY> [SERVER_URL] - Configure API credentials for this host"
|
echo " configure <API_ID> <API_KEY> [SERVER_URL] - Configure API credentials for this host"
|
||||||
@@ -1578,6 +1734,7 @@ main() {
|
|||||||
echo " check-agent-update - Check for agent updates using timestamp comparison"
|
echo " check-agent-update - Check for agent updates using timestamp comparison"
|
||||||
echo " update-agent - Update agent to latest version"
|
echo " update-agent - Update agent to latest version"
|
||||||
echo " update-crontab - Update crontab with current policy"
|
echo " update-crontab - Update crontab with current policy"
|
||||||
|
echo " clear-locks - Check and clear APT locks (interactive)"
|
||||||
echo " diagnostics - Show detailed system diagnostics"
|
echo " diagnostics - Show detailed system diagnostics"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Setup Process:"
|
echo "Setup Process:"
|
||||||
BIN
agents/patchmon-agent-linux-386
Executable file
BIN
agents/patchmon-agent-linux-386
Executable file
Binary file not shown.
BIN
agents/patchmon-agent-linux-amd64
Executable file
BIN
agents/patchmon-agent-linux-amd64
Executable file
Binary file not shown.
BIN
agents/patchmon-agent-linux-arm64
Executable file
BIN
agents/patchmon-agent-linux-arm64
Executable file
Binary file not shown.
496
agents/patchmon-docker-agent.sh
Executable file
496
agents/patchmon-docker-agent.sh
Executable file
@@ -0,0 +1,496 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# PatchMon Docker Agent Script v1.2.9
|
||||||
|
# This script collects Docker container and image information and sends it to PatchMon
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
|
||||||
|
API_VERSION="v1"
|
||||||
|
AGENT_VERSION="1.2.9"
|
||||||
|
CONFIG_FILE="/etc/patchmon/agent.conf"
|
||||||
|
CREDENTIALS_FILE="/etc/patchmon/credentials"
|
||||||
|
LOG_FILE="/var/log/patchmon-docker-agent.log"
|
||||||
|
|
||||||
|
# Curl flags placeholder (replaced by server based on SSL settings)
|
||||||
|
CURL_FLAGS=""
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# Logging function
|
||||||
|
log() {
|
||||||
|
if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
|
||||||
|
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE" 2>/dev/null
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Error handling
|
||||||
|
error() {
|
||||||
|
echo -e "${RED}ERROR: $1${NC}" >&2
|
||||||
|
log "ERROR: $1"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
# Info logging
|
||||||
|
info() {
|
||||||
|
echo -e "${BLUE}ℹ️ $1${NC}" >&2
|
||||||
|
log "INFO: $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Success logging
|
||||||
|
success() {
|
||||||
|
echo -e "${GREEN}✅ $1${NC}" >&2
|
||||||
|
log "SUCCESS: $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Warning logging
|
||||||
|
warning() {
|
||||||
|
echo -e "${YELLOW}⚠️ $1${NC}" >&2
|
||||||
|
log "WARNING: $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check if Docker is installed and running
|
||||||
|
check_docker() {
|
||||||
|
if ! command -v docker &> /dev/null; then
|
||||||
|
error "Docker is not installed on this system"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! docker info &> /dev/null; then
|
||||||
|
error "Docker daemon is not running or you don't have permission to access it. Try running with sudo."
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Load credentials
|
||||||
|
load_credentials() {
|
||||||
|
if [[ ! -f "$CREDENTIALS_FILE" ]]; then
|
||||||
|
error "Credentials file not found at $CREDENTIALS_FILE. Please configure the main PatchMon agent first."
|
||||||
|
fi
|
||||||
|
|
||||||
|
source "$CREDENTIALS_FILE"
|
||||||
|
|
||||||
|
if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
|
||||||
|
error "API credentials not found in $CREDENTIALS_FILE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Use PATCHMON_URL from credentials if available, otherwise use default
|
||||||
|
if [[ -n "$PATCHMON_URL" ]]; then
|
||||||
|
PATCHMON_SERVER="$PATCHMON_URL"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Load configuration
|
||||||
|
load_config() {
|
||||||
|
if [[ -f "$CONFIG_FILE" ]]; then
|
||||||
|
source "$CONFIG_FILE"
|
||||||
|
if [[ -n "$SERVER_URL" ]]; then
|
||||||
|
PATCHMON_SERVER="$SERVER_URL"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Collect Docker containers
|
||||||
|
collect_containers() {
|
||||||
|
info "Collecting Docker container information..."
|
||||||
|
|
||||||
|
local containers_json="["
|
||||||
|
local first=true
|
||||||
|
|
||||||
|
# Get all containers (running and stopped)
|
||||||
|
while IFS='|' read -r container_id name image status state created started ports; do
|
||||||
|
if [[ -z "$container_id" ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Parse image name and tag
|
||||||
|
local image_name="${image%%:*}"
|
||||||
|
local image_tag="${image##*:}"
|
||||||
|
if [[ "$image_tag" == "$image_name" ]]; then
|
||||||
|
image_tag="latest"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Determine image source based on registry
|
||||||
|
local image_source="docker-hub"
|
||||||
|
if [[ "$image_name" == ghcr.io/* ]]; then
|
||||||
|
image_source="github"
|
||||||
|
elif [[ "$image_name" == registry.gitlab.com/* ]]; then
|
||||||
|
image_source="gitlab"
|
||||||
|
elif [[ "$image_name" == *"/"*"/"* ]]; then
|
||||||
|
image_source="private"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Get repository name (without registry prefix for common registries)
|
||||||
|
local image_repository="$image_name"
|
||||||
|
image_repository="${image_repository#ghcr.io/}"
|
||||||
|
image_repository="${image_repository#registry.gitlab.com/}"
|
||||||
|
|
||||||
|
# Get image ID
|
||||||
|
local full_image_id=$(docker inspect --format='{{.Image}}' "$container_id" 2>/dev/null || echo "unknown")
|
||||||
|
full_image_id="${full_image_id#sha256:}"
|
||||||
|
|
||||||
|
# Normalize status (extract just the status keyword)
|
||||||
|
local normalized_status="unknown"
|
||||||
|
if [[ "$status" =~ ^Up ]]; then
|
||||||
|
normalized_status="running"
|
||||||
|
elif [[ "$status" =~ ^Exited ]]; then
|
||||||
|
normalized_status="exited"
|
||||||
|
elif [[ "$status" =~ ^Created ]]; then
|
||||||
|
normalized_status="created"
|
||||||
|
elif [[ "$status" =~ ^Restarting ]]; then
|
||||||
|
normalized_status="restarting"
|
||||||
|
elif [[ "$status" =~ ^Paused ]]; then
|
||||||
|
normalized_status="paused"
|
||||||
|
elif [[ "$status" =~ ^Dead ]]; then
|
||||||
|
normalized_status="dead"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Parse ports
|
||||||
|
local ports_json="null"
|
||||||
|
if [[ -n "$ports" && "$ports" != "null" ]]; then
|
||||||
|
# Convert Docker port format to JSON
|
||||||
|
ports_json=$(echo "$ports" | jq -R -s -c 'split(",") | map(select(length > 0)) | map(split("->") | {(.[0]): .[1]}) | add // {}')
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Convert dates to ISO 8601 format
|
||||||
|
# If date conversion fails, use null instead of invalid date string
|
||||||
|
local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
|
||||||
|
local started_iso="null"
|
||||||
|
if [[ -n "$started" && "$started" != "null" ]]; then
|
||||||
|
started_iso=$(date -d "$started" -Iseconds 2>/dev/null || echo "null")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Add comma for JSON array
|
||||||
|
if [[ "$first" == false ]]; then
|
||||||
|
containers_json+=","
|
||||||
|
fi
|
||||||
|
first=false
|
||||||
|
|
||||||
|
# Build JSON object for this container
|
||||||
|
containers_json+="{\"container_id\":\"$container_id\","
|
||||||
|
containers_json+="\"name\":\"$name\","
|
||||||
|
containers_json+="\"image_name\":\"$image_name\","
|
||||||
|
containers_json+="\"image_tag\":\"$image_tag\","
|
||||||
|
containers_json+="\"image_repository\":\"$image_repository\","
|
||||||
|
containers_json+="\"image_source\":\"$image_source\","
|
||||||
|
containers_json+="\"image_id\":\"$full_image_id\","
|
||||||
|
containers_json+="\"status\":\"$normalized_status\","
|
||||||
|
containers_json+="\"state\":\"$state\","
|
||||||
|
containers_json+="\"ports\":$ports_json"
|
||||||
|
|
||||||
|
# Only add created_at if we have a valid date
|
||||||
|
if [[ "$created_iso" != "null" ]]; then
|
||||||
|
containers_json+=",\"created_at\":\"$created_iso\""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Only add started_at if we have a valid date
|
||||||
|
if [[ "$started_iso" != "null" ]]; then
|
||||||
|
containers_json+=",\"started_at\":\"$started_iso\""
|
||||||
|
fi
|
||||||
|
|
||||||
|
containers_json+="}"
|
||||||
|
|
||||||
|
done < <(docker ps -a --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.State}}|{{.CreatedAt}}|{{.RunningFor}}|{{.Ports}}' 2>/dev/null)
|
||||||
|
|
||||||
|
containers_json+="]"
|
||||||
|
|
||||||
|
echo "$containers_json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Collect Docker images
|
||||||
|
collect_images() {
|
||||||
|
info "Collecting Docker image information..."
|
||||||
|
|
||||||
|
local images_json="["
|
||||||
|
local first=true
|
||||||
|
|
||||||
|
while IFS='|' read -r repository tag image_id created size digest; do
|
||||||
|
if [[ -z "$repository" || "$repository" == "<none>" ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean up tag
|
||||||
|
if [[ -z "$tag" || "$tag" == "<none>" ]]; then
|
||||||
|
tag="latest"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean image ID
|
||||||
|
image_id="${image_id#sha256:}"
|
||||||
|
|
||||||
|
# Determine source
|
||||||
|
local source="docker-hub"
|
||||||
|
if [[ "$repository" == ghcr.io/* ]]; then
|
||||||
|
source="github"
|
||||||
|
elif [[ "$repository" == registry.gitlab.com/* ]]; then
|
||||||
|
source="gitlab"
|
||||||
|
elif [[ "$repository" == *"/"*"/"* ]]; then
|
||||||
|
source="private"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Convert size to bytes (approximate)
|
||||||
|
local size_bytes=0
|
||||||
|
if [[ "$size" =~ ([0-9.]+)([KMGT]?B) ]]; then
|
||||||
|
local num="${BASH_REMATCH[1]}"
|
||||||
|
local unit="${BASH_REMATCH[2]}"
|
||||||
|
case "$unit" in
|
||||||
|
KB) size_bytes=$(echo "$num * 1024" | bc | cut -d. -f1) ;;
|
||||||
|
MB) size_bytes=$(echo "$num * 1024 * 1024" | bc | cut -d. -f1) ;;
|
||||||
|
GB) size_bytes=$(echo "$num * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
|
||||||
|
TB) size_bytes=$(echo "$num * 1024 * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
|
||||||
|
B) size_bytes=$(echo "$num" | cut -d. -f1) ;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Convert created date to ISO 8601
|
||||||
|
# If date conversion fails, use null instead of invalid date string
|
||||||
|
local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
|
||||||
|
|
||||||
|
# Add comma for JSON array
|
||||||
|
if [[ "$first" == false ]]; then
|
||||||
|
images_json+=","
|
||||||
|
fi
|
||||||
|
first=false
|
||||||
|
|
||||||
|
# Build JSON object for this image
|
||||||
|
images_json+="{\"repository\":\"$repository\","
|
||||||
|
images_json+="\"tag\":\"$tag\","
|
||||||
|
images_json+="\"image_id\":\"$image_id\","
|
||||||
|
images_json+="\"source\":\"$source\","
|
||||||
|
images_json+="\"size_bytes\":$size_bytes"
|
||||||
|
|
||||||
|
# Only add created_at if we have a valid date
|
||||||
|
if [[ "$created_iso" != "null" ]]; then
|
||||||
|
images_json+=",\"created_at\":\"$created_iso\""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Only add digest if present
|
||||||
|
if [[ -n "$digest" && "$digest" != "<none>" ]]; then
|
||||||
|
images_json+=",\"digest\":\"$digest\""
|
||||||
|
fi
|
||||||
|
|
||||||
|
images_json+="}"
|
||||||
|
|
||||||
|
done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.CreatedAt}}|{{.Size}}|{{.Digest}}' --no-trunc 2>/dev/null)
|
||||||
|
|
||||||
|
images_json+="]"
|
||||||
|
|
||||||
|
echo "$images_json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check for image updates
|
||||||
|
check_image_updates() {
|
||||||
|
info "Checking for image updates..."
|
||||||
|
|
||||||
|
local updates_json="["
|
||||||
|
local first=true
|
||||||
|
local update_count=0
|
||||||
|
|
||||||
|
# Get all images
|
||||||
|
while IFS='|' read -r repository tag image_id digest; do
|
||||||
|
if [[ -z "$repository" || "$repository" == "<none>" || "$tag" == "<none>" ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Skip checking 'latest' tag as it's always considered current by name
|
||||||
|
# We'll still check digest though
|
||||||
|
local full_image="${repository}:${tag}"
|
||||||
|
|
||||||
|
# Try to get remote digest from registry
|
||||||
|
# Use docker manifest inspect to avoid pulling the image
|
||||||
|
local remote_digest=$(docker manifest inspect "$full_image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty' 2>/dev/null)
|
||||||
|
|
||||||
|
if [[ -z "$remote_digest" ]]; then
|
||||||
|
# If manifest inspect fails, try buildx imagetools inspect (works for more registries)
|
||||||
|
remote_digest=$(docker buildx imagetools inspect "$full_image" 2>/dev/null | grep -oP 'Digest:\s*\K\S+' | head -1)
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Clean up digests for comparison
|
||||||
|
local local_digest="${digest#sha256:}"
|
||||||
|
remote_digest="${remote_digest#sha256:}"
|
||||||
|
|
||||||
|
# If we got a remote digest and it's different from local, there's an update
|
||||||
|
if [[ -n "$remote_digest" && -n "$local_digest" && "$remote_digest" != "$local_digest" ]]; then
|
||||||
|
if [[ "$first" == false ]]; then
|
||||||
|
updates_json+=","
|
||||||
|
fi
|
||||||
|
first=false
|
||||||
|
|
||||||
|
# Build update JSON object
|
||||||
|
updates_json+="{\"repository\":\"$repository\","
|
||||||
|
updates_json+="\"current_tag\":\"$tag\","
|
||||||
|
updates_json+="\"available_tag\":\"$tag\","
|
||||||
|
updates_json+="\"current_digest\":\"$local_digest\","
|
||||||
|
updates_json+="\"available_digest\":\"$remote_digest\","
|
||||||
|
updates_json+="\"image_id\":\"${image_id#sha256:}\""
|
||||||
|
updates_json+="}"
|
||||||
|
|
||||||
|
((update_count++))
|
||||||
|
fi
|
||||||
|
|
||||||
|
done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.Digest}}' --no-trunc 2>/dev/null)
|
||||||
|
|
||||||
|
updates_json+="]"
|
||||||
|
|
||||||
|
info "Found $update_count image update(s) available"
|
||||||
|
|
||||||
|
echo "$updates_json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Send Docker data to server
|
||||||
|
send_docker_data() {
|
||||||
|
load_credentials
|
||||||
|
|
||||||
|
info "Collecting Docker data..."
|
||||||
|
|
||||||
|
local containers=$(collect_containers)
|
||||||
|
local images=$(collect_images)
|
||||||
|
local updates=$(check_image_updates)
|
||||||
|
|
||||||
|
# Count collected items
|
||||||
|
local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
|
||||||
|
local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
|
||||||
|
local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")
|
||||||
|
|
||||||
|
info "Found $container_count containers, $image_count images, and $update_count update(s) available"
|
||||||
|
|
||||||
|
# Build payload
|
||||||
|
local payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":$images,\"updates\":$updates}"
|
||||||
|
|
||||||
|
# Send to server
|
||||||
|
info "Sending Docker data to PatchMon server..."
|
||||||
|
|
||||||
|
local response=$(curl $CURL_FLAGS -s -w "\n%{http_code}" -X POST \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d "$payload" \
|
||||||
|
"${PATCHMON_SERVER}/api/${API_VERSION}/docker/collect" 2>&1)
|
||||||
|
|
||||||
|
local http_code=$(echo "$response" | tail -n1)
|
||||||
|
local response_body=$(echo "$response" | head -n-1)
|
||||||
|
|
||||||
|
if [[ "$http_code" == "200" ]]; then
|
||||||
|
success "Docker data sent successfully!"
|
||||||
|
log "Docker data sent: $container_count containers, $image_count images"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
error "Failed to send Docker data. HTTP Status: $http_code\nResponse: $response_body"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Test Docker data collection without sending
|
||||||
|
test_collection() {
|
||||||
|
check_docker
|
||||||
|
|
||||||
|
info "Testing Docker data collection (dry run)..."
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
local containers=$(collect_containers)
|
||||||
|
local images=$(collect_images)
|
||||||
|
local updates=$(check_image_updates)
|
||||||
|
|
||||||
|
local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
|
||||||
|
local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
|
||||||
|
local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")
|
||||||
|
|
||||||
|
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||||
|
echo -e "${GREEN}Docker Data Collection Results${NC}"
|
||||||
|
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||||
|
echo -e "Containers found: ${GREEN}$container_count${NC}"
|
||||||
|
echo -e "Images found: ${GREEN}$image_count${NC}"
|
||||||
|
echo -e "Updates available: ${YELLOW}$update_count${NC}"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
if command -v jq &> /dev/null; then
|
||||||
|
echo "━━━ Containers ━━━"
|
||||||
|
echo "$containers" | jq -r '.[] | "\(.name) (\(.status)) - \(.image_name):\(.image_tag)"' | head -10
|
||||||
|
if [[ $container_count -gt 10 ]]; then
|
||||||
|
echo "... and $((container_count - 10)) more"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
echo "━━━ Images ━━━"
|
||||||
|
echo "$images" | jq -r '.[] | "\(.repository):\(.tag) (\(.size_bytes / 1024 / 1024 | floor)MB)"' | head -10
|
||||||
|
if [[ $image_count -gt 10 ]]; then
|
||||||
|
echo "... and $((image_count - 10)) more"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ $update_count -gt 0 ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "━━━ Available Updates ━━━"
|
||||||
|
echo "$updates" | jq -r '.[] | "\(.repository):\(.current_tag) → \(.available_tag)"'
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
success "Test collection completed successfully!"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Show help
|
||||||
|
show_help() {
|
||||||
|
cat << EOF
|
||||||
|
PatchMon Docker Agent v${AGENT_VERSION}
|
||||||
|
|
||||||
|
This agent collects Docker container and image information and sends it to PatchMon.
|
||||||
|
|
||||||
|
USAGE:
|
||||||
|
$0 <command>
|
||||||
|
|
||||||
|
COMMANDS:
|
||||||
|
collect Collect and send Docker data to PatchMon server
|
||||||
|
test Test Docker data collection without sending (dry run)
|
||||||
|
help Show this help message
|
||||||
|
|
||||||
|
REQUIREMENTS:
|
||||||
|
- Docker must be installed and running
|
||||||
|
- Main PatchMon agent must be configured first
|
||||||
|
- Credentials file must exist at $CREDENTIALS_FILE
|
||||||
|
|
||||||
|
EXAMPLES:
|
||||||
|
# Test collection (dry run)
|
||||||
|
sudo $0 test
|
||||||
|
|
||||||
|
# Collect and send Docker data
|
||||||
|
sudo $0 collect
|
||||||
|
|
||||||
|
SCHEDULING:
|
||||||
|
To run this agent automatically, add a cron job:
|
||||||
|
|
||||||
|
# Run every 5 minutes
|
||||||
|
*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect
|
||||||
|
|
||||||
|
# Run every hour
|
||||||
|
0 * * * * /usr/local/bin/patchmon-docker-agent.sh collect
|
||||||
|
|
||||||
|
FILES:
|
||||||
|
Config: $CONFIG_FILE
|
||||||
|
Credentials: $CREDENTIALS_FILE
|
||||||
|
Log: $LOG_FILE
|
||||||
|
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main function
|
||||||
|
main() {
|
||||||
|
case "$1" in
|
||||||
|
"collect")
|
||||||
|
check_docker
|
||||||
|
load_config
|
||||||
|
send_docker_data
|
||||||
|
;;
|
||||||
|
"test")
|
||||||
|
check_docker
|
||||||
|
load_config
|
||||||
|
test_collection
|
||||||
|
;;
|
||||||
|
"help"|"--help"|"-h"|"")
|
||||||
|
show_help
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
error "Unknown command: $1\n\nRun '$0 help' for usage information."
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
# Run main function
|
||||||
|
main "$@"
|
||||||
|
|
||||||
@@ -97,13 +97,22 @@ verify_datetime
|
|||||||
# Clean up old files (keep only last 3 of each type)
|
# Clean up old files (keep only last 3 of each type)
|
||||||
cleanup_old_files() {
|
cleanup_old_files() {
|
||||||
# Clean up old credential backups
|
# Clean up old credential backups
|
||||||
ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
ls -t /etc/patchmon/credentials.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
|
# Clean up old config backups
|
||||||
|
ls -t /etc/patchmon/config.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
# Clean up old agent backups
|
# Clean up old agent backups
|
||||||
ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
ls -t /usr/local/bin/patchmon-agent.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
# Clean up old log files
|
# Clean up old log files
|
||||||
ls -t /var/log/patchmon-agent.log.old.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
ls -t /etc/patchmon/logs/patchmon-agent.log.old.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
|
# Clean up old shell script backups (if any exist)
|
||||||
|
ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
|
# Clean up old credentials backups (if any exist)
|
||||||
|
ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
}
|
}
|
||||||
|
|
||||||
# Run cleanup at start
|
# Run cleanup at start
|
||||||
@@ -127,6 +136,12 @@ if [[ -z "$PATCHMON_URL" ]] || [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
|
|||||||
error "Missing required parameters. This script should be called via the PatchMon web interface."
|
error "Missing required parameters. This script should be called via the PatchMon web interface."
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Parse architecture parameter (default to amd64)
|
||||||
|
ARCHITECTURE="${ARCHITECTURE:-amd64}"
|
||||||
|
if [[ "$ARCHITECTURE" != "amd64" && "$ARCHITECTURE" != "386" && "$ARCHITECTURE" != "arm64" ]]; then
|
||||||
|
error "Invalid architecture '$ARCHITECTURE'. Must be one of: amd64, 386, arm64"
|
||||||
|
fi
|
||||||
|
|
||||||
# Check if --force flag is set (for bypassing broken packages)
|
# Check if --force flag is set (for bypassing broken packages)
|
||||||
FORCE_INSTALL="${FORCE_INSTALL:-false}"
|
FORCE_INSTALL="${FORCE_INSTALL:-false}"
|
||||||
if [[ "$*" == *"--force"* ]] || [[ "$FORCE_INSTALL" == "true" ]]; then
|
if [[ "$*" == *"--force"* ]] || [[ "$FORCE_INSTALL" == "true" ]]; then
|
||||||
@@ -142,6 +157,7 @@ info "🚀 Starting PatchMon Agent Installation..."
|
|||||||
info "📋 Server: $PATCHMON_URL"
|
info "📋 Server: $PATCHMON_URL"
|
||||||
info "🔑 API ID: ${API_ID:0:16}..."
|
info "🔑 API ID: ${API_ID:0:16}..."
|
||||||
info "🆔 Machine ID: ${MACHINE_ID:0:16}..."
|
info "🆔 Machine ID: ${MACHINE_ID:0:16}..."
|
||||||
|
info "🏗️ Architecture: $ARCHITECTURE"
|
||||||
|
|
||||||
# Display diagnostic information
|
# Display diagnostic information
|
||||||
echo ""
|
echo ""
|
||||||
@@ -150,6 +166,7 @@ echo " • URL: $PATCHMON_URL"
|
|||||||
echo " • CURL FLAGS: $CURL_FLAGS"
|
echo " • CURL FLAGS: $CURL_FLAGS"
|
||||||
echo " • API ID: ${API_ID:0:16}..."
|
echo " • API ID: ${API_ID:0:16}..."
|
||||||
echo " • API Key: ${API_KEY:0:16}..."
|
echo " • API Key: ${API_KEY:0:16}..."
|
||||||
|
echo " • Architecture: $ARCHITECTURE"
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
# Install required dependencies
|
# Install required dependencies
|
||||||
@@ -294,67 +311,117 @@ else
|
|||||||
mkdir -p /etc/patchmon
|
mkdir -p /etc/patchmon
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Step 2: Create credentials file
|
# Step 2: Create configuration files
|
||||||
info "🔐 Creating API credentials file..."
|
info "🔐 Creating configuration files..."
|
||||||
|
|
||||||
|
# Check if config file already exists
|
||||||
|
if [[ -f "/etc/patchmon/config.yml" ]]; then
|
||||||
|
warning "⚠️ Config file already exists at /etc/patchmon/config.yml"
|
||||||
|
warning "⚠️ Moving existing file out of the way for fresh installation"
|
||||||
|
|
||||||
|
# Clean up old config backups (keep only last 3)
|
||||||
|
ls -t /etc/patchmon/config.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
|
# Move existing file out of the way
|
||||||
|
mv /etc/patchmon/config.yml /etc/patchmon/config.yml.backup.$(date +%Y%m%d_%H%M%S)
|
||||||
|
info "📋 Moved existing config to: /etc/patchmon/config.yml.backup.$(date +%Y%m%d_%H%M%S)"
|
||||||
|
fi
|
||||||
|
|
||||||
# Check if credentials file already exists
|
# Check if credentials file already exists
|
||||||
if [[ -f "/etc/patchmon/credentials" ]]; then
|
if [[ -f "/etc/patchmon/credentials.yml" ]]; then
|
||||||
warning "⚠️ Credentials file already exists at /etc/patchmon/credentials"
|
warning "⚠️ Credentials file already exists at /etc/patchmon/credentials.yml"
|
||||||
warning "⚠️ Moving existing file out of the way for fresh installation"
|
warning "⚠️ Moving existing file out of the way for fresh installation"
|
||||||
|
|
||||||
# Clean up old credential backups (keep only last 3)
|
# Clean up old credential backups (keep only last 3)
|
||||||
ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
ls -t /etc/patchmon/credentials.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
# Move existing file out of the way
|
# Move existing file out of the way
|
||||||
mv /etc/patchmon/credentials /etc/patchmon/credentials.backup.$(date +%Y%m%d_%H%M%S)
|
mv /etc/patchmon/credentials.yml /etc/patchmon/credentials.yml.backup.$(date +%Y%m%d_%H%M%S)
|
||||||
info "📋 Moved existing credentials to: /etc/patchmon/credentials.backup.$(date +%Y%m%d_%H%M%S)"
|
info "📋 Moved existing credentials to: /etc/patchmon/credentials.yml.backup.$(date +%Y%m%d_%H%M%S)"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
cat > /etc/patchmon/credentials << EOF
|
# Clean up old credentials file if it exists (from previous installations)
|
||||||
|
if [[ -f "/etc/patchmon/credentials" ]]; then
|
||||||
|
warning "⚠️ Found old credentials file, removing it..."
|
||||||
|
rm -f /etc/patchmon/credentials
|
||||||
|
info "📋 Removed old credentials file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create main config file
|
||||||
|
cat > /etc/patchmon/config.yml << EOF
|
||||||
|
# PatchMon Agent Configuration
|
||||||
|
# Generated on $(date)
|
||||||
|
patchmon_server: "$PATCHMON_URL"
|
||||||
|
api_version: "v1"
|
||||||
|
credentials_file: "/etc/patchmon/credentials.yml"
|
||||||
|
log_file: "/etc/patchmon/logs/patchmon-agent.log"
|
||||||
|
log_level: "info"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Create credentials file
|
||||||
|
cat > /etc/patchmon/credentials.yml << EOF
|
||||||
# PatchMon API Credentials
|
# PatchMon API Credentials
|
||||||
# Generated on $(date)
|
# Generated on $(date)
|
||||||
PATCHMON_URL="$PATCHMON_URL"
|
api_id: "$API_ID"
|
||||||
API_ID="$API_ID"
|
api_key: "$API_KEY"
|
||||||
API_KEY="$API_KEY"
|
|
||||||
EOF
|
EOF
|
||||||
chmod 600 /etc/patchmon/credentials
|
|
||||||
|
|
||||||
# Step 3: Download the agent script using API credentials
|
chmod 600 /etc/patchmon/config.yml
|
||||||
info "📥 Downloading PatchMon agent script..."
|
chmod 600 /etc/patchmon/credentials.yml
|
||||||
|
|
||||||
# Check if agent script already exists
|
# Step 3: Download the PatchMon agent binary using API credentials
|
||||||
if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
|
info "📥 Downloading PatchMon agent binary..."
|
||||||
warning "⚠️ Agent script already exists at /usr/local/bin/patchmon-agent.sh"
|
|
||||||
|
# Determine the binary filename based on architecture
|
||||||
|
BINARY_NAME="patchmon-agent-linux-${ARCHITECTURE}"
|
||||||
|
|
||||||
|
# Check if agent binary already exists
|
||||||
|
if [[ -f "/usr/local/bin/patchmon-agent" ]]; then
|
||||||
|
warning "⚠️ Agent binary already exists at /usr/local/bin/patchmon-agent"
|
||||||
warning "⚠️ Moving existing file out of the way for fresh installation"
|
warning "⚠️ Moving existing file out of the way for fresh installation"
|
||||||
|
|
||||||
# Clean up old agent backups (keep only last 3)
|
# Clean up old agent backups (keep only last 3)
|
||||||
ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
ls -t /usr/local/bin/patchmon-agent.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
|
||||||
|
|
||||||
# Move existing file out of the way
|
# Move existing file out of the way
|
||||||
mv /usr/local/bin/patchmon-agent.sh /usr/local/bin/patchmon-agent.sh.backup.$(date +%Y%m%d_%H%M%S)
|
mv /usr/local/bin/patchmon-agent /usr/local/bin/patchmon-agent.backup.$(date +%Y%m%d_%H%M%S)
|
||||||
info "📋 Moved existing agent to: /usr/local/bin/patchmon-agent.sh.backup.$(date +%Y%m%d_%H%M%S)"
|
info "📋 Moved existing agent to: /usr/local/bin/patchmon-agent.backup.$(date +%Y%m%d_%H%M%S)"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Clean up old shell script if it exists (from previous installations)
|
||||||
|
if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
|
||||||
|
warning "⚠️ Found old shell script agent, removing it..."
|
||||||
|
rm -f /usr/local/bin/patchmon-agent.sh
|
||||||
|
info "📋 Removed old shell script agent"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Download the binary
|
||||||
curl $CURL_FLAGS \
|
curl $CURL_FLAGS \
|
||||||
-H "X-API-ID: $API_ID" \
|
-H "X-API-ID: $API_ID" \
|
||||||
-H "X-API-KEY: $API_KEY" \
|
-H "X-API-KEY: $API_KEY" \
|
||||||
"$PATCHMON_URL/api/v1/hosts/agent/download" \
|
"$PATCHMON_URL/api/v1/hosts/agent/download?arch=$ARCHITECTURE" \
|
||||||
-o /usr/local/bin/patchmon-agent.sh
|
-o /usr/local/bin/patchmon-agent
|
||||||
|
|
||||||
chmod +x /usr/local/bin/patchmon-agent.sh
|
chmod +x /usr/local/bin/patchmon-agent
|
||||||
|
|
||||||
# Get the agent version from the downloaded script
|
# Get the agent version from the binary
|
||||||
AGENT_VERSION=$(grep '^AGENT_VERSION=' /usr/local/bin/patchmon-agent.sh | cut -d'"' -f2 2>/dev/null || echo "Unknown")
|
AGENT_VERSION=$(/usr/local/bin/patchmon-agent version 2>/dev/null || echo "Unknown")
|
||||||
info "📋 Agent version: $AGENT_VERSION"
|
info "📋 Agent version: $AGENT_VERSION"
|
||||||
|
|
||||||
|
# Handle existing log files and create log directory
|
||||||
|
info "📁 Setting up log directory..."
|
||||||
|
|
||||||
|
# Create log directory if it doesn't exist
|
||||||
|
mkdir -p /etc/patchmon/logs
|
||||||
|
|
||||||
# Handle existing log files
|
# Handle existing log files
|
||||||
if [[ -f "/var/log/patchmon-agent.log" ]]; then
|
if [[ -f "/etc/patchmon/logs/patchmon-agent.log" ]]; then
|
||||||
warning "⚠️ Existing log file found at /var/log/patchmon-agent.log"
|
warning "⚠️ Existing log file found at /etc/patchmon/logs/patchmon-agent.log"
|
||||||
warning "⚠️ Rotating log file for fresh start"
|
warning "⚠️ Rotating log file for fresh start"
|
||||||
|
|
||||||
# Rotate the log file
|
# Rotate the log file
|
||||||
mv /var/log/patchmon-agent.log /var/log/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)
|
mv /etc/patchmon/logs/patchmon-agent.log /etc/patchmon/logs/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)
|
||||||
info "📋 Log file rotated to: /var/log/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)"
|
info "📋 Log file rotated to: /etc/patchmon/logs/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Step 4: Test the configuration
|
# Step 4: Test the configuration
|
||||||
@@ -386,19 +453,76 @@ if [[ "$http_code" == "200" ]]; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
info "🧪 Testing API credentials and connectivity..."
|
info "🧪 Testing API credentials and connectivity..."
|
||||||
if /usr/local/bin/patchmon-agent.sh test; then
|
if /usr/local/bin/patchmon-agent ping; then
|
||||||
success "✅ TEST: API credentials are valid and server is reachable"
|
success "✅ TEST: API credentials are valid and server is reachable"
|
||||||
else
|
else
|
||||||
error "❌ Failed to validate API credentials or reach server"
|
error "❌ Failed to validate API credentials or reach server"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Step 5: Send initial data and setup automated updates
|
# Step 5: Send initial data and setup systemd service
|
||||||
info "📊 Sending initial package data to server..."
|
info "📊 Sending initial package data to server..."
|
||||||
if /usr/local/bin/patchmon-agent.sh update; then
|
if /usr/local/bin/patchmon-agent report; then
|
||||||
success "✅ UPDATE: Initial package data sent successfully"
|
success "✅ UPDATE: Initial package data sent successfully"
|
||||||
info "✅ Automated updates configured by agent"
|
|
||||||
else
|
else
|
||||||
warning "⚠️ Failed to send initial data. You can retry later with: /usr/local/bin/patchmon-agent.sh update"
|
warning "⚠️ Failed to send initial data. You can retry later with: /usr/local/bin/patchmon-agent report"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Step 6: Setup systemd service for WebSocket connection
|
||||||
|
info "🔧 Setting up systemd service..."
|
||||||
|
|
||||||
|
# Stop and disable existing service if it exists
|
||||||
|
if systemctl is-active --quiet patchmon-agent.service 2>/dev/null; then
|
||||||
|
warning "⚠️ Stopping existing PatchMon agent service..."
|
||||||
|
systemctl stop patchmon-agent.service
|
||||||
|
fi
|
||||||
|
|
||||||
|
if systemctl is-enabled --quiet patchmon-agent.service 2>/dev/null; then
|
||||||
|
warning "⚠️ Disabling existing PatchMon agent service..."
|
||||||
|
systemctl disable patchmon-agent.service
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create systemd service file
|
||||||
|
cat > /etc/systemd/system/patchmon-agent.service << EOF
|
||||||
|
[Unit]
|
||||||
|
Description=PatchMon Agent Service
|
||||||
|
After=network.target
|
||||||
|
Wants=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=root
|
||||||
|
ExecStart=/usr/local/bin/patchmon-agent serve
|
||||||
|
Restart=always
|
||||||
|
RestartSec=10
|
||||||
|
WorkingDirectory=/etc/patchmon
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
StandardOutput=journal
|
||||||
|
StandardError=journal
|
||||||
|
SyslogIdentifier=patchmon-agent
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Clean up old crontab entries if they exist (from previous installations)
|
||||||
|
if crontab -l 2>/dev/null | grep -q "patchmon-agent"; then
|
||||||
|
warning "⚠️ Found old crontab entries, removing them..."
|
||||||
|
crontab -l 2>/dev/null | grep -v "patchmon-agent" | crontab -
|
||||||
|
info "📋 Removed old crontab entries"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Reload systemd and enable/start the service
|
||||||
|
systemctl daemon-reload
|
||||||
|
systemctl enable patchmon-agent.service
|
||||||
|
systemctl start patchmon-agent.service
|
||||||
|
|
||||||
|
# Check if service started successfully
|
||||||
|
if systemctl is-active --quiet patchmon-agent.service; then
|
||||||
|
success "✅ PatchMon Agent service started successfully"
|
||||||
|
info "🔗 WebSocket connection established"
|
||||||
|
else
|
||||||
|
warning "⚠️ Service may have failed to start. Check status with: systemctl status patchmon-agent"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Installation complete
|
# Installation complete
|
||||||
@@ -406,14 +530,16 @@ success "🎉 PatchMon Agent installation completed successfully!"
|
|||||||
echo ""
|
echo ""
|
||||||
echo -e "${GREEN}📋 Installation Summary:${NC}"
|
echo -e "${GREEN}📋 Installation Summary:${NC}"
|
||||||
echo " • Configuration directory: /etc/patchmon"
|
echo " • Configuration directory: /etc/patchmon"
|
||||||
echo " • Agent installed: /usr/local/bin/patchmon-agent.sh"
|
echo " • Agent binary installed: /usr/local/bin/patchmon-agent"
|
||||||
|
echo " • Architecture: $ARCHITECTURE"
|
||||||
echo " • Dependencies installed: jq, curl, bc"
|
echo " • Dependencies installed: jq, curl, bc"
|
||||||
echo " • Automated updates configured via crontab"
|
echo " • Systemd service configured and running"
|
||||||
echo " • API credentials configured and tested"
|
echo " • API credentials configured and tested"
|
||||||
echo " • Update schedule managed by agent"
|
echo " • WebSocket connection established"
|
||||||
|
echo " • Logs directory: /etc/patchmon/logs"
|
||||||
|
|
||||||
# Check for moved files and show them
|
# Check for moved files and show them
|
||||||
MOVED_FILES=$(ls /etc/patchmon/credentials.backup.* /usr/local/bin/patchmon-agent.sh.backup.* /var/log/patchmon-agent.log.old.* 2>/dev/null || true)
|
MOVED_FILES=$(ls /etc/patchmon/credentials.yml.backup.* /etc/patchmon/config.yml.backup.* /usr/local/bin/patchmon-agent.backup.* /etc/patchmon/logs/patchmon-agent.log.old.* /usr/local/bin/patchmon-agent.sh.backup.* /etc/patchmon/credentials.backup.* 2>/dev/null || true)
|
||||||
if [[ -n "$MOVED_FILES" ]]; then
|
if [[ -n "$MOVED_FILES" ]]; then
|
||||||
echo ""
|
echo ""
|
||||||
echo -e "${YELLOW}📋 Files Moved for Fresh Installation:${NC}"
|
echo -e "${YELLOW}📋 Files Moved for Fresh Installation:${NC}"
|
||||||
@@ -426,8 +552,11 @@ fi
|
|||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo -e "${BLUE}🔧 Management Commands:${NC}"
|
echo -e "${BLUE}🔧 Management Commands:${NC}"
|
||||||
echo " • Test connection: /usr/local/bin/patchmon-agent.sh test"
|
echo " • Test connection: /usr/local/bin/patchmon-agent ping"
|
||||||
echo " • Manual update: /usr/local/bin/patchmon-agent.sh update"
|
echo " • Manual report: /usr/local/bin/patchmon-agent report"
|
||||||
echo " • Check status: /usr/local/bin/patchmon-agent.sh diagnostics"
|
echo " • Check status: /usr/local/bin/patchmon-agent diagnostics"
|
||||||
|
echo " • Service status: systemctl status patchmon-agent"
|
||||||
|
echo " • Service logs: journalctl -u patchmon-agent -f"
|
||||||
|
echo " • Restart service: systemctl restart patchmon-agent"
|
||||||
echo ""
|
echo ""
|
||||||
success "✅ Your system is now being monitored by PatchMon!"
|
success "✅ Your system is now being monitored by PatchMon!"
|
||||||
|
|||||||
@@ -3,6 +3,12 @@ DATABASE_URL="postgresql://patchmon_user:p@tchm0n_p@55@localhost:5432/patchmon_d
|
|||||||
PM_DB_CONN_MAX_ATTEMPTS=30
|
PM_DB_CONN_MAX_ATTEMPTS=30
|
||||||
PM_DB_CONN_WAIT_INTERVAL=2
|
PM_DB_CONN_WAIT_INTERVAL=2
|
||||||
|
|
||||||
|
# Redis Configuration
|
||||||
|
REDIS_HOST=localhost
|
||||||
|
REDIS_PORT=6379
|
||||||
|
REDIS_PASSWORD=your-redis-password-here
|
||||||
|
REDIS_DB=0
|
||||||
|
|
||||||
# Server Configuration
|
# Server Configuration
|
||||||
PORT=3001
|
PORT=3001
|
||||||
NODE_ENV=development
|
NODE_ENV=development
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "patchmon-backend",
|
"name": "patchmon-backend",
|
||||||
"version": "1.2.7",
|
"version": "1.2.9",
|
||||||
"description": "Backend API for Linux Patch Monitoring System",
|
"description": "Backend API for Linux Patch Monitoring System",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"main": "src/server.js",
|
"main": "src/server.js",
|
||||||
@@ -14,20 +14,26 @@
|
|||||||
"db:studio": "prisma studio"
|
"db:studio": "prisma studio"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@bull-board/api": "^6.13.1",
|
||||||
|
"@bull-board/express": "^6.13.1",
|
||||||
"@prisma/client": "^6.1.0",
|
"@prisma/client": "^6.1.0",
|
||||||
"bcryptjs": "^2.4.3",
|
"bcryptjs": "^2.4.3",
|
||||||
|
"bullmq": "^5.61.0",
|
||||||
|
"cookie-parser": "^1.4.7",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"dotenv": "^16.4.7",
|
"dotenv": "^16.4.7",
|
||||||
"express": "^4.21.2",
|
"express": "^5.0.0",
|
||||||
"express-rate-limit": "^7.5.0",
|
"express-rate-limit": "^7.5.0",
|
||||||
"express-validator": "^7.2.0",
|
"express-validator": "^7.2.0",
|
||||||
"helmet": "^8.0.0",
|
"helmet": "^8.0.0",
|
||||||
|
"ioredis": "^5.8.1",
|
||||||
"jsonwebtoken": "^9.0.2",
|
"jsonwebtoken": "^9.0.2",
|
||||||
"moment": "^2.30.1",
|
"moment": "^2.30.1",
|
||||||
"qrcode": "^1.5.4",
|
"qrcode": "^1.5.4",
|
||||||
"speakeasy": "^2.0.0",
|
"speakeasy": "^2.0.0",
|
||||||
"uuid": "^11.0.3",
|
"uuid": "^11.0.3",
|
||||||
"winston": "^3.17.0"
|
"winston": "^3.17.0",
|
||||||
|
"ws": "^8.18.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/bcryptjs": "^2.4.6",
|
"@types/bcryptjs": "^2.4.6",
|
||||||
|
|||||||
@@ -0,0 +1,64 @@
|
|||||||
|
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
|
||||||
|
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
|
||||||
|
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
DECLARE
|
||||||
|
table_exists boolean := false;
|
||||||
|
migration_exists boolean := false;
|
||||||
|
BEGIN
|
||||||
|
-- Check if user_sessions table exists
|
||||||
|
SELECT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'user_sessions'
|
||||||
|
) INTO table_exists;
|
||||||
|
|
||||||
|
-- Check if the migration record already exists
|
||||||
|
SELECT EXISTS (
|
||||||
|
SELECT 1 FROM _prisma_migrations
|
||||||
|
WHERE migration_name = '20251005000000_add_user_sessions'
|
||||||
|
) INTO migration_exists;
|
||||||
|
|
||||||
|
-- If table exists but no migration record, create one
|
||||||
|
IF table_exists AND NOT migration_exists THEN
|
||||||
|
RAISE NOTICE 'Table exists but no migration record found - creating migration record for 1.2.7 upgrade';
|
||||||
|
|
||||||
|
-- Insert a successful migration record for the existing table
|
||||||
|
INSERT INTO _prisma_migrations (
|
||||||
|
id,
|
||||||
|
checksum,
|
||||||
|
finished_at,
|
||||||
|
migration_name,
|
||||||
|
logs,
|
||||||
|
rolled_back_at,
|
||||||
|
started_at,
|
||||||
|
applied_steps_count
|
||||||
|
) VALUES (
|
||||||
|
gen_random_uuid()::text,
|
||||||
|
'', -- Empty checksum since we're reconciling
|
||||||
|
NOW(),
|
||||||
|
'20251005000000_add_user_sessions',
|
||||||
|
'Reconciled from 1.2.7 - table already exists',
|
||||||
|
NULL,
|
||||||
|
NOW(),
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
RAISE NOTICE 'Migration record created for existing table';
|
||||||
|
ELSIF table_exists AND migration_exists THEN
|
||||||
|
RAISE NOTICE 'Table exists and migration record exists - no action needed';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'Table does not exist - migration will proceed normally';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Additional check: If we have any old migration names, update them
|
||||||
|
IF EXISTS (SELECT 1 FROM _prisma_migrations WHERE migration_name = 'add_user_sessions') THEN
|
||||||
|
RAISE NOTICE 'Found old migration name - updating to new format';
|
||||||
|
UPDATE _prisma_migrations
|
||||||
|
SET migration_name = '20251005000000_add_user_sessions'
|
||||||
|
WHERE migration_name = 'add_user_sessions';
|
||||||
|
RAISE NOTICE 'Old migration name updated';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
END $$;
|
||||||
@@ -0,0 +1,96 @@
|
|||||||
|
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
|
||||||
|
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
|
||||||
|
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp
|
||||||
|
|
||||||
|
DO $$
|
||||||
|
DECLARE
|
||||||
|
old_migration_exists boolean := false;
|
||||||
|
table_exists boolean := false;
|
||||||
|
failed_migration_exists boolean := false;
|
||||||
|
BEGIN
|
||||||
|
-- Check if the old migration name exists
|
||||||
|
SELECT EXISTS (
|
||||||
|
SELECT 1 FROM _prisma_migrations
|
||||||
|
WHERE migration_name = 'add_user_sessions'
|
||||||
|
) INTO old_migration_exists;
|
||||||
|
|
||||||
|
-- Check if user_sessions table exists
|
||||||
|
SELECT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'user_sessions'
|
||||||
|
) INTO table_exists;
|
||||||
|
|
||||||
|
-- Check if there's a failed migration attempt
|
||||||
|
SELECT EXISTS (
|
||||||
|
SELECT 1 FROM _prisma_migrations
|
||||||
|
WHERE migration_name = '20251005000000_add_user_sessions'
|
||||||
|
AND finished_at IS NULL
|
||||||
|
) INTO failed_migration_exists;
|
||||||
|
|
||||||
|
-- Scenario 1: Old migration exists, table exists, no failed migration
|
||||||
|
-- This means 1.2.7 was installed and we need to update the migration name
|
||||||
|
IF old_migration_exists AND table_exists AND NOT failed_migration_exists THEN
|
||||||
|
RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';
|
||||||
|
|
||||||
|
-- Update the old migration name to the new timestamped version
|
||||||
|
UPDATE _prisma_migrations
|
||||||
|
SET migration_name = '20251005000000_add_user_sessions'
|
||||||
|
WHERE migration_name = 'add_user_sessions';
|
||||||
|
|
||||||
|
RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Scenario 2: Failed migration exists (upgrade attempt gone wrong)
|
||||||
|
IF failed_migration_exists THEN
|
||||||
|
RAISE NOTICE 'Found failed migration attempt - cleaning up';
|
||||||
|
|
||||||
|
-- If table exists, it means the migration partially succeeded
|
||||||
|
IF table_exists THEN
|
||||||
|
RAISE NOTICE 'Table exists - marking migration as applied';
|
||||||
|
|
||||||
|
-- Delete the failed migration record
|
||||||
|
DELETE FROM _prisma_migrations
|
||||||
|
WHERE migration_name = '20251005000000_add_user_sessions'
|
||||||
|
AND finished_at IS NULL;
|
||||||
|
|
||||||
|
-- Insert a successful migration record
|
||||||
|
INSERT INTO _prisma_migrations (
|
||||||
|
id,
|
||||||
|
checksum,
|
||||||
|
finished_at,
|
||||||
|
migration_name,
|
||||||
|
logs,
|
||||||
|
rolled_back_at,
|
||||||
|
started_at,
|
||||||
|
applied_steps_count
|
||||||
|
) VALUES (
|
||||||
|
gen_random_uuid()::text,
|
||||||
|
'', -- Empty checksum since we're reconciling
|
||||||
|
NOW(),
|
||||||
|
'20251005000000_add_user_sessions',
|
||||||
|
NULL,
|
||||||
|
NULL,
|
||||||
|
NOW(),
|
||||||
|
1
|
||||||
|
);
|
||||||
|
|
||||||
|
RAISE NOTICE 'Migration marked as successfully applied';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'Table does not exist - removing failed migration to allow retry';
|
||||||
|
|
||||||
|
-- Just delete the failed migration to allow it to retry
|
||||||
|
DELETE FROM _prisma_migrations
|
||||||
|
WHERE migration_name = '20251005000000_add_user_sessions'
|
||||||
|
AND finished_at IS NULL;
|
||||||
|
|
||||||
|
RAISE NOTICE 'Failed migration removed - will retry on next migration run';
|
||||||
|
END IF;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- Scenario 3: Everything is clean (fresh install or already reconciled)
|
||||||
|
IF NOT old_migration_exists AND NOT failed_migration_exists THEN
|
||||||
|
RAISE NOTICE 'No migration reconciliation needed';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
END $$;
|
||||||
@@ -0,0 +1,106 @@
|
|||||||
|
-- CreateTable (with existence check for 1.2.7 compatibility)
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
-- Check if table already exists (from 1.2.7 installation)
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name = 'user_sessions'
|
||||||
|
) THEN
|
||||||
|
-- Table doesn't exist, create it
|
||||||
|
CREATE TABLE "user_sessions" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"user_id" TEXT NOT NULL,
|
||||||
|
"refresh_token" TEXT NOT NULL,
|
||||||
|
"access_token_hash" TEXT,
|
||||||
|
"ip_address" TEXT,
|
||||||
|
"user_agent" TEXT,
|
||||||
|
"last_activity" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"expires_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"is_revoked" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
|
||||||
|
CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
RAISE NOTICE 'Created user_sessions table';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'user_sessions table already exists, skipping creation';
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- CreateIndex (with existence check)
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM pg_indexes
|
||||||
|
WHERE tablename = 'user_sessions'
|
||||||
|
AND indexname = 'user_sessions_refresh_token_key'
|
||||||
|
) THEN
|
||||||
|
CREATE UNIQUE INDEX "user_sessions_refresh_token_key" ON "user_sessions"("refresh_token");
|
||||||
|
RAISE NOTICE 'Created user_sessions_refresh_token_key index';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'user_sessions_refresh_token_key index already exists, skipping';
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- CreateIndex (with existence check)
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM pg_indexes
|
||||||
|
WHERE tablename = 'user_sessions'
|
||||||
|
AND indexname = 'user_sessions_user_id_idx'
|
||||||
|
) THEN
|
||||||
|
CREATE INDEX "user_sessions_user_id_idx" ON "user_sessions"("user_id");
|
||||||
|
RAISE NOTICE 'Created user_sessions_user_id_idx index';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'user_sessions_user_id_idx index already exists, skipping';
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- CreateIndex (with existence check)
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM pg_indexes
|
||||||
|
WHERE tablename = 'user_sessions'
|
||||||
|
AND indexname = 'user_sessions_refresh_token_idx'
|
||||||
|
) THEN
|
||||||
|
CREATE INDEX "user_sessions_refresh_token_idx" ON "user_sessions"("refresh_token");
|
||||||
|
RAISE NOTICE 'Created user_sessions_refresh_token_idx index';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'user_sessions_refresh_token_idx index already exists, skipping';
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- CreateIndex (with existence check)
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM pg_indexes
|
||||||
|
WHERE tablename = 'user_sessions'
|
||||||
|
AND indexname = 'user_sessions_expires_at_idx'
|
||||||
|
) THEN
|
||||||
|
CREATE INDEX "user_sessions_expires_at_idx" ON "user_sessions"("expires_at");
|
||||||
|
RAISE NOTICE 'Created user_sessions_expires_at_idx index';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'user_sessions_expires_at_idx index already exists, skipping';
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- AddForeignKey (with existence check)
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
IF NOT EXISTS (
|
||||||
|
SELECT 1 FROM information_schema.table_constraints
|
||||||
|
WHERE table_name = 'user_sessions'
|
||||||
|
AND constraint_name = 'user_sessions_user_id_fkey'
|
||||||
|
) THEN
|
||||||
|
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
RAISE NOTICE 'Created user_sessions_user_id_fkey foreign key';
|
||||||
|
ELSE
|
||||||
|
RAISE NOTICE 'user_sessions_user_id_fkey foreign key already exists, skipping';
|
||||||
|
END IF;
|
||||||
|
END $$;
|
||||||
|
|
||||||
@@ -0,0 +1,94 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "docker_images" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"repository" TEXT NOT NULL,
|
||||||
|
"tag" TEXT NOT NULL DEFAULT 'latest',
|
||||||
|
"image_id" TEXT NOT NULL,
|
||||||
|
"digest" TEXT,
|
||||||
|
"size_bytes" BIGINT,
|
||||||
|
"source" TEXT NOT NULL DEFAULT 'docker-hub',
|
||||||
|
"created_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
"last_pulled" TIMESTAMP(3),
|
||||||
|
"last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "docker_images_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "docker_containers" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"host_id" TEXT NOT NULL,
|
||||||
|
"container_id" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"image_id" TEXT,
|
||||||
|
"image_name" TEXT NOT NULL,
|
||||||
|
"image_tag" TEXT NOT NULL DEFAULT 'latest',
|
||||||
|
"status" TEXT NOT NULL,
|
||||||
|
"state" TEXT,
|
||||||
|
"ports" JSONB,
|
||||||
|
"created_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
"started_at" TIMESTAMP(3),
|
||||||
|
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
"last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
|
||||||
|
CONSTRAINT "docker_containers_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "docker_image_updates" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"image_id" TEXT NOT NULL,
|
||||||
|
"current_tag" TEXT NOT NULL,
|
||||||
|
"available_tag" TEXT NOT NULL,
|
||||||
|
"is_security_update" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"severity" TEXT,
|
||||||
|
"changelog_url" TEXT,
|
||||||
|
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "docker_image_updates_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_images_repository_idx" ON "docker_images"("repository");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_images_source_idx" ON "docker_images"("source");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_images_repository_tag_idx" ON "docker_images"("repository", "tag");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "docker_images_repository_tag_image_id_key" ON "docker_images"("repository", "tag", "image_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_containers_host_id_idx" ON "docker_containers"("host_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_containers_image_id_idx" ON "docker_containers"("image_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_containers_status_idx" ON "docker_containers"("status");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_containers_name_idx" ON "docker_containers"("name");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "docker_containers_host_id_container_id_key" ON "docker_containers"("host_id", "container_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_image_updates_image_id_idx" ON "docker_image_updates"("image_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "docker_image_updates_is_security_update_idx" ON "docker_image_updates"("is_security_update");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "docker_image_updates_image_id_available_tag_key" ON "docker_image_updates"("image_id", "available_tag");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "docker_containers" ADD CONSTRAINT "docker_containers_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "docker_image_updates" ADD CONSTRAINT "docker_image_updates_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "job_history" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"job_id" TEXT NOT NULL,
|
||||||
|
"queue_name" TEXT NOT NULL,
|
||||||
|
"job_name" TEXT NOT NULL,
|
||||||
|
"host_id" TEXT,
|
||||||
|
"api_id" TEXT,
|
||||||
|
"status" TEXT NOT NULL,
|
||||||
|
"attempt_number" INTEGER NOT NULL DEFAULT 1,
|
||||||
|
"error_message" TEXT,
|
||||||
|
"output" JSONB,
|
||||||
|
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||||
|
"completed_at" TIMESTAMP(3),
|
||||||
|
|
||||||
|
CONSTRAINT "job_history_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "job_history_job_id_idx" ON "job_history"("job_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "job_history_queue_name_idx" ON "job_history"("queue_name");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "job_history_host_id_idx" ON "job_history"("host_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "job_history_api_id_idx" ON "job_history"("api_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "job_history_status_idx" ON "job_history"("status");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "job_history_created_at_idx" ON "job_history"("created_at");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "job_history" ADD CONSTRAINT "job_history_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
|
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "host_group_memberships" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"host_id" TEXT NOT NULL,
|
||||||
|
"host_group_id" TEXT NOT NULL,
|
||||||
|
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
|
||||||
|
CONSTRAINT "host_group_memberships_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "host_group_memberships_host_id_host_group_id_key" ON "host_group_memberships"("host_id", "host_group_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "host_group_memberships_host_id_idx" ON "host_group_memberships"("host_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "host_group_memberships_host_group_id_idx" ON "host_group_memberships"("host_group_id");
|
||||||
|
|
||||||
|
-- Migrate existing data from hosts.host_group_id to host_group_memberships
|
||||||
|
INSERT INTO "host_group_memberships" ("id", "host_id", "host_group_id", "created_at")
|
||||||
|
SELECT
|
||||||
|
gen_random_uuid()::text as "id",
|
||||||
|
"id" as "host_id",
|
||||||
|
"host_group_id" as "host_group_id",
|
||||||
|
CURRENT_TIMESTAMP as "created_at"
|
||||||
|
FROM "hosts"
|
||||||
|
WHERE "host_group_id" IS NOT NULL;
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "host_group_memberships" ADD CONSTRAINT "host_group_memberships_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "host_group_memberships" ADD CONSTRAINT "host_group_memberships_host_group_id_fkey" FOREIGN KEY ("host_group_id") REFERENCES "host_groups"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
-- DropForeignKey
|
||||||
|
ALTER TABLE "hosts" DROP CONSTRAINT IF EXISTS "hosts_host_group_id_fkey";
|
||||||
|
|
||||||
|
-- DropIndex
|
||||||
|
DROP INDEX IF EXISTS "hosts_host_group_id_idx";
|
||||||
|
|
||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "hosts" DROP COLUMN "host_group_id";
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
-- CreateTable
|
|
||||||
CREATE TABLE "user_sessions" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"user_id" TEXT NOT NULL,
|
|
||||||
"refresh_token" TEXT NOT NULL,
|
|
||||||
"access_token_hash" TEXT,
|
|
||||||
"ip_address" TEXT,
|
|
||||||
"user_agent" TEXT,
|
|
||||||
"last_activity" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"expires_at" TIMESTAMP(3) NOT NULL,
|
|
||||||
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"is_revoked" BOOLEAN NOT NULL DEFAULT false,
|
|
||||||
|
|
||||||
CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "user_sessions_refresh_token_key" ON "user_sessions"("refresh_token");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "user_sessions_user_id_idx" ON "user_sessions"("user_id");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "user_sessions_refresh_token_idx" ON "user_sessions"("refresh_token");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "user_sessions_expires_at_idx" ON "user_sessions"("expires_at");
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
@@ -27,10 +27,23 @@ model host_groups {
|
|||||||
color String? @default("#3B82F6")
|
color String? @default("#3B82F6")
|
||||||
created_at DateTime @default(now())
|
created_at DateTime @default(now())
|
||||||
updated_at DateTime
|
updated_at DateTime
|
||||||
hosts hosts[]
|
host_group_memberships host_group_memberships[]
|
||||||
auto_enrollment_tokens auto_enrollment_tokens[]
|
auto_enrollment_tokens auto_enrollment_tokens[]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model host_group_memberships {
|
||||||
|
id String @id
|
||||||
|
host_id String
|
||||||
|
host_group_id String
|
||||||
|
created_at DateTime @default(now())
|
||||||
|
hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade)
|
||||||
|
host_groups host_groups @relation(fields: [host_group_id], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@unique([host_id, host_group_id])
|
||||||
|
@@index([host_id])
|
||||||
|
@@index([host_group_id])
|
||||||
|
}
|
||||||
|
|
||||||
model host_packages {
|
model host_packages {
|
||||||
id String @id
|
id String @id
|
||||||
host_id String
|
host_id String
|
||||||
@@ -80,7 +93,6 @@ model hosts {
|
|||||||
updated_at DateTime
|
updated_at DateTime
|
||||||
api_id String @unique
|
api_id String @unique
|
||||||
api_key String @unique
|
api_key String @unique
|
||||||
host_group_id String?
|
|
||||||
agent_version String?
|
agent_version String?
|
||||||
auto_update Boolean @default(true)
|
auto_update Boolean @default(true)
|
||||||
cpu_cores Int?
|
cpu_cores Int?
|
||||||
@@ -99,8 +111,9 @@ model hosts {
|
|||||||
notes String?
|
notes String?
|
||||||
host_packages host_packages[]
|
host_packages host_packages[]
|
||||||
host_repositories host_repositories[]
|
host_repositories host_repositories[]
|
||||||
host_groups host_groups? @relation(fields: [host_group_id], references: [id])
|
host_group_memberships host_group_memberships[]
|
||||||
update_history update_history[]
|
update_history update_history[]
|
||||||
|
job_history job_history[]
|
||||||
|
|
||||||
@@index([machine_id])
|
@@index([machine_id])
|
||||||
@@index([friendly_name])
|
@@index([friendly_name])
|
||||||
@@ -262,3 +275,89 @@ model auto_enrollment_tokens {
|
|||||||
@@index([token_key])
|
@@index([token_key])
|
||||||
@@index([is_active])
|
@@index([is_active])
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model docker_containers {
|
||||||
|
id String @id
|
||||||
|
host_id String
|
||||||
|
container_id String
|
||||||
|
name String
|
||||||
|
image_id String?
|
||||||
|
image_name String
|
||||||
|
image_tag String @default("latest")
|
||||||
|
status String
|
||||||
|
state String?
|
||||||
|
ports Json?
|
||||||
|
created_at DateTime
|
||||||
|
started_at DateTime?
|
||||||
|
updated_at DateTime
|
||||||
|
last_checked DateTime @default(now())
|
||||||
|
docker_images docker_images? @relation(fields: [image_id], references: [id], onDelete: SetNull)
|
||||||
|
|
||||||
|
@@unique([host_id, container_id])
|
||||||
|
@@index([host_id])
|
||||||
|
@@index([image_id])
|
||||||
|
@@index([status])
|
||||||
|
@@index([name])
|
||||||
|
}
|
||||||
|
|
||||||
|
model docker_images {
|
||||||
|
id String @id
|
||||||
|
repository String
|
||||||
|
tag String @default("latest")
|
||||||
|
image_id String
|
||||||
|
digest String?
|
||||||
|
size_bytes BigInt?
|
||||||
|
source String @default("docker-hub")
|
||||||
|
created_at DateTime
|
||||||
|
last_pulled DateTime?
|
||||||
|
last_checked DateTime @default(now())
|
||||||
|
updated_at DateTime
|
||||||
|
docker_containers docker_containers[]
|
||||||
|
docker_image_updates docker_image_updates[]
|
||||||
|
|
||||||
|
@@unique([repository, tag, image_id])
|
||||||
|
@@index([repository])
|
||||||
|
@@index([source])
|
||||||
|
@@index([repository, tag])
|
||||||
|
}
|
||||||
|
|
||||||
|
model docker_image_updates {
|
||||||
|
id String @id
|
||||||
|
image_id String
|
||||||
|
current_tag String
|
||||||
|
available_tag String
|
||||||
|
is_security_update Boolean @default(false)
|
||||||
|
severity String?
|
||||||
|
changelog_url String?
|
||||||
|
created_at DateTime @default(now())
|
||||||
|
updated_at DateTime
|
||||||
|
docker_images docker_images @relation(fields: [image_id], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@unique([image_id, available_tag])
|
||||||
|
@@index([image_id])
|
||||||
|
@@index([is_security_update])
|
||||||
|
}
|
||||||
|
|
||||||
|
model job_history {
|
||||||
|
id String @id
|
||||||
|
job_id String
|
||||||
|
queue_name String
|
||||||
|
job_name String
|
||||||
|
host_id String?
|
||||||
|
api_id String?
|
||||||
|
status String
|
||||||
|
attempt_number Int @default(1)
|
||||||
|
error_message String?
|
||||||
|
output Json?
|
||||||
|
created_at DateTime @default(now())
|
||||||
|
updated_at DateTime
|
||||||
|
completed_at DateTime?
|
||||||
|
hosts hosts? @relation(fields: [host_id], references: [id], onDelete: SetNull)
|
||||||
|
|
||||||
|
@@index([job_id])
|
||||||
|
@@index([queue_name])
|
||||||
|
@@index([host_id])
|
||||||
|
@@index([api_id])
|
||||||
|
@@index([status])
|
||||||
|
@@index([created_at])
|
||||||
|
}
|
||||||
|
|||||||
416
backend/src/routes/automationRoutes.js
Normal file
416
backend/src/routes/automationRoutes.js
Normal file
@@ -0,0 +1,416 @@
|
|||||||
|
const express = require("express");
|
||||||
|
const { queueManager, QUEUE_NAMES } = require("../services/automation");
|
||||||
|
const { getConnectedApiIds } = require("../services/agentWs");
|
||||||
|
const { authenticateToken } = require("../middleware/auth");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Get all queue statistics
|
||||||
|
router.get("/stats", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await queueManager.getAllQueueStats();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: stats,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching queue stats:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch queue statistics",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get specific queue statistics
|
||||||
|
router.get("/stats/:queueName", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { queueName } = req.params;
|
||||||
|
|
||||||
|
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
|
||||||
|
return res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: "Invalid queue name",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const stats = await queueManager.getQueueStats(queueName);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: stats,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching queue stats:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch queue statistics",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get recent jobs for a queue
|
||||||
|
router.get("/jobs/:queueName", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { queueName } = req.params;
|
||||||
|
const { limit = 10 } = req.query;
|
||||||
|
|
||||||
|
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
|
||||||
|
return res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: "Invalid queue name",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobs = await queueManager.getRecentJobs(
|
||||||
|
queueName,
|
||||||
|
parseInt(limit, 10),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Format jobs for frontend
|
||||||
|
const formattedJobs = jobs.map((job) => ({
|
||||||
|
id: job.id,
|
||||||
|
name: job.name,
|
||||||
|
status: job.finishedOn
|
||||||
|
? job.failedReason
|
||||||
|
? "failed"
|
||||||
|
: "completed"
|
||||||
|
: "active",
|
||||||
|
progress: job.progress,
|
||||||
|
data: job.data,
|
||||||
|
returnvalue: job.returnvalue,
|
||||||
|
failedReason: job.failedReason,
|
||||||
|
processedOn: job.processedOn,
|
||||||
|
finishedOn: job.finishedOn,
|
||||||
|
createdAt: new Date(job.timestamp),
|
||||||
|
attemptsMade: job.attemptsMade,
|
||||||
|
delay: job.delay,
|
||||||
|
}));
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: formattedJobs,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching recent jobs:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch recent jobs",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Trigger manual GitHub update check
|
||||||
|
router.post("/trigger/github-update", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerGitHubUpdateCheck();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "GitHub update check triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering GitHub update check:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger GitHub update check",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Trigger manual session cleanup
|
||||||
|
router.post(
|
||||||
|
"/trigger/session-cleanup",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerSessionCleanup();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "Session cleanup triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering session cleanup:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger session cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Trigger Agent Collection: enqueue report_now for connected agents only
|
||||||
|
router.post(
|
||||||
|
"/trigger/agent-collection",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
|
||||||
|
const apiIds = getConnectedApiIds();
|
||||||
|
if (!apiIds || apiIds.length === 0) {
|
||||||
|
return res.json({ success: true, data: { enqueued: 0 } });
|
||||||
|
}
|
||||||
|
const jobs = apiIds.map((apiId) => ({
|
||||||
|
name: "report_now",
|
||||||
|
data: { api_id: apiId, type: "report_now" },
|
||||||
|
opts: { attempts: 3, backoff: { type: "fixed", delay: 2000 } },
|
||||||
|
}));
|
||||||
|
await queue.addBulk(jobs);
|
||||||
|
res.json({ success: true, data: { enqueued: jobs.length } });
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering agent collection:", error);
|
||||||
|
res
|
||||||
|
.status(500)
|
||||||
|
.json({ success: false, error: "Failed to trigger agent collection" });
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Trigger manual orphaned repo cleanup
|
||||||
|
router.post(
|
||||||
|
"/trigger/orphaned-repo-cleanup",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerOrphanedRepoCleanup();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "Orphaned repository cleanup triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering orphaned repository cleanup:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger orphaned repository cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Trigger manual orphaned package cleanup
|
||||||
|
router.post(
|
||||||
|
"/trigger/orphaned-package-cleanup",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerOrphanedPackageCleanup();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "Orphaned package cleanup triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering orphaned package cleanup:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger orphaned package cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get queue health status
|
||||||
|
router.get("/health", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await queueManager.getAllQueueStats();
|
||||||
|
const totalJobs = Object.values(stats).reduce((sum, queueStats) => {
|
||||||
|
return sum + queueStats.waiting + queueStats.active + queueStats.failed;
|
||||||
|
}, 0);
|
||||||
|
|
||||||
|
const health = {
|
||||||
|
status: "healthy",
|
||||||
|
totalJobs,
|
||||||
|
queues: Object.keys(stats).length,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check for unhealthy conditions
|
||||||
|
if (totalJobs > 1000) {
|
||||||
|
health.status = "warning";
|
||||||
|
health.message = "High number of queued jobs";
|
||||||
|
}
|
||||||
|
|
||||||
|
const failedJobs = Object.values(stats).reduce((sum, queueStats) => {
|
||||||
|
return sum + queueStats.failed;
|
||||||
|
}, 0);
|
||||||
|
|
||||||
|
if (failedJobs > 10) {
|
||||||
|
health.status = "error";
|
||||||
|
health.message = "High number of failed jobs";
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: health,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error checking queue health:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to check queue health",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get automation overview (for dashboard cards)
|
||||||
|
router.get("/overview", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await queueManager.getAllQueueStats();
|
||||||
|
const { getSettings } = require("../services/settingsService");
|
||||||
|
const settings = await getSettings();
|
||||||
|
|
||||||
|
// Get recent jobs for each queue to show last run times
|
||||||
|
const recentJobs = await Promise.all([
|
||||||
|
queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
|
||||||
|
queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
|
||||||
|
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
|
||||||
|
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
|
||||||
|
queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Calculate overview metrics
|
||||||
|
const overview = {
|
||||||
|
scheduledTasks:
|
||||||
|
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
|
||||||
|
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
|
||||||
|
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
|
||||||
|
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed,
|
||||||
|
|
||||||
|
runningTasks:
|
||||||
|
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
|
||||||
|
stats[QUEUE_NAMES.SESSION_CLEANUP].active +
|
||||||
|
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
|
||||||
|
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active,
|
||||||
|
|
||||||
|
failedTasks:
|
||||||
|
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
|
||||||
|
stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
|
||||||
|
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
|
||||||
|
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed,
|
||||||
|
|
||||||
|
totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
|
||||||
|
return (
|
||||||
|
sum +
|
||||||
|
queueStats.completed +
|
||||||
|
queueStats.failed +
|
||||||
|
queueStats.active +
|
||||||
|
queueStats.waiting +
|
||||||
|
queueStats.delayed
|
||||||
|
);
|
||||||
|
}, 0),
|
||||||
|
|
||||||
|
// Automation details with last run times
|
||||||
|
automations: [
|
||||||
|
{
|
||||||
|
name: "GitHub Update Check",
|
||||||
|
queue: QUEUE_NAMES.GITHUB_UPDATE_CHECK,
|
||||||
|
description: "Checks for new PatchMon releases",
|
||||||
|
schedule: "Daily at midnight",
|
||||||
|
lastRun: recentJobs[0][0]?.finishedOn
|
||||||
|
? new Date(recentJobs[0][0].finishedOn).toLocaleString()
|
||||||
|
: "Never",
|
||||||
|
lastRunTimestamp: recentJobs[0][0]?.finishedOn || 0,
|
||||||
|
status: recentJobs[0][0]?.failedReason
|
||||||
|
? "Failed"
|
||||||
|
: recentJobs[0][0]
|
||||||
|
? "Success"
|
||||||
|
: "Never run",
|
||||||
|
stats: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Session Cleanup",
|
||||||
|
queue: QUEUE_NAMES.SESSION_CLEANUP,
|
||||||
|
description: "Cleans up expired user sessions",
|
||||||
|
schedule: "Every hour",
|
||||||
|
lastRun: recentJobs[1][0]?.finishedOn
|
||||||
|
? new Date(recentJobs[1][0].finishedOn).toLocaleString()
|
||||||
|
: "Never",
|
||||||
|
lastRunTimestamp: recentJobs[1][0]?.finishedOn || 0,
|
||||||
|
status: recentJobs[1][0]?.failedReason
|
||||||
|
? "Failed"
|
||||||
|
: recentJobs[1][0]
|
||||||
|
? "Success"
|
||||||
|
: "Never run",
|
||||||
|
stats: stats[QUEUE_NAMES.SESSION_CLEANUP],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Orphaned Repo Cleanup",
|
||||||
|
queue: QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
|
||||||
|
description: "Removes repositories with no associated hosts",
|
||||||
|
schedule: "Daily at 2 AM",
|
||||||
|
lastRun: recentJobs[2][0]?.finishedOn
|
||||||
|
? new Date(recentJobs[2][0].finishedOn).toLocaleString()
|
||||||
|
: "Never",
|
||||||
|
lastRunTimestamp: recentJobs[2][0]?.finishedOn || 0,
|
||||||
|
status: recentJobs[2][0]?.failedReason
|
||||||
|
? "Failed"
|
||||||
|
: recentJobs[2][0]
|
||||||
|
? "Success"
|
||||||
|
: "Never run",
|
||||||
|
stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Orphaned Package Cleanup",
|
||||||
|
queue: QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
|
||||||
|
description: "Removes packages with no associated hosts",
|
||||||
|
schedule: "Daily at 3 AM",
|
||||||
|
lastRun: recentJobs[3][0]?.finishedOn
|
||||||
|
? new Date(recentJobs[3][0].finishedOn).toLocaleString()
|
||||||
|
: "Never",
|
||||||
|
lastRunTimestamp: recentJobs[3][0]?.finishedOn || 0,
|
||||||
|
status: recentJobs[3][0]?.failedReason
|
||||||
|
? "Failed"
|
||||||
|
: recentJobs[3][0]
|
||||||
|
? "Success"
|
||||||
|
: "Never run",
|
||||||
|
stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Collect Host Statistics",
|
||||||
|
queue: QUEUE_NAMES.AGENT_COMMANDS,
|
||||||
|
description: "Collects package statistics from connected agents only",
|
||||||
|
schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
|
||||||
|
lastRun: recentJobs[4][0]?.finishedOn
|
||||||
|
? new Date(recentJobs[4][0].finishedOn).toLocaleString()
|
||||||
|
: "Never",
|
||||||
|
lastRunTimestamp: recentJobs[4][0]?.finishedOn || 0,
|
||||||
|
status: recentJobs[4][0]?.failedReason
|
||||||
|
? "Failed"
|
||||||
|
: recentJobs[4][0]
|
||||||
|
? "Success"
|
||||||
|
: "Never run",
|
||||||
|
stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
|
||||||
|
},
|
||||||
|
].sort((a, b) => {
|
||||||
|
// Sort by last run timestamp (most recent first)
|
||||||
|
// If both have never run (timestamp 0), maintain original order
|
||||||
|
if (a.lastRunTimestamp === 0 && b.lastRunTimestamp === 0) return 0;
|
||||||
|
if (a.lastRunTimestamp === 0) return 1; // Never run goes to bottom
|
||||||
|
if (b.lastRunTimestamp === 0) return -1; // Never run goes to bottom
|
||||||
|
return b.lastRunTimestamp - a.lastRunTimestamp; // Most recent first
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: overview,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching automation overview:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch automation overview",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
@@ -8,6 +8,7 @@ const {
|
|||||||
requireViewPackages,
|
requireViewPackages,
|
||||||
requireViewUsers,
|
requireViewUsers,
|
||||||
} = require("../middleware/permissions");
|
} = require("../middleware/permissions");
|
||||||
|
const { queueManager } = require("../services/automation");
|
||||||
|
|
||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
const prisma = new PrismaClient();
|
const prisma = new PrismaClient();
|
||||||
@@ -200,6 +201,9 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
|
|||||||
agent_version: true,
|
agent_version: true,
|
||||||
auto_update: true,
|
auto_update: true,
|
||||||
notes: true,
|
notes: true,
|
||||||
|
api_id: true,
|
||||||
|
host_group_memberships: {
|
||||||
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
@@ -207,6 +211,8 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
|
|||||||
color: true,
|
color: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
_count: {
|
_count: {
|
||||||
select: {
|
select: {
|
||||||
host_packages: {
|
host_packages: {
|
||||||
@@ -353,6 +359,8 @@ router.get(
|
|||||||
const [host, totalHistoryCount] = await Promise.all([
|
const [host, totalHistoryCount] = await Promise.all([
|
||||||
prisma.hosts.findUnique({
|
prisma.hosts.findUnique({
|
||||||
where: { id: hostId },
|
where: { id: hostId },
|
||||||
|
include: {
|
||||||
|
host_group_memberships: {
|
||||||
include: {
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
@@ -361,6 +369,8 @@ router.get(
|
|||||||
color: true,
|
color: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
host_packages: {
|
host_packages: {
|
||||||
include: {
|
include: {
|
||||||
packages: true,
|
packages: true,
|
||||||
@@ -413,6 +423,51 @@ router.get(
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Get agent queue status for a specific host
|
||||||
|
router.get(
|
||||||
|
"/hosts/:hostId/queue",
|
||||||
|
authenticateToken,
|
||||||
|
requireViewHosts,
|
||||||
|
async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { hostId } = req.params;
|
||||||
|
const { limit = 20 } = req.query;
|
||||||
|
|
||||||
|
// Get the host to find its API ID
|
||||||
|
const host = await prisma.hosts.findUnique({
|
||||||
|
where: { id: hostId },
|
||||||
|
select: { api_id: true, friendly_name: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!host) {
|
||||||
|
return res.status(404).json({ error: "Host not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get queue jobs for this host
|
||||||
|
const queueData = await queueManager.getHostJobs(
|
||||||
|
host.api_id,
|
||||||
|
parseInt(limit, 10),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
hostId,
|
||||||
|
apiId: host.api_id,
|
||||||
|
friendlyName: host.friendly_name,
|
||||||
|
...queueData,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching host queue status:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch host queue status",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
// Get recent users ordered by last_login desc
|
// Get recent users ordered by last_login desc
|
||||||
router.get(
|
router.get(
|
||||||
"/recent-users",
|
"/recent-users",
|
||||||
@@ -511,22 +566,34 @@ router.get(
|
|||||||
packages_count: true,
|
packages_count: true,
|
||||||
security_count: true,
|
security_count: true,
|
||||||
total_packages: true,
|
total_packages: true,
|
||||||
|
host_id: true,
|
||||||
|
status: true,
|
||||||
},
|
},
|
||||||
orderBy: {
|
orderBy: {
|
||||||
timestamp: "asc",
|
timestamp: "asc",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Process data to show actual values (no averaging)
|
// Enhanced data validation and processing
|
||||||
const processedData = trendsData
|
const processedData = trendsData
|
||||||
.filter((record) => record.total_packages !== null) // Only include records with valid data
|
.filter((record) => {
|
||||||
|
// Enhanced validation
|
||||||
|
return (
|
||||||
|
record.total_packages !== null &&
|
||||||
|
record.total_packages >= 0 &&
|
||||||
|
record.packages_count >= 0 &&
|
||||||
|
record.security_count >= 0 &&
|
||||||
|
record.security_count <= record.packages_count && // Security can't exceed outdated
|
||||||
|
record.status === "success"
|
||||||
|
); // Only include successful reports
|
||||||
|
})
|
||||||
.map((record) => {
|
.map((record) => {
|
||||||
const date = new Date(record.timestamp);
|
const date = new Date(record.timestamp);
|
||||||
let timeKey;
|
let timeKey;
|
||||||
|
|
||||||
if (daysInt <= 1) {
|
if (daysInt <= 1) {
|
||||||
// For hourly view, use exact timestamp
|
// For hourly view, group by hour only (not minutes)
|
||||||
timeKey = date.toISOString().substring(0, 16); // YYYY-MM-DDTHH:MM
|
timeKey = date.toISOString().substring(0, 13); // YYYY-MM-DDTHH
|
||||||
} else {
|
} else {
|
||||||
// For daily view, group by day
|
// For daily view, group by day
|
||||||
timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
|
timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
|
||||||
@@ -537,64 +604,342 @@ router.get(
|
|||||||
total_packages: record.total_packages,
|
total_packages: record.total_packages,
|
||||||
packages_count: record.packages_count || 0,
|
packages_count: record.packages_count || 0,
|
||||||
security_count: record.security_count || 0,
|
security_count: record.security_count || 0,
|
||||||
|
host_id: record.host_id,
|
||||||
|
timestamp: record.timestamp,
|
||||||
};
|
};
|
||||||
})
|
});
|
||||||
.sort((a, b) => a.timeKey.localeCompare(b.timeKey)); // Sort by time
|
|
||||||
|
|
||||||
// Get hosts list for dropdown (always fetch for dropdown functionality)
|
// Determine if we need aggregation based on host filter
|
||||||
|
const needsAggregation =
|
||||||
|
!hostId || hostId === "all" || hostId === "undefined";
|
||||||
|
|
||||||
|
let aggregatedArray;
|
||||||
|
|
||||||
|
if (needsAggregation) {
|
||||||
|
// For "All Hosts" mode, we need to calculate the actual total packages differently
|
||||||
|
// Instead of aggregating historical data (which is per-host), we'll use the current total
|
||||||
|
// and show that as a flat line, since total packages don't change much over time
|
||||||
|
|
||||||
|
// Get the current total packages count (unique packages across all hosts)
|
||||||
|
const currentTotalPackages = await prisma.packages.count({
|
||||||
|
where: {
|
||||||
|
host_packages: {
|
||||||
|
some: {}, // At least one host has this package
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Aggregate data by timeKey when looking at "All Hosts" or no specific host
|
||||||
|
const aggregatedData = processedData.reduce((acc, item) => {
|
||||||
|
if (!acc[item.timeKey]) {
|
||||||
|
acc[item.timeKey] = {
|
||||||
|
timeKey: item.timeKey,
|
||||||
|
total_packages: currentTotalPackages, // Use current total packages
|
||||||
|
packages_count: 0,
|
||||||
|
security_count: 0,
|
||||||
|
record_count: 0,
|
||||||
|
host_ids: new Set(),
|
||||||
|
min_timestamp: item.timestamp,
|
||||||
|
max_timestamp: item.timestamp,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// For outdated and security packages: SUM (these represent counts across hosts)
|
||||||
|
acc[item.timeKey].packages_count += item.packages_count;
|
||||||
|
acc[item.timeKey].security_count += item.security_count;
|
||||||
|
|
||||||
|
acc[item.timeKey].record_count += 1;
|
||||||
|
acc[item.timeKey].host_ids.add(item.host_id);
|
||||||
|
|
||||||
|
// Track timestamp range
|
||||||
|
if (item.timestamp < acc[item.timeKey].min_timestamp) {
|
||||||
|
acc[item.timeKey].min_timestamp = item.timestamp;
|
||||||
|
}
|
||||||
|
if (item.timestamp > acc[item.timeKey].max_timestamp) {
|
||||||
|
acc[item.timeKey].max_timestamp = item.timestamp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
// Convert to array and add metadata
|
||||||
|
aggregatedArray = Object.values(aggregatedData)
|
||||||
|
.map((item) => ({
|
||||||
|
...item,
|
||||||
|
host_count: item.host_ids.size,
|
||||||
|
host_ids: Array.from(item.host_ids),
|
||||||
|
}))
|
||||||
|
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
|
||||||
|
} else {
|
||||||
|
// For specific host, show individual data points without aggregation
|
||||||
|
// But still group by timeKey to handle multiple reports from same host in same time period
|
||||||
|
const hostAggregatedData = processedData.reduce((acc, item) => {
|
||||||
|
if (!acc[item.timeKey]) {
|
||||||
|
acc[item.timeKey] = {
|
||||||
|
timeKey: item.timeKey,
|
||||||
|
total_packages: 0,
|
||||||
|
packages_count: 0,
|
||||||
|
security_count: 0,
|
||||||
|
record_count: 0,
|
||||||
|
host_ids: new Set([item.host_id]),
|
||||||
|
min_timestamp: item.timestamp,
|
||||||
|
max_timestamp: item.timestamp,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// For same host, take the latest values (not sum)
|
||||||
|
// This handles cases where a host reports multiple times in the same time period
|
||||||
|
if (item.timestamp > acc[item.timeKey].max_timestamp) {
|
||||||
|
acc[item.timeKey].total_packages = item.total_packages;
|
||||||
|
acc[item.timeKey].packages_count = item.packages_count;
|
||||||
|
acc[item.timeKey].security_count = item.security_count;
|
||||||
|
acc[item.timeKey].max_timestamp = item.timestamp;
|
||||||
|
}
|
||||||
|
|
||||||
|
acc[item.timeKey].record_count += 1;
|
||||||
|
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
// Convert to array
|
||||||
|
aggregatedArray = Object.values(hostAggregatedData)
|
||||||
|
.map((item) => ({
|
||||||
|
...item,
|
||||||
|
host_count: item.host_ids.size,
|
||||||
|
host_ids: Array.from(item.host_ids),
|
||||||
|
}))
|
||||||
|
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle sparse data by filling missing time periods
|
||||||
|
const fillMissingPeriods = (data, daysInt) => {
|
||||||
|
const filledData = [];
|
||||||
|
const startDate = new Date();
|
||||||
|
startDate.setDate(startDate.getDate() - daysInt);
|
||||||
|
|
||||||
|
const dataMap = new Map(data.map((item) => [item.timeKey, item]));
|
||||||
|
|
||||||
|
const endDate = new Date();
|
||||||
|
const currentDate = new Date(startDate);
|
||||||
|
|
||||||
|
// Find the last known values for interpolation
|
||||||
|
let lastKnownValues = null;
|
||||||
|
if (data.length > 0) {
|
||||||
|
lastKnownValues = {
|
||||||
|
total_packages: data[0].total_packages,
|
||||||
|
packages_count: data[0].packages_count,
|
||||||
|
security_count: data[0].security_count,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
while (currentDate <= endDate) {
|
||||||
|
let timeKey;
|
||||||
|
if (daysInt <= 1) {
|
||||||
|
timeKey = currentDate.toISOString().substring(0, 13); // Hourly
|
||||||
|
currentDate.setHours(currentDate.getHours() + 1);
|
||||||
|
} else {
|
||||||
|
timeKey = currentDate.toISOString().split("T")[0]; // Daily
|
||||||
|
currentDate.setDate(currentDate.getDate() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dataMap.has(timeKey)) {
|
||||||
|
const item = dataMap.get(timeKey);
|
||||||
|
filledData.push(item);
|
||||||
|
// Update last known values
|
||||||
|
lastKnownValues = {
|
||||||
|
total_packages: item.total_packages,
|
||||||
|
packages_count: item.packages_count,
|
||||||
|
security_count: item.security_count,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// For missing periods, use the last known values (interpolation)
|
||||||
|
// This creates a continuous line instead of gaps
|
||||||
|
filledData.push({
|
||||||
|
timeKey,
|
||||||
|
total_packages: lastKnownValues?.total_packages || 0,
|
||||||
|
packages_count: lastKnownValues?.packages_count || 0,
|
||||||
|
security_count: lastKnownValues?.security_count || 0,
|
||||||
|
record_count: 0,
|
||||||
|
host_count: 0,
|
||||||
|
host_ids: [],
|
||||||
|
min_timestamp: null,
|
||||||
|
max_timestamp: null,
|
||||||
|
isInterpolated: true, // Mark as interpolated for debugging
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filledData;
|
||||||
|
};
|
||||||
|
|
||||||
|
const finalProcessedData = fillMissingPeriods(aggregatedArray, daysInt);
|
||||||
|
|
||||||
|
// Get hosts list for dropdown
|
||||||
const hostsList = await prisma.hosts.findMany({
|
const hostsList = await prisma.hosts.findMany({
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
friendly_name: true,
|
friendly_name: true,
|
||||||
hostname: true,
|
hostname: true,
|
||||||
|
last_update: true,
|
||||||
|
status: true,
|
||||||
},
|
},
|
||||||
orderBy: {
|
orderBy: {
|
||||||
friendly_name: "asc",
|
friendly_name: "asc",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Get current package state for offline fallback
|
||||||
|
let currentPackageState = null;
|
||||||
|
if (hostId && hostId !== "all" && hostId !== "undefined") {
|
||||||
|
// Get current package counts for specific host
|
||||||
|
const currentState = await prisma.host_packages.aggregate({
|
||||||
|
where: {
|
||||||
|
host_id: hostId,
|
||||||
|
},
|
||||||
|
_count: {
|
||||||
|
id: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get counts for boolean fields separately
|
||||||
|
const outdatedCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
host_id: hostId,
|
||||||
|
needs_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const securityCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
host_id: hostId,
|
||||||
|
is_security_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
currentPackageState = {
|
||||||
|
total_packages: currentState._count.id,
|
||||||
|
packages_count: outdatedCount,
|
||||||
|
security_count: securityCount,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// Get current package counts for all hosts
|
||||||
|
// Total packages = count of unique packages installed on at least one host
|
||||||
|
const totalPackagesCount = await prisma.packages.count({
|
||||||
|
where: {
|
||||||
|
host_packages: {
|
||||||
|
some: {}, // At least one host has this package
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get counts for boolean fields separately
|
||||||
|
const outdatedCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
needs_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const securityCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
is_security_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
currentPackageState = {
|
||||||
|
total_packages: totalPackagesCount,
|
||||||
|
packages_count: outdatedCount,
|
||||||
|
security_count: securityCount,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
// Format data for chart
|
// Format data for chart
|
||||||
const chartData = {
|
const chartData = {
|
||||||
labels: [],
|
labels: [],
|
||||||
datasets: [
|
datasets: [
|
||||||
{
|
{
|
||||||
label: "Total Packages",
|
label: needsAggregation
|
||||||
|
? "Total Packages (All Hosts)"
|
||||||
|
: "Total Packages",
|
||||||
data: [],
|
data: [],
|
||||||
borderColor: "#3B82F6", // Blue
|
borderColor: "#3B82F6", // Blue
|
||||||
backgroundColor: "rgba(59, 130, 246, 0.1)",
|
backgroundColor: "rgba(59, 130, 246, 0.1)",
|
||||||
tension: 0.4,
|
tension: 0.4,
|
||||||
hidden: true, // Hidden by default
|
hidden: true, // Hidden by default
|
||||||
|
spanGaps: true, // Connect lines across missing data
|
||||||
|
pointRadius: 3,
|
||||||
|
pointHoverRadius: 5,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: "Outdated Packages",
|
label: needsAggregation
|
||||||
|
? "Total Outdated Packages"
|
||||||
|
: "Outdated Packages",
|
||||||
data: [],
|
data: [],
|
||||||
borderColor: "#F59E0B", // Orange
|
borderColor: "#F59E0B", // Orange
|
||||||
backgroundColor: "rgba(245, 158, 11, 0.1)",
|
backgroundColor: "rgba(245, 158, 11, 0.1)",
|
||||||
tension: 0.4,
|
tension: 0.4,
|
||||||
|
spanGaps: true, // Connect lines across missing data
|
||||||
|
pointRadius: 3,
|
||||||
|
pointHoverRadius: 5,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: "Security Packages",
|
label: needsAggregation
|
||||||
|
? "Total Security Packages"
|
||||||
|
: "Security Packages",
|
||||||
data: [],
|
data: [],
|
||||||
borderColor: "#EF4444", // Red
|
borderColor: "#EF4444", // Red
|
||||||
backgroundColor: "rgba(239, 68, 68, 0.1)",
|
backgroundColor: "rgba(239, 68, 68, 0.1)",
|
||||||
tension: 0.4,
|
tension: 0.4,
|
||||||
|
spanGaps: true, // Connect lines across missing data
|
||||||
|
pointRadius: 3,
|
||||||
|
pointHoverRadius: 5,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
||||||
// Process aggregated data
|
// Process aggregated data
|
||||||
processedData.forEach((item) => {
|
finalProcessedData.forEach((item) => {
|
||||||
chartData.labels.push(item.timeKey);
|
chartData.labels.push(item.timeKey);
|
||||||
chartData.datasets[0].data.push(item.total_packages);
|
chartData.datasets[0].data.push(item.total_packages);
|
||||||
chartData.datasets[1].data.push(item.packages_count);
|
chartData.datasets[1].data.push(item.packages_count);
|
||||||
chartData.datasets[2].data.push(item.security_count);
|
chartData.datasets[2].data.push(item.security_count);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Calculate data quality metrics
|
||||||
|
const dataQuality = {
|
||||||
|
totalRecords: trendsData.length,
|
||||||
|
validRecords: processedData.length,
|
||||||
|
aggregatedPoints: aggregatedArray.length,
|
||||||
|
filledPoints: finalProcessedData.length,
|
||||||
|
recordsWithNullTotal: trendsData.filter(
|
||||||
|
(r) => r.total_packages === null,
|
||||||
|
).length,
|
||||||
|
recordsWithInvalidData: trendsData.length - processedData.length,
|
||||||
|
successfulReports: trendsData.filter((r) => r.status === "success")
|
||||||
|
.length,
|
||||||
|
failedReports: trendsData.filter((r) => r.status === "error").length,
|
||||||
|
};
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
chartData,
|
chartData,
|
||||||
hosts: hostsList,
|
hosts: hostsList,
|
||||||
period: daysInt,
|
period: daysInt,
|
||||||
hostId: hostId || "all",
|
hostId: hostId || "all",
|
||||||
|
currentPackageState,
|
||||||
|
dataQuality,
|
||||||
|
aggregationInfo: {
|
||||||
|
hasData: aggregatedArray.length > 0,
|
||||||
|
hasGaps: finalProcessedData.some((item) => item.record_count === 0),
|
||||||
|
lastDataPoint:
|
||||||
|
aggregatedArray.length > 0
|
||||||
|
? aggregatedArray[aggregatedArray.length - 1]
|
||||||
|
: null,
|
||||||
|
aggregationMode: needsAggregation
|
||||||
|
? "sum_across_hosts"
|
||||||
|
: "individual_host_data",
|
||||||
|
explanation: needsAggregation
|
||||||
|
? "Data is summed across all hosts for each time period"
|
||||||
|
: "Data shows individual host values without cross-host aggregation",
|
||||||
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error fetching package trends:", error);
|
console.error("Error fetching package trends:", error);
|
||||||
@@ -603,4 +948,348 @@ router.get(
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Diagnostic endpoint to investigate package spikes
|
||||||
|
router.get(
|
||||||
|
"/package-spike-analysis",
|
||||||
|
authenticateToken,
|
||||||
|
requireViewHosts,
|
||||||
|
async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { date, time, hours = 2 } = req.query;
|
||||||
|
|
||||||
|
if (!date || !time) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error:
|
||||||
|
"Date and time parameters are required. Format: date=2025-10-17&time=18:00",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the specific date and time
|
||||||
|
const targetDateTime = new Date(`${date}T${time}:00`);
|
||||||
|
const startTime = new Date(targetDateTime);
|
||||||
|
startTime.setHours(startTime.getHours() - parseInt(hours, 10));
|
||||||
|
const endTime = new Date(targetDateTime);
|
||||||
|
endTime.setHours(endTime.getHours() + parseInt(hours, 10));
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`Analyzing package spike around ${targetDateTime.toISOString()}`,
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
`Time range: ${startTime.toISOString()} to ${endTime.toISOString()}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get all update history records in the time window
|
||||||
|
const spikeData = await prisma.update_history.findMany({
|
||||||
|
where: {
|
||||||
|
timestamp: {
|
||||||
|
gte: startTime,
|
||||||
|
lte: endTime,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
host_id: true,
|
||||||
|
timestamp: true,
|
||||||
|
packages_count: true,
|
||||||
|
security_count: true,
|
||||||
|
total_packages: true,
|
||||||
|
status: true,
|
||||||
|
error_message: true,
|
||||||
|
execution_time: true,
|
||||||
|
payload_size_kb: true,
|
||||||
|
hosts: {
|
||||||
|
select: {
|
||||||
|
friendly_name: true,
|
||||||
|
hostname: true,
|
||||||
|
os_type: true,
|
||||||
|
os_version: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: {
|
||||||
|
timestamp: "asc",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Analyze the data
|
||||||
|
const analysis = {
|
||||||
|
timeWindow: {
|
||||||
|
start: startTime.toISOString(),
|
||||||
|
end: endTime.toISOString(),
|
||||||
|
target: targetDateTime.toISOString(),
|
||||||
|
},
|
||||||
|
totalRecords: spikeData.length,
|
||||||
|
successfulReports: spikeData.filter((r) => r.status === "success")
|
||||||
|
.length,
|
||||||
|
failedReports: spikeData.filter((r) => r.status === "error").length,
|
||||||
|
uniqueHosts: [...new Set(spikeData.map((r) => r.host_id))].length,
|
||||||
|
hosts: {},
|
||||||
|
timeline: [],
|
||||||
|
summary: {
|
||||||
|
maxPackagesCount: 0,
|
||||||
|
maxSecurityCount: 0,
|
||||||
|
maxTotalPackages: 0,
|
||||||
|
avgPackagesCount: 0,
|
||||||
|
avgSecurityCount: 0,
|
||||||
|
avgTotalPackages: 0,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Group by host and analyze each host's behavior
|
||||||
|
spikeData.forEach((record) => {
|
||||||
|
const hostId = record.host_id;
|
||||||
|
if (!analysis.hosts[hostId]) {
|
||||||
|
analysis.hosts[hostId] = {
|
||||||
|
hostInfo: record.hosts,
|
||||||
|
records: [],
|
||||||
|
summary: {
|
||||||
|
totalReports: 0,
|
||||||
|
successfulReports: 0,
|
||||||
|
failedReports: 0,
|
||||||
|
maxPackagesCount: 0,
|
||||||
|
maxSecurityCount: 0,
|
||||||
|
maxTotalPackages: 0,
|
||||||
|
avgPackagesCount: 0,
|
||||||
|
avgSecurityCount: 0,
|
||||||
|
avgTotalPackages: 0,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
analysis.hosts[hostId].records.push({
|
||||||
|
timestamp: record.timestamp,
|
||||||
|
packages_count: record.packages_count,
|
||||||
|
security_count: record.security_count,
|
||||||
|
total_packages: record.total_packages,
|
||||||
|
status: record.status,
|
||||||
|
error_message: record.error_message,
|
||||||
|
execution_time: record.execution_time,
|
||||||
|
payload_size_kb: record.payload_size_kb,
|
||||||
|
});
|
||||||
|
|
||||||
|
analysis.hosts[hostId].summary.totalReports++;
|
||||||
|
if (record.status === "success") {
|
||||||
|
analysis.hosts[hostId].summary.successfulReports++;
|
||||||
|
analysis.hosts[hostId].summary.maxPackagesCount = Math.max(
|
||||||
|
analysis.hosts[hostId].summary.maxPackagesCount,
|
||||||
|
record.packages_count,
|
||||||
|
);
|
||||||
|
analysis.hosts[hostId].summary.maxSecurityCount = Math.max(
|
||||||
|
analysis.hosts[hostId].summary.maxSecurityCount,
|
||||||
|
record.security_count,
|
||||||
|
);
|
||||||
|
analysis.hosts[hostId].summary.maxTotalPackages = Math.max(
|
||||||
|
analysis.hosts[hostId].summary.maxTotalPackages,
|
||||||
|
record.total_packages || 0,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
analysis.hosts[hostId].summary.failedReports++;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Calculate averages for each host
|
||||||
|
Object.keys(analysis.hosts).forEach((hostId) => {
|
||||||
|
const host = analysis.hosts[hostId];
|
||||||
|
const successfulRecords = host.records.filter(
|
||||||
|
(r) => r.status === "success",
|
||||||
|
);
|
||||||
|
|
||||||
|
if (successfulRecords.length > 0) {
|
||||||
|
host.summary.avgPackagesCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.packages_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
host.summary.avgSecurityCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.security_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
host.summary.avgTotalPackages = Math.round(
|
||||||
|
successfulRecords.reduce(
|
||||||
|
(sum, r) => sum + (r.total_packages || 0),
|
||||||
|
0,
|
||||||
|
) / successfulRecords.length,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create timeline with hourly/daily aggregation
|
||||||
|
const timelineMap = new Map();
|
||||||
|
spikeData.forEach((record) => {
|
||||||
|
const timeKey = record.timestamp.toISOString().substring(0, 13); // Hourly
|
||||||
|
if (!timelineMap.has(timeKey)) {
|
||||||
|
timelineMap.set(timeKey, {
|
||||||
|
timestamp: timeKey,
|
||||||
|
totalReports: 0,
|
||||||
|
successfulReports: 0,
|
||||||
|
failedReports: 0,
|
||||||
|
totalPackagesCount: 0,
|
||||||
|
totalSecurityCount: 0,
|
||||||
|
totalTotalPackages: 0,
|
||||||
|
uniqueHosts: new Set(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const timelineEntry = timelineMap.get(timeKey);
|
||||||
|
timelineEntry.totalReports++;
|
||||||
|
timelineEntry.uniqueHosts.add(record.host_id);
|
||||||
|
|
||||||
|
if (record.status === "success") {
|
||||||
|
timelineEntry.successfulReports++;
|
||||||
|
timelineEntry.totalPackagesCount += record.packages_count;
|
||||||
|
timelineEntry.totalSecurityCount += record.security_count;
|
||||||
|
timelineEntry.totalTotalPackages += record.total_packages || 0;
|
||||||
|
} else {
|
||||||
|
timelineEntry.failedReports++;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Convert timeline map to array
|
||||||
|
analysis.timeline = Array.from(timelineMap.values())
|
||||||
|
.map((entry) => ({
|
||||||
|
...entry,
|
||||||
|
uniqueHosts: entry.uniqueHosts.size,
|
||||||
|
}))
|
||||||
|
.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
|
||||||
|
|
||||||
|
// Calculate overall summary
|
||||||
|
const successfulRecords = spikeData.filter((r) => r.status === "success");
|
||||||
|
if (successfulRecords.length > 0) {
|
||||||
|
analysis.summary.maxPackagesCount = Math.max(
|
||||||
|
...successfulRecords.map((r) => r.packages_count),
|
||||||
|
);
|
||||||
|
analysis.summary.maxSecurityCount = Math.max(
|
||||||
|
...successfulRecords.map((r) => r.security_count),
|
||||||
|
);
|
||||||
|
analysis.summary.maxTotalPackages = Math.max(
|
||||||
|
...successfulRecords.map((r) => r.total_packages || 0),
|
||||||
|
);
|
||||||
|
analysis.summary.avgPackagesCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.packages_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
analysis.summary.avgSecurityCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.security_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
analysis.summary.avgTotalPackages = Math.round(
|
||||||
|
successfulRecords.reduce(
|
||||||
|
(sum, r) => sum + (r.total_packages || 0),
|
||||||
|
0,
|
||||||
|
) / successfulRecords.length,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Identify potential causes of the spike
|
||||||
|
const potentialCauses = [];
|
||||||
|
|
||||||
|
// Check for hosts with unusually high package counts
|
||||||
|
Object.keys(analysis.hosts).forEach((hostId) => {
|
||||||
|
const host = analysis.hosts[hostId];
|
||||||
|
if (
|
||||||
|
host.summary.maxPackagesCount >
|
||||||
|
analysis.summary.avgPackagesCount * 2
|
||||||
|
) {
|
||||||
|
potentialCauses.push({
|
||||||
|
type: "high_package_count",
|
||||||
|
hostId,
|
||||||
|
hostName: host.hostInfo.friendly_name || host.hostInfo.hostname,
|
||||||
|
value: host.summary.maxPackagesCount,
|
||||||
|
avg: analysis.summary.avgPackagesCount,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check for multiple hosts reporting at the same time (this explains the 500 vs 59 discrepancy)
|
||||||
|
const concurrentReports = analysis.timeline.filter(
|
||||||
|
(entry) => entry.uniqueHosts > 1,
|
||||||
|
);
|
||||||
|
if (concurrentReports.length > 0) {
|
||||||
|
potentialCauses.push({
|
||||||
|
type: "concurrent_reports",
|
||||||
|
description:
|
||||||
|
"Multiple hosts reported simultaneously - this explains why chart shows higher numbers than individual host reports",
|
||||||
|
count: concurrentReports.length,
|
||||||
|
details: concurrentReports.map((entry) => ({
|
||||||
|
timestamp: entry.timestamp,
|
||||||
|
totalPackagesCount: entry.totalPackagesCount,
|
||||||
|
uniqueHosts: entry.uniqueHosts,
|
||||||
|
avgPerHost: Math.round(
|
||||||
|
entry.totalPackagesCount / entry.uniqueHosts,
|
||||||
|
),
|
||||||
|
})),
|
||||||
|
explanation:
|
||||||
|
"The chart sums package counts across all hosts. If multiple hosts report at the same time, the chart shows the total sum, not individual host counts.",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for failed reports that might indicate system issues
|
||||||
|
if (analysis.failedReports > 0) {
|
||||||
|
potentialCauses.push({
|
||||||
|
type: "failed_reports",
|
||||||
|
count: analysis.failedReports,
|
||||||
|
percentage: Math.round(
|
||||||
|
(analysis.failedReports / analysis.totalRecords) * 100,
|
||||||
|
),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add aggregation explanation
|
||||||
|
const aggregationExplanation = {
|
||||||
|
type: "aggregation_explanation",
|
||||||
|
description: "Chart Aggregation Logic",
|
||||||
|
details: {
|
||||||
|
howItWorks:
|
||||||
|
"The package trends chart sums package counts across all hosts for each time period",
|
||||||
|
individualHosts:
|
||||||
|
"Each host reports its own package count (e.g., 59 packages)",
|
||||||
|
chartDisplay:
|
||||||
|
"Chart shows the sum of all hosts' package counts (e.g., 59 + other hosts = 500)",
|
||||||
|
timeGrouping:
|
||||||
|
"Multiple hosts reporting in the same hour/day are aggregated together",
|
||||||
|
},
|
||||||
|
example: {
|
||||||
|
host1: "Host A reports 59 outdated packages",
|
||||||
|
host2: "Host B reports 120 outdated packages",
|
||||||
|
host3: "Host C reports 321 outdated packages",
|
||||||
|
chartShows: "Chart displays 500 total packages (59+120+321)",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
potentialCauses.push(aggregationExplanation);
|
||||||
|
|
||||||
|
// Add specific host breakdown if a host ID is provided
|
||||||
|
let specificHostAnalysis = null;
|
||||||
|
if (req.query.hostId) {
|
||||||
|
const hostId = req.query.hostId;
|
||||||
|
const hostData = analysis.hosts[hostId];
|
||||||
|
if (hostData) {
|
||||||
|
specificHostAnalysis = {
|
||||||
|
hostId,
|
||||||
|
hostInfo: hostData.hostInfo,
|
||||||
|
summary: hostData.summary,
|
||||||
|
records: hostData.records,
|
||||||
|
explanation: `This host reported ${hostData.summary.maxPackagesCount} outdated packages, but the chart shows ${analysis.summary.maxPackagesCount} because it sums across all hosts that reported at the same time.`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
analysis,
|
||||||
|
potentialCauses,
|
||||||
|
specificHostAnalysis,
|
||||||
|
recommendations: [
|
||||||
|
"Check if any hosts had major package updates around this time",
|
||||||
|
"Verify if any new hosts were added to the system",
|
||||||
|
"Check for system maintenance or updates that might have triggered package checks",
|
||||||
|
"Review any automation or scheduled tasks that run around 6pm",
|
||||||
|
"Check if any repositories were updated or new packages were released",
|
||||||
|
"Remember: Chart shows SUM of all hosts' package counts, not individual host counts",
|
||||||
|
],
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error analyzing package spike:", error);
|
||||||
|
res.status(500).json({ error: "Failed to analyze package spike" });
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
module.exports = router;
|
module.exports = router;
|
||||||
|
|||||||
779
backend/src/routes/dockerRoutes.js
Normal file
779
backend/src/routes/dockerRoutes.js
Normal file
@@ -0,0 +1,779 @@
|
|||||||
|
const express = require("express");
|
||||||
|
const { authenticateToken } = require("../middleware/auth");
|
||||||
|
const { PrismaClient } = require("@prisma/client");
|
||||||
|
const { v4: uuidv4 } = require("uuid");
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Helper function to convert BigInt fields to strings for JSON serialization
|
||||||
|
const convertBigIntToString = (obj) => {
|
||||||
|
if (obj === null || obj === undefined) return obj;
|
||||||
|
|
||||||
|
if (typeof obj === "bigint") {
|
||||||
|
return obj.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(obj)) {
|
||||||
|
return obj.map(convertBigIntToString);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof obj === "object") {
|
||||||
|
const converted = {};
|
||||||
|
for (const key in obj) {
|
||||||
|
converted[key] = convertBigIntToString(obj[key]);
|
||||||
|
}
|
||||||
|
return converted;
|
||||||
|
}
|
||||||
|
|
||||||
|
return obj;
|
||||||
|
};
|
||||||
|
|
||||||
|
// GET /api/v1/docker/dashboard - Get Docker dashboard statistics
// Returns aggregate counts (hosts with Docker, containers, images, pending
// updates) plus per-status and per-source breakdowns.
router.get("/dashboard", authenticateToken, async (_req, res) => {
	try {
		// All seven aggregates are independent read-only queries, so they are
		// issued concurrently.
		const [
			hostsWithDocker,
			totalContainers,
			runningContainers,
			totalImages,
			availableUpdates,
			containersByStatus,
			imagesBySource,
		] = await Promise.all([
			prisma.docker_containers.groupBy({ by: ["host_id"], _count: true }),
			prisma.docker_containers.count(),
			prisma.docker_containers.count({ where: { status: "running" } }),
			prisma.docker_images.count(),
			prisma.docker_image_updates.count(),
			prisma.docker_containers.groupBy({ by: ["status"], _count: true }),
			prisma.docker_images.groupBy({ by: ["source"], _count: true }),
		]);

		res.json({
			stats: {
				totalHostsWithDocker: hostsWithDocker.length,
				totalContainers,
				runningContainers,
				totalImages,
				availableUpdates,
			},
			containersByStatus,
			imagesBySource,
		});
	} catch (error) {
		console.error("Error fetching Docker dashboard:", error);
		res.status(500).json({ error: "Failed to fetch Docker dashboard" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/containers - Get all containers with filters
// Query params: status, hostId, imageId (exact matches), search
// (case-insensitive substring on name/image_name), page (default 1),
// limit (default 50).
router.get("/containers", authenticateToken, async (req, res) => {
	try {
		const { status, hostId, imageId, search, page = 1, limit = 50 } = req.query;

		// Build the Prisma where clause from whichever filters were supplied.
		const where = {};
		if (status) where.status = status;
		if (hostId) where.host_id = hostId;
		if (imageId) where.image_id = imageId;
		if (search) {
			where.OR = [
				{ name: { contains: search, mode: "insensitive" } },
				{ image_name: { contains: search, mode: "insensitive" } },
			];
		}

		// NOTE(review): page/limit come from the query string unvalidated —
		// non-numeric or zero values produce NaN skip / Infinity totalPages.
		// Confirm upstream validation or clamp here.
		const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
		const take = parseInt(limit, 10);

		// Page of containers and total count fetched concurrently.
		const [containers, total] = await Promise.all([
			prisma.docker_containers.findMany({
				where,
				include: {
					docker_images: true,
				},
				orderBy: { updated_at: "desc" },
				skip,
				take,
			}),
			prisma.docker_containers.count({ where }),
		]);

		// Attach host info with one batched lookup rather than a query per
		// container.
		const hostIds = [...new Set(containers.map((c) => c.host_id))];
		const hosts = await prisma.hosts.findMany({
			where: { id: { in: hostIds } },
			select: { id: true, friendly_name: true, hostname: true, ip: true },
		});

		// id -> host record, for O(1) joins below.
		const hostsMap = hosts.reduce((acc, host) => {
			acc[host.id] = host;
			return acc;
		}, {});

		const containersWithHosts = containers.map((container) => ({
			...container,
			host: hostsMap[container.host_id],
		}));

		// convertBigIntToString: Prisma BigInt columns are not JSON-serializable.
		res.json(
			convertBigIntToString({
				containers: containersWithHosts,
				pagination: {
					page: parseInt(page, 10),
					limit: parseInt(limit, 10),
					total,
					totalPages: Math.ceil(total / parseInt(limit, 10)),
				},
			}),
		);
	} catch (error) {
		console.error("Error fetching containers:", error);
		res.status(500).json({ error: "Failed to fetch containers" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/containers/:id - Get container detail
// Returns the container (with its image and that image's pending updates),
// its host's identity fields, and up to 10 other containers using the same
// image.
router.get("/containers/:id", authenticateToken, async (req, res) => {
	try {
		const { id } = req.params;

		const container = await prisma.docker_containers.findUnique({
			where: { id },
			include: {
				docker_images: {
					include: {
						docker_image_updates: true,
					},
				},
			},
		});

		if (!container) {
			return res.status(404).json({ error: "Container not found" });
		}

		// Host identity/OS fields for display alongside the container.
		const host = await prisma.hosts.findUnique({
			where: { id: container.host_id },
			select: {
				id: true,
				friendly_name: true,
				hostname: true,
				ip: true,
				os_type: true,
				os_version: true,
			},
		});

		// Other containers (any host) running the same image, capped at 10.
		const similarContainers = await prisma.docker_containers.findMany({
			where: {
				image_id: container.image_id,
				id: { not: id },
			},
			take: 10,
		});

		res.json(
			convertBigIntToString({
				container: {
					...container,
					host,
				},
				similarContainers,
			}),
		);
	} catch (error) {
		console.error("Error fetching container detail:", error);
		res.status(500).json({ error: "Failed to fetch container detail" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/images - Get all images with filters
// Query params: source (exact), search (case-insensitive substring on
// repository/tag), page (default 1), limit (default 50).
router.get("/images", authenticateToken, async (req, res) => {
	try {
		const { source, search, page = 1, limit = 50 } = req.query;

		const where = {};
		if (source) where.source = source;
		if (search) {
			where.OR = [
				{ repository: { contains: search, mode: "insensitive" } },
				{ tag: { contains: search, mode: "insensitive" } },
			];
		}

		const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
		const take = parseInt(limit, 10);

		// Page of images (with container/update counts and the most recent
		// update record) plus total count, concurrently.
		const [images, total] = await Promise.all([
			prisma.docker_images.findMany({
				where,
				include: {
					_count: {
						select: {
							docker_containers: true,
							docker_image_updates: true,
						},
					},
					docker_image_updates: {
						take: 1,
						orderBy: { created_at: "desc" },
					},
				},
				orderBy: { updated_at: "desc" },
				skip,
				take,
			}),
			prisma.docker_images.count({ where }),
		]);

		// Count distinct hosts per image.
		// NOTE(review): this issues one query per image on the page (N+1,
		// bounded by `limit`) — a single groupBy over docker_containers could
		// replace it if this endpoint becomes hot.
		const imagesWithHosts = await Promise.all(
			images.map(async (image) => {
				const containers = await prisma.docker_containers.findMany({
					where: { image_id: image.id },
					select: { host_id: true },
					distinct: ["host_id"],
				});
				return {
					...image,
					hostsCount: containers.length,
					hasUpdates: image._count.docker_image_updates > 0,
				};
			}),
		);

		res.json(
			convertBigIntToString({
				images: imagesWithHosts,
				pagination: {
					page: parseInt(page, 10),
					limit: parseInt(limit, 10),
					total,
					totalPages: Math.ceil(total / parseInt(limit, 10)),
				},
			}),
		);
	} catch (error) {
		console.error("Error fetching images:", error);
		res.status(500).json({ error: "Failed to fetch images" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/images/:id - Get image detail
// Returns the image with up to 100 of its containers, its full update
// history (newest first), and the distinct hosts that run it.
router.get("/images/:id", authenticateToken, async (req, res) => {
	try {
		const image = await prisma.docker_images.findUnique({
			where: { id: req.params.id },
			include: {
				docker_containers: {
					take: 100,
				},
				docker_image_updates: {
					orderBy: { created_at: "desc" },
				},
			},
		});

		if (!image) {
			return res.status(404).json({ error: "Image not found" });
		}

		// Distinct hosts that run at least one container of this image.
		const uniqueHostIds = [
			...new Set(image.docker_containers.map((c) => c.host_id)),
		];
		const hosts = await prisma.hosts.findMany({
			where: { id: { in: uniqueHostIds } },
			select: { id: true, friendly_name: true, hostname: true, ip: true },
		});

		res.json(
			convertBigIntToString({
				image,
				hosts,
				totalContainers: image.docker_containers.length,
				totalHosts: hosts.length,
			}),
		);
	} catch (error) {
		console.error("Error fetching image detail:", error);
		res.status(500).json({ error: "Failed to fetch image detail" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/hosts - Get all hosts with Docker
// Lists hosts that have at least one reported container, paginated, each
// annotated with container/image counts.
router.get("/hosts", authenticateToken, async (req, res) => {
	try {
		const { page = 1, limit = 50 } = req.query;

		// Hosts are "Docker hosts" if they own any docker_containers row.
		const hostsWithContainers = await prisma.docker_containers.groupBy({
			by: ["host_id"],
			_count: true,
		});

		const hostIds = hostsWithContainers.map((h) => h.host_id);

		const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
		const take = parseInt(limit, 10);

		const hosts = await prisma.hosts.findMany({
			where: { id: { in: hostIds } },
			skip,
			take,
			orderBy: { friendly_name: "asc" },
		});

		// Per-host stats. NOTE(review): three queries per host on the page
		// (bounded by `limit`) — could be batched with groupBy if needed.
		const hostsWithStats = await Promise.all(
			hosts.map(async (host) => {
				const [totalContainers, runningContainers, totalImages] =
					await Promise.all([
						prisma.docker_containers.count({
							where: { host_id: host.id },
						}),
						prisma.docker_containers.count({
							where: { host_id: host.id, status: "running" },
						}),
						// findMany+distinct rather than count: Prisma count does
						// not support distinct, so distinct image ids are counted
						// in JS below.
						prisma.docker_containers.findMany({
							where: { host_id: host.id },
							select: { image_id: true },
							distinct: ["image_id"],
						}),
					]);

				return {
					...host,
					dockerStats: {
						totalContainers,
						runningContainers,
						totalImages: totalImages.length,
					},
				};
			}),
		);

		res.json(
			convertBigIntToString({
				hosts: hostsWithStats,
				pagination: {
					page: parseInt(page, 10),
					limit: parseInt(limit, 10),
					// Total is the number of Docker hosts, not rows on this page.
					total: hostIds.length,
					totalPages: Math.ceil(hostIds.length / parseInt(limit, 10)),
				},
			}),
		);
	} catch (error) {
		console.error("Error fetching Docker hosts:", error);
		res.status(500).json({ error: "Failed to fetch Docker hosts" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/hosts/:id - Get host Docker detail
// Returns the host record, its containers (with image + pending updates),
// the distinct images in use on it, and summary stats.
router.get("/hosts/:id", authenticateToken, async (req, res) => {
	try {
		const { id } = req.params;

		const host = await prisma.hosts.findUnique({
			where: { id },
		});

		if (!host) {
			return res.status(404).json({ error: "Host not found" });
		}

		// All containers on this host, alphabetical, each with its image and
		// that image's pending updates.
		const containers = await prisma.docker_containers.findMany({
			where: { host_id: id },
			include: {
				docker_images: {
					include: {
						docker_image_updates: true,
					},
				},
			},
			orderBy: { name: "asc" },
		});

		// Distinct image ids in use on this host; filter(Boolean) drops
		// containers whose image_id is null.
		const imageIds = [...new Set(containers.map((c) => c.image_id))].filter(
			Boolean,
		);
		const images = await prisma.docker_images.findMany({
			where: { id: { in: imageIds } },
		});

		// Status summary. NOTE(review): any status other than
		// running/exited/stopped is counted in neither bucket.
		const runningContainers = containers.filter(
			(c) => c.status === "running",
		).length;
		const stoppedContainers = containers.filter(
			(c) => c.status === "exited" || c.status === "stopped",
		).length;

		res.json(
			convertBigIntToString({
				host,
				containers,
				images,
				stats: {
					totalContainers: containers.length,
					runningContainers,
					stoppedContainers,
					totalImages: images.length,
				},
			}),
		);
	} catch (error) {
		console.error("Error fetching host Docker detail:", error);
		res.status(500).json({ error: "Failed to fetch host Docker detail" });
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/updates - Get available updates
// Query params: page, limit, securityOnly ("true" filters to security
// updates). Security updates sort first, then newest first.
router.get("/updates", authenticateToken, async (req, res) => {
	try {
		const { page = 1, limit = 50, securityOnly = false } = req.query;

		const where = {};
		// Query-string values are strings, hence the "true" comparison.
		if (securityOnly === "true") {
			where.is_security_update = true;
		}

		const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
		const take = parseInt(limit, 10);

		const [updates, total] = await Promise.all([
			prisma.docker_image_updates.findMany({
				where,
				include: {
					docker_images: {
						include: {
							docker_containers: {
								select: {
									id: true,
									host_id: true,
									name: true,
								},
							},
						},
					},
				},
				orderBy: [{ is_security_update: "desc" }, { created_at: "desc" }],
				skip,
				take,
			}),
			prisma.docker_image_updates.count({ where }),
		]);

		// Annotate each update with the distinct hosts affected and how many
		// containers run the outdated image (one hosts query per update row,
		// bounded by `limit`).
		const updatesWithHosts = await Promise.all(
			updates.map(async (update) => {
				const hostIds = [
					...new Set(
						update.docker_images.docker_containers.map((c) => c.host_id),
					),
				];
				const hosts = await prisma.hosts.findMany({
					where: { id: { in: hostIds } },
					select: { id: true, friendly_name: true, hostname: true },
				});
				return {
					...update,
					affectedHosts: hosts,
					affectedContainersCount:
						update.docker_images.docker_containers.length,
				};
			}),
		);

		res.json(
			convertBigIntToString({
				updates: updatesWithHosts,
				pagination: {
					page: parseInt(page, 10),
					limit: parseInt(limit, 10),
					total,
					totalPages: Math.ceil(total / parseInt(limit, 10)),
				},
			}),
		);
	} catch (error) {
		console.error("Error fetching Docker updates:", error);
		res.status(500).json({ error: "Failed to fetch Docker updates" });
	}
});
|
||||||
|
|
||||||
|
// POST /api/v1/docker/collect - Collect Docker data from agent
// Agent-facing ingestion endpoint: authenticates via apiId/apiKey in the
// body (no JWT), then upserts containers, images, and image-update records
// for the reporting host, and prunes stale update records.
router.post("/collect", async (req, res) => {
	try {
		const { apiId, apiKey, containers, images, updates } = req.body;

		// Validate API credentials.
		// NOTE(review): api_key is matched as a plaintext column equality —
		// confirm whether agent keys are meant to be stored hashed like the
		// gethomepage tokens elsewhere in this codebase.
		const host = await prisma.hosts.findFirst({
			where: { api_id: apiId, api_key: apiKey },
		});

		if (!host) {
			return res.status(401).json({ error: "Invalid API credentials" });
		}

		// Single timestamp reused for every record written by this report.
		const now = new Date();

		// Parse an agent-supplied date string, falling back to `now` for
		// missing or unparseable values.
		const parseDate = (dateString) => {
			if (!dateString) return now;
			const date = new Date(dateString);
			return Number.isNaN(date.getTime()) ? now : date;
		};

		// Process containers: upsert each container and (when the agent
		// supplied repository+tag) its image.
		if (containers && Array.isArray(containers)) {
			for (const containerData of containers) {
				// Pre-generated id used only if the container row is created.
				const containerId = uuidv4();

				// Find or create the image for this container.
				let imageId = null;
				if (containerData.image_repository && containerData.image_tag) {
					const image = await prisma.docker_images.upsert({
						where: {
							// Composite unique key (repository, tag, image_id).
							repository_tag_image_id: {
								repository: containerData.image_repository,
								tag: containerData.image_tag,
								image_id: containerData.image_id || "unknown",
							},
						},
						update: {
							last_checked: now,
							updated_at: now,
						},
						create: {
							id: uuidv4(),
							repository: containerData.image_repository,
							tag: containerData.image_tag,
							image_id: containerData.image_id || "unknown",
							source: containerData.image_source || "docker-hub",
							created_at: parseDate(containerData.created_at),
							updated_at: now,
						},
					});
					imageId = image.id;
				}

				// Upsert container, keyed by (host_id, container_id).
				await prisma.docker_containers.upsert({
					where: {
						host_id_container_id: {
							host_id: host.id,
							container_id: containerData.container_id,
						},
					},
					update: {
						name: containerData.name,
						image_id: imageId,
						image_name: containerData.image_name,
						image_tag: containerData.image_tag || "latest",
						status: containerData.status,
						state: containerData.state,
						ports: containerData.ports || null,
						started_at: containerData.started_at
							? parseDate(containerData.started_at)
							: null,
						updated_at: now,
						last_checked: now,
					},
					create: {
						id: containerId,
						host_id: host.id,
						container_id: containerData.container_id,
						name: containerData.name,
						image_id: imageId,
						image_name: containerData.image_name,
						image_tag: containerData.image_tag || "latest",
						status: containerData.status,
						state: containerData.state,
						ports: containerData.ports || null,
						created_at: parseDate(containerData.created_at),
						started_at: containerData.started_at
							? parseDate(containerData.started_at)
							: null,
						updated_at: now,
					},
				});
			}
		}

		// Process standalone images (reported independently of containers).
		if (images && Array.isArray(images)) {
			for (const imageData of images) {
				await prisma.docker_images.upsert({
					where: {
						repository_tag_image_id: {
							repository: imageData.repository,
							tag: imageData.tag,
							image_id: imageData.image_id,
						},
					},
					update: {
						size_bytes: imageData.size_bytes
							? BigInt(imageData.size_bytes)
							: null,
						last_checked: now,
						updated_at: now,
					},
					create: {
						id: uuidv4(),
						repository: imageData.repository,
						tag: imageData.tag,
						image_id: imageData.image_id,
						digest: imageData.digest,
						size_bytes: imageData.size_bytes
							? BigInt(imageData.size_bytes)
							: null,
						source: imageData.source || "docker-hub",
						created_at: parseDate(imageData.created_at),
						updated_at: now,
					},
				});
			}
		}

		// Process updates.
		// First, get all images for this host to clean up old updates.
		const hostImageIds = await prisma.docker_containers
			.findMany({
				where: { host_id: host.id },
				select: { image_id: true },
				distinct: ["image_id"],
			})
			.then((results) => results.map((r) => r.image_id).filter(Boolean));

		// NOTE(review): the entire updates-ingestion loop below is gated on
		// the host having at least one container with an image — an updates
		// array sent by a host with no such containers is silently dropped.
		// Confirm that is intended.
		if (hostImageIds.length > 0) {
			// Image ids for which the agent reported an update this cycle.
			const reportedImageIds = [];

			// Process new updates.
			if (updates && Array.isArray(updates)) {
				for (const updateData of updates) {
					// Find the image by repository, tag, and image_id.
					const image = await prisma.docker_images.findFirst({
						where: {
							repository: updateData.repository,
							tag: updateData.current_tag,
							image_id: updateData.image_id,
						},
					});

					if (image) {
						reportedImageIds.push(image.id);

						// Store digest info in changelog_url field as JSON for now.
						const digestInfo = JSON.stringify({
							method: "digest_comparison",
							current_digest: updateData.current_digest,
							available_digest: updateData.available_digest,
						});

						// Upsert the update record, keyed by
						// (image_id, available_tag).
						await prisma.docker_image_updates.upsert({
							where: {
								image_id_available_tag: {
									image_id: image.id,
									available_tag: updateData.available_tag,
								},
							},
							update: {
								updated_at: now,
								changelog_url: digestInfo,
								severity: "digest_changed",
							},
							create: {
								id: uuidv4(),
								image_id: image.id,
								current_tag: updateData.current_tag,
								available_tag: updateData.available_tag,
								severity: "digest_changed",
								changelog_url: digestInfo,
								updated_at: now,
							},
						});
					}
				}
			}

			// Remove stale updates for images on this host that are no longer
			// in the updates list.
			// NOTE(review): docker_image_updates rows are global per image, so
			// this deleteMany also removes updates reported by OTHER hosts that
			// share the same image — verify cross-host behavior is acceptable.
			const imageIdsToCleanup = hostImageIds.filter(
				(id) => !reportedImageIds.includes(id),
			);
			if (imageIdsToCleanup.length > 0) {
				await prisma.docker_image_updates.deleteMany({
					where: {
						image_id: { in: imageIdsToCleanup },
					},
				});
			}
		}

		res.json({ success: true, message: "Docker data collected successfully" });
	} catch (error) {
		console.error("Error collecting Docker data:", error);
		console.error("Error stack:", error.stack);
		// NOTE(review): logs the full request body, which includes apiKey —
		// consider redacting credentials before logging.
		console.error("Request body:", JSON.stringify(req.body, null, 2));
		res.status(500).json({
			error: "Failed to collect Docker data",
			message: error.message,
			details: process.env.NODE_ENV === "development" ? error.stack : undefined,
		});
	}
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/agent - Serve the Docker agent installation script
// Unauthenticated: reads agents/patchmon-docker-agent.sh from the repo root
// and serves it as a shell script.
router.get("/agent", async (_req, res) => {
	try {
		const fs = require("node:fs");
		const path = require("node:path");

		// Script lives three levels up from backend/src/routes.
		const scriptPath = path.join(
			__dirname,
			"../../..",
			"agents",
			"patchmon-docker-agent.sh",
		);

		if (!fs.existsSync(scriptPath)) {
			return res.status(404).json({ error: "Docker agent script not found" });
		}

		res.setHeader("Content-Type", "text/x-shellscript");
		res.setHeader(
			"Content-Disposition",
			'inline; filename="patchmon-docker-agent.sh"',
		);
		res.send(fs.readFileSync(scriptPath, "utf8"));
	} catch (error) {
		console.error("Error serving Docker agent:", error);
		res.status(500).json({ error: "Failed to serve Docker agent script" });
	}
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
236
backend/src/routes/gethomepageRoutes.js
Normal file
236
backend/src/routes/gethomepageRoutes.js
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
// gethomepage integration routes: API-key-authenticated widget statistics
// and health check endpoints.
const express = require("express");
const { createPrismaClient } = require("../config/database");
const bcrypt = require("bcryptjs");

const router = express.Router();
const prisma = createPrismaClient();
|
||||||
|
|
||||||
|
// Middleware to authenticate API key.
// Expects HTTP Basic auth where username = token_key and password = the
// plaintext API secret (verified against the bcrypt hash in the DB). The
// token must be active, unexpired, flagged for the gethomepage integration,
// and — when allowed_ip_ranges is set — requested from an allowed IP.
// On success attaches the token record as req.apiToken.
const authenticateApiKey = async (req, res, next) => {
	try {
		const authHeader = req.headers.authorization;

		if (!authHeader || !authHeader.startsWith("Basic ")) {
			return res
				.status(401)
				.json({ error: "Missing or invalid authorization header" });
		}

		// Decode base64 credentials ("key:secret").
		const base64Credentials = authHeader.split(" ")[1];
		const credentials = Buffer.from(base64Credentials, "base64").toString(
			"ascii",
		);
		const [apiKey, apiSecret] = credentials.split(":");

		if (!apiKey || !apiSecret) {
			return res.status(401).json({ error: "Invalid credentials format" });
		}

		// Find the token in database (token_key is the public half; only the
		// secret is bcrypt-hashed).
		const token = await prisma.auto_enrollment_tokens.findUnique({
			where: { token_key: apiKey },
			include: {
				users: {
					select: {
						id: true,
						username: true,
						role: true,
					},
				},
			},
		});

		if (!token) {
			// NOTE(review): logs the presented API key verbatim — consider
			// redacting or truncating in logs.
			console.log(`API key not found: ${apiKey}`);
			return res.status(401).json({ error: "Invalid API key" });
		}

		// Check if token is active.
		if (!token.is_active) {
			return res.status(401).json({ error: "API key is disabled" });
		}

		// Check if token has expired (expires_at null = never expires).
		if (token.expires_at && new Date(token.expires_at) < new Date()) {
			return res.status(401).json({ error: "API key has expired" });
		}

		// Check if token is for gethomepage integration; other token types
		// (e.g. auto-enrollment) are rejected here.
		if (token.metadata?.integration_type !== "gethomepage") {
			return res.status(401).json({ error: "Invalid API key type" });
		}

		// Verify the secret against the stored bcrypt hash.
		const isValidSecret = await bcrypt.compare(apiSecret, token.token_secret);
		if (!isValidSecret) {
			return res.status(401).json({ error: "Invalid API secret" });
		}

		// Check IP restrictions if any.
		if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
			const clientIp = req.ip || req.connection.remoteAddress;
			const forwardedFor = req.headers["x-forwarded-for"];
			const realIp = req.headers["x-real-ip"];

			// Get the actual client IP (considering proxies). First entry of
			// X-Forwarded-For wins, then X-Real-IP, then the socket address.
			const actualClientIp = forwardedFor
				? forwardedFor.split(",")[0].trim()
				: realIp || clientIp;

			const isAllowedIp = token.allowed_ip_ranges.some((range) => {
				// Simple IP range check (can be enhanced for CIDR support).
				// NOTE(review): startsWith is a string-prefix match, not a
				// subnet match — "10.1" would also allow "10.100.x.x".
				return actualClientIp.startsWith(range) || actualClientIp === range;
			});

			if (!isAllowedIp) {
				console.log(
					`IP validation failed. Client IP: ${actualClientIp}, Allowed ranges: ${token.allowed_ip_ranges.join(", ")}`,
				);
				return res.status(403).json({ error: "IP address not allowed" });
			}
		}

		// Update last used timestamp.
		await prisma.auto_enrollment_tokens.update({
			where: { id: token.id },
			data: { last_used_at: new Date() },
		});

		// Attach token info to request for downstream handlers.
		req.apiToken = token;
		next();
	} catch (error) {
		console.error("API key authentication error:", error);
		res.status(500).json({ error: "Authentication failed" });
	}
};
|
||||||
|
|
||||||
|
// Get homepage widget statistics.
// Aggregates host/package/repo counts, security-update figures, 24h update
// activity, and OS distribution into a flat JSON shape consumable by
// gethomepage widgets (which address fields by key, hence the flattened
// top_os_N_* duplicates of os_distribution).
router.get("/stats", authenticateApiKey, async (_req, res) => {
	try {
		// Get total hosts count (active only).
		const totalHosts = await prisma.hosts.count({
			where: { status: "active" },
		});

		// Get total outdated packages count.
		const totalOutdatedPackages = await prisma.host_packages.count({
			where: { needs_update: true },
		});

		// Get total repositories count.
		const totalRepos = await prisma.repositories.count({
			where: { is_active: true },
		});

		// Get hosts that need updates (have at least one outdated package).
		const hostsNeedingUpdates = await prisma.hosts.count({
			where: {
				status: "active",
				host_packages: {
					some: {
						needs_update: true,
					},
				},
			},
		});

		// Get security updates count.
		const securityUpdates = await prisma.host_packages.count({
			where: {
				needs_update: true,
				is_security_update: true,
			},
		});

		// Get hosts with at least one pending security update.
		const hostsWithSecurityUpdates = await prisma.hosts.count({
			where: {
				status: "active",
				host_packages: {
					some: {
						needs_update: true,
						is_security_update: true,
					},
				},
			},
		});

		// Derived: hosts with nothing outstanding.
		const upToDateHosts = totalHosts - hostsNeedingUpdates;

		// Get recent successful update activity (last 24 hours).
		const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
		const recentUpdates = await prisma.update_history.count({
			where: {
				timestamp: {
					gte: oneDayAgo,
				},
				status: "success",
			},
		});

		// Get OS distribution of active hosts, most common first.
		const osDistribution = await prisma.hosts.groupBy({
			by: ["os_type"],
			where: { status: "active" },
			_count: {
				id: true,
			},
			orderBy: {
				_count: {
					id: "desc",
				},
			},
		});

		// Format OS distribution data as [{ name, count }].
		const osDistributionFormatted = osDistribution.map((os) => ({
			name: os.os_type,
			count: os._count.id,
		}));

		// Extract top 3 OS types for flat display in widgets; pad with a
		// "None"/0 placeholder when fewer than 3 OS types exist.
		const top_os_1 = osDistributionFormatted[0] || { name: "None", count: 0 };
		const top_os_2 = osDistributionFormatted[1] || { name: "None", count: 0 };
		const top_os_3 = osDistributionFormatted[2] || { name: "None", count: 0 };

		// Prepare response data.
		const stats = {
			total_hosts: totalHosts,
			total_outdated_packages: totalOutdatedPackages,
			total_repos: totalRepos,
			hosts_needing_updates: hostsNeedingUpdates,
			up_to_date_hosts: upToDateHosts,
			security_updates: securityUpdates,
			hosts_with_security_updates: hostsWithSecurityUpdates,
			recent_updates_24h: recentUpdates,
			os_distribution: osDistributionFormatted,
			// Flattened OS data for easy widget display.
			top_os_1_name: top_os_1.name,
			top_os_1_count: top_os_1.count,
			top_os_2_name: top_os_2.name,
			top_os_2_count: top_os_2.count,
			top_os_3_name: top_os_3.name,
			top_os_3_count: top_os_3.count,
			last_updated: new Date().toISOString(),
		};

		res.json(stats);
	} catch (error) {
		console.error("Error fetching homepage stats:", error);
		res.status(500).json({ error: "Failed to fetch statistics" });
	}
});
|
||||||
|
|
||||||
|
// Health check endpoint for the API
|
||||||
|
router.get("/health", authenticateApiKey, async (req, res) => {
|
||||||
|
res.json({
|
||||||
|
status: "ok",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
api_key: req.apiToken.token_name,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
@@ -15,7 +15,7 @@ router.get("/", authenticateToken, async (_req, res) => {
|
|||||||
include: {
|
include: {
|
||||||
_count: {
|
_count: {
|
||||||
select: {
|
select: {
|
||||||
hosts: true,
|
host_group_memberships: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -38,6 +38,8 @@ router.get("/:id", authenticateToken, async (req, res) => {
|
|||||||
|
|
||||||
const hostGroup = await prisma.host_groups.findUnique({
|
const hostGroup = await prisma.host_groups.findUnique({
|
||||||
where: { id },
|
where: { id },
|
||||||
|
include: {
|
||||||
|
host_group_memberships: {
|
||||||
include: {
|
include: {
|
||||||
hosts: {
|
hosts: {
|
||||||
select: {
|
select: {
|
||||||
@@ -52,6 +54,8 @@ router.get("/:id", authenticateToken, async (req, res) => {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!hostGroup) {
|
if (!hostGroup) {
|
||||||
@@ -195,7 +199,7 @@ router.delete(
|
|||||||
include: {
|
include: {
|
||||||
_count: {
|
_count: {
|
||||||
select: {
|
select: {
|
||||||
hosts: true,
|
host_group_memberships: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -205,11 +209,10 @@ router.delete(
|
|||||||
return res.status(404).json({ error: "Host group not found" });
|
return res.status(404).json({ error: "Host group not found" });
|
||||||
}
|
}
|
||||||
|
|
||||||
// If host group has hosts, ungroup them first
|
// If host group has memberships, remove them first
|
||||||
if (existingGroup._count.hosts > 0) {
|
if (existingGroup._count.host_group_memberships > 0) {
|
||||||
await prisma.hosts.updateMany({
|
await prisma.host_group_memberships.deleteMany({
|
||||||
where: { host_group_id: id },
|
where: { host_group_id: id },
|
||||||
data: { host_group_id: null },
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -231,7 +234,13 @@ router.get("/:id/hosts", authenticateToken, async (req, res) => {
|
|||||||
const { id } = req.params;
|
const { id } = req.params;
|
||||||
|
|
||||||
const hosts = await prisma.hosts.findMany({
|
const hosts = await prisma.hosts.findMany({
|
||||||
where: { host_group_id: id },
|
where: {
|
||||||
|
host_group_memberships: {
|
||||||
|
some: {
|
||||||
|
host_group_id: id,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
friendly_name: true,
|
friendly_name: true,
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ const {
|
|||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
const prisma = new PrismaClient();
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
// Secure endpoint to download the agent script (requires API authentication)
|
// Secure endpoint to download the agent binary (requires API authentication)
|
||||||
router.get("/agent/download", async (req, res) => {
|
router.get("/agent/download", async (req, res) => {
|
||||||
try {
|
try {
|
||||||
// Verify API credentials
|
// Verify API credentials
|
||||||
@@ -34,46 +34,50 @@ router.get("/agent/download", async (req, res) => {
|
|||||||
return res.status(401).json({ error: "Invalid API credentials" });
|
return res.status(401).json({ error: "Invalid API credentials" });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Serve agent script directly from file system
|
// Get architecture parameter (default to amd64)
|
||||||
|
const architecture = req.query.arch || "amd64";
|
||||||
|
|
||||||
|
// Validate architecture
|
||||||
|
const validArchitectures = ["amd64", "386", "arm64"];
|
||||||
|
if (!validArchitectures.includes(architecture)) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error: "Invalid architecture. Must be one of: amd64, 386, arm64",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serve agent binary directly from file system
|
||||||
const fs = require("node:fs");
|
const fs = require("node:fs");
|
||||||
const path = require("node:path");
|
const path = require("node:path");
|
||||||
|
|
||||||
const agentPath = path.join(__dirname, "../../../agents/patchmon-agent.sh");
|
const binaryName = `patchmon-agent-linux-${architecture}`;
|
||||||
|
const binaryPath = path.join(__dirname, "../../../agents", binaryName);
|
||||||
|
|
||||||
if (!fs.existsSync(agentPath)) {
|
if (!fs.existsSync(binaryPath)) {
|
||||||
return res.status(404).json({ error: "Agent script not found" });
|
return res.status(404).json({
|
||||||
|
error: `Agent binary not found for architecture: ${architecture}`,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read file and convert line endings
|
// Set appropriate headers for binary download
|
||||||
let scriptContent = fs
|
res.setHeader("Content-Type", "application/octet-stream");
|
||||||
.readFileSync(agentPath, "utf8")
|
|
||||||
.replace(/\r\n/g, "\n")
|
|
||||||
.replace(/\r/g, "\n");
|
|
||||||
|
|
||||||
// Determine curl flags dynamically from settings for consistency
|
|
||||||
let curlFlags = "-s";
|
|
||||||
try {
|
|
||||||
const settings = await prisma.settings.findFirst();
|
|
||||||
if (settings && settings.ignore_ssl_self_signed === true) {
|
|
||||||
curlFlags = "-sk";
|
|
||||||
}
|
|
||||||
} catch (_) {}
|
|
||||||
|
|
||||||
// Inject the curl flags into the script
|
|
||||||
scriptContent = scriptContent.replace(
|
|
||||||
'CURL_FLAGS=""',
|
|
||||||
`CURL_FLAGS="${curlFlags}"`,
|
|
||||||
);
|
|
||||||
|
|
||||||
res.setHeader("Content-Type", "application/x-shellscript");
|
|
||||||
res.setHeader(
|
res.setHeader(
|
||||||
"Content-Disposition",
|
"Content-Disposition",
|
||||||
'attachment; filename="patchmon-agent.sh"',
|
`attachment; filename="${binaryName}"`,
|
||||||
);
|
);
|
||||||
res.send(scriptContent);
|
|
||||||
|
// Stream the binary file
|
||||||
|
const fileStream = fs.createReadStream(binaryPath);
|
||||||
|
fileStream.pipe(res);
|
||||||
|
|
||||||
|
fileStream.on("error", (error) => {
|
||||||
|
console.error("Binary stream error:", error);
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.status(500).json({ error: "Failed to stream agent binary" });
|
||||||
|
}
|
||||||
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Agent download error:", error);
|
console.error("Agent download error:", error);
|
||||||
res.status(500).json({ error: "Failed to download agent script" });
|
res.status(500).json({ error: "Failed to serve agent binary" });
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -158,7 +162,14 @@ router.post(
|
|||||||
body("friendly_name")
|
body("friendly_name")
|
||||||
.isLength({ min: 1 })
|
.isLength({ min: 1 })
|
||||||
.withMessage("Friendly name is required"),
|
.withMessage("Friendly name is required"),
|
||||||
body("hostGroupId").optional(),
|
body("hostGroupIds")
|
||||||
|
.optional()
|
||||||
|
.isArray()
|
||||||
|
.withMessage("Host group IDs must be an array"),
|
||||||
|
body("hostGroupIds.*")
|
||||||
|
.optional()
|
||||||
|
.isUUID()
|
||||||
|
.withMessage("Each host group ID must be a valid UUID"),
|
||||||
],
|
],
|
||||||
async (req, res) => {
|
async (req, res) => {
|
||||||
try {
|
try {
|
||||||
@@ -167,19 +178,21 @@ router.post(
|
|||||||
return res.status(400).json({ errors: errors.array() });
|
return res.status(400).json({ errors: errors.array() });
|
||||||
}
|
}
|
||||||
|
|
||||||
const { friendly_name, hostGroupId } = req.body;
|
const { friendly_name, hostGroupIds } = req.body;
|
||||||
|
|
||||||
// Generate unique API credentials for this host
|
// Generate unique API credentials for this host
|
||||||
const { apiId, apiKey } = generateApiCredentials();
|
const { apiId, apiKey } = generateApiCredentials();
|
||||||
|
|
||||||
// If hostGroupId is provided, verify the group exists
|
// If hostGroupIds is provided, verify all groups exist
|
||||||
if (hostGroupId) {
|
if (hostGroupIds && hostGroupIds.length > 0) {
|
||||||
const hostGroup = await prisma.host_groups.findUnique({
|
const hostGroups = await prisma.host_groups.findMany({
|
||||||
where: { id: hostGroupId },
|
where: { id: { in: hostGroupIds } },
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!hostGroup) {
|
if (hostGroups.length !== hostGroupIds.length) {
|
||||||
return res.status(400).json({ error: "Host group not found" });
|
return res
|
||||||
|
.status(400)
|
||||||
|
.json({ error: "One or more host groups not found" });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -195,10 +208,23 @@ router.post(
|
|||||||
architecture: null, // Will be updated when agent connects
|
architecture: null, // Will be updated when agent connects
|
||||||
api_id: apiId,
|
api_id: apiId,
|
||||||
api_key: apiKey,
|
api_key: apiKey,
|
||||||
host_group_id: hostGroupId || null,
|
|
||||||
status: "pending", // Will change to 'active' when agent connects
|
status: "pending", // Will change to 'active' when agent connects
|
||||||
updated_at: new Date(),
|
updated_at: new Date(),
|
||||||
|
// Create host group memberships if hostGroupIds are provided
|
||||||
|
host_group_memberships:
|
||||||
|
hostGroupIds && hostGroupIds.length > 0
|
||||||
|
? {
|
||||||
|
create: hostGroupIds.map((groupId) => ({
|
||||||
|
id: uuidv4(),
|
||||||
|
host_groups: {
|
||||||
|
connect: { id: groupId },
|
||||||
},
|
},
|
||||||
|
})),
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
},
|
||||||
|
include: {
|
||||||
|
host_group_memberships: {
|
||||||
include: {
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
@@ -208,6 +234,8 @@ router.post(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
res.status(201).json({
|
res.status(201).json({
|
||||||
@@ -216,7 +244,10 @@ router.post(
|
|||||||
friendlyName: host.friendly_name,
|
friendlyName: host.friendly_name,
|
||||||
apiId: host.api_id,
|
apiId: host.api_id,
|
||||||
apiKey: host.api_key,
|
apiKey: host.api_key,
|
||||||
hostGroup: host.host_groups,
|
hostGroups:
|
||||||
|
host.host_group_memberships?.map(
|
||||||
|
(membership) => membership.host_groups,
|
||||||
|
) || [],
|
||||||
instructions:
|
instructions:
|
||||||
"Use these credentials in your patchmon agent configuration. System information will be automatically detected when the agent connects.",
|
"Use these credentials in your patchmon agent configuration. System information will be automatically detected when the agent connects.",
|
||||||
});
|
});
|
||||||
@@ -390,7 +421,8 @@ router.post(
|
|||||||
const totalPackages = packages.length;
|
const totalPackages = packages.length;
|
||||||
|
|
||||||
// Process everything in a single transaction to avoid race conditions
|
// Process everything in a single transaction to avoid race conditions
|
||||||
await prisma.$transaction(async (tx) => {
|
await prisma.$transaction(
|
||||||
|
async (tx) => {
|
||||||
// Update host data
|
// Update host data
|
||||||
await tx.hosts.update({
|
await tx.hosts.update({
|
||||||
where: { id: host.id },
|
where: { id: host.id },
|
||||||
@@ -402,43 +434,74 @@ router.post(
|
|||||||
where: { host_id: host.id },
|
where: { host_id: host.id },
|
||||||
});
|
});
|
||||||
|
|
||||||
// Process each package
|
// Process packages in batches using createMany/updateMany
|
||||||
for (const packageData of packages) {
|
const packagesToCreate = [];
|
||||||
// Find or create package
|
const packagesToUpdate = [];
|
||||||
let pkg = await tx.packages.findUnique({
|
const _hostPackagesToUpsert = [];
|
||||||
where: { name: packageData.name },
|
|
||||||
|
// First pass: identify what needs to be created/updated
|
||||||
|
const existingPackages = await tx.packages.findMany({
|
||||||
|
where: {
|
||||||
|
name: { in: packages.map((p) => p.name) },
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!pkg) {
|
const existingPackageMap = new Map(
|
||||||
pkg = await tx.packages.create({
|
existingPackages.map((p) => [p.name, p]),
|
||||||
data: {
|
);
|
||||||
|
|
||||||
|
for (const packageData of packages) {
|
||||||
|
const existingPkg = existingPackageMap.get(packageData.name);
|
||||||
|
|
||||||
|
if (!existingPkg) {
|
||||||
|
// Package doesn't exist, create it
|
||||||
|
const newPkg = {
|
||||||
id: uuidv4(),
|
id: uuidv4(),
|
||||||
name: packageData.name,
|
name: packageData.name,
|
||||||
description: packageData.description || null,
|
description: packageData.description || null,
|
||||||
category: packageData.category || null,
|
category: packageData.category || null,
|
||||||
latest_version:
|
latest_version:
|
||||||
packageData.availableVersion || packageData.currentVersion,
|
packageData.availableVersion || packageData.currentVersion,
|
||||||
|
created_at: new Date(),
|
||||||
updated_at: new Date(),
|
updated_at: new Date(),
|
||||||
},
|
};
|
||||||
});
|
packagesToCreate.push(newPkg);
|
||||||
} else {
|
existingPackageMap.set(packageData.name, newPkg);
|
||||||
// Update package latest version if newer
|
} else if (
|
||||||
if (
|
|
||||||
packageData.availableVersion &&
|
packageData.availableVersion &&
|
||||||
packageData.availableVersion !== pkg.latest_version
|
packageData.availableVersion !== existingPkg.latest_version
|
||||||
) {
|
) {
|
||||||
await tx.packages.update({
|
// Package exists but needs version update
|
||||||
where: { id: pkg.id },
|
packagesToUpdate.push({
|
||||||
data: {
|
id: existingPkg.id,
|
||||||
latest_version: packageData.availableVersion,
|
latest_version: packageData.availableVersion,
|
||||||
updated_at: new Date(),
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create host package relationship
|
// Batch create new packages
|
||||||
// Use upsert to handle potential duplicates gracefully
|
if (packagesToCreate.length > 0) {
|
||||||
|
await tx.packages.createMany({
|
||||||
|
data: packagesToCreate,
|
||||||
|
skipDuplicates: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Batch update existing packages
|
||||||
|
for (const update of packagesToUpdate) {
|
||||||
|
await tx.packages.update({
|
||||||
|
where: { id: update.id },
|
||||||
|
data: {
|
||||||
|
latest_version: update.latest_version,
|
||||||
|
updated_at: new Date(),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now process host_packages
|
||||||
|
for (const packageData of packages) {
|
||||||
|
const pkg = existingPackageMap.get(packageData.name);
|
||||||
|
|
||||||
await tx.host_packages.upsert({
|
await tx.host_packages.upsert({
|
||||||
where: {
|
where: {
|
||||||
host_id_package_id: {
|
host_id_package_id: {
|
||||||
@@ -536,7 +599,12 @@ router.post(
|
|||||||
status: "success",
|
status: "success",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
});
|
},
|
||||||
|
{
|
||||||
|
maxWait: 30000, // Wait up to 30s for a transaction slot
|
||||||
|
timeout: 60000, // Allow transaction to run for up to 60s
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
// Agent auto-update is now handled client-side by the agent itself
|
// Agent auto-update is now handled client-side by the agent itself
|
||||||
|
|
||||||
@@ -695,18 +763,96 @@ router.post(
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Admin endpoint to bulk update host groups
|
// TODO: Admin endpoint to bulk update host groups - needs to be rewritten for many-to-many relationship
|
||||||
|
// router.put(
|
||||||
|
// "/bulk/group",
|
||||||
|
// authenticateToken,
|
||||||
|
// requireManageHosts,
|
||||||
|
// [
|
||||||
|
// body("hostIds").isArray().withMessage("Host IDs must be an array"),
|
||||||
|
// body("hostIds.*")
|
||||||
|
// .isLength({ min: 1 })
|
||||||
|
// .withMessage("Each host ID must be provided"),
|
||||||
|
// body("hostGroupId").optional(),
|
||||||
|
// ],
|
||||||
|
// async (req, res) => {
|
||||||
|
// try {
|
||||||
|
// const errors = validationResult(req);
|
||||||
|
// if (!errors.isEmpty()) {
|
||||||
|
// return res.status(400).json({ errors: errors.array() });
|
||||||
|
// }
|
||||||
|
|
||||||
|
// const { hostIds, hostGroupId } = req.body;
|
||||||
|
|
||||||
|
// // If hostGroupId is provided, verify the group exists
|
||||||
|
// if (hostGroupId) {
|
||||||
|
// const hostGroup = await prisma.host_groups.findUnique({
|
||||||
|
// where: { id: hostGroupId },
|
||||||
|
// });
|
||||||
|
|
||||||
|
// if (!hostGroup) {
|
||||||
|
// return res.status(400).json({ error: "Host group not found" });
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// // Check if all hosts exist
|
||||||
|
// const existingHosts = await prisma.hosts.findMany({
|
||||||
|
// where: { id: { in: hostIds } },
|
||||||
|
// select: { id: true, friendly_name: true },
|
||||||
|
// });
|
||||||
|
|
||||||
|
// if (existingHosts.length !== hostIds.length) {
|
||||||
|
// const foundIds = existingHosts.map((h) => h.id);
|
||||||
|
// const missingIds = hostIds.filter((id) => !foundIds.includes(id));
|
||||||
|
// return res.status(400).json({
|
||||||
|
// error: "Some hosts not found",
|
||||||
|
// missingHostIds: missingIds,
|
||||||
|
// });
|
||||||
|
// }
|
||||||
|
|
||||||
|
// // Bulk update host groups
|
||||||
|
// const updateResult = await prisma.hosts.updateMany({
|
||||||
|
// where: { id: { in: hostIds } },
|
||||||
|
// data: {
|
||||||
|
// host_group_id: hostGroupId || null,
|
||||||
|
// updated_at: new Date(),
|
||||||
|
// },
|
||||||
|
// });
|
||||||
|
|
||||||
|
// // Get updated hosts with group information
|
||||||
|
// const updatedHosts = await prisma.hosts.findMany({
|
||||||
|
// where: { id: { in: hostIds } },
|
||||||
|
// select: {
|
||||||
|
// id: true,
|
||||||
|
// friendly_name: true,
|
||||||
|
// host_groups: {
|
||||||
|
// select: {
|
||||||
|
// id: true,
|
||||||
|
// name: true,
|
||||||
|
// color: true,
|
||||||
|
// },
|
||||||
|
// },
|
||||||
|
// },
|
||||||
|
// });
|
||||||
|
|
||||||
|
// res.json({
|
||||||
|
// message: `Successfully updated ${updateResult.count} host${updateResult.count !== 1 ? "s" : ""}`,
|
||||||
|
// updatedCount: updateResult.count,
|
||||||
|
// hosts: updatedHosts,
|
||||||
|
// });
|
||||||
|
// } catch (error) {
|
||||||
|
// console.error("Bulk host group update error:", error);
|
||||||
|
// res.status(500).json({ error: "Failed to update host groups" });
|
||||||
|
// }
|
||||||
|
// },
|
||||||
|
// );
|
||||||
|
|
||||||
|
// Admin endpoint to update host groups (many-to-many)
|
||||||
router.put(
|
router.put(
|
||||||
"/bulk/group",
|
"/:hostId/groups",
|
||||||
authenticateToken,
|
authenticateToken,
|
||||||
requireManageHosts,
|
requireManageHosts,
|
||||||
[
|
[body("groupIds").isArray().optional()],
|
||||||
body("hostIds").isArray().withMessage("Host IDs must be an array"),
|
|
||||||
body("hostIds.*")
|
|
||||||
.isLength({ min: 1 })
|
|
||||||
.withMessage("Each host ID must be provided"),
|
|
||||||
body("hostGroupId").optional(),
|
|
||||||
],
|
|
||||||
async (req, res) => {
|
async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const errors = validationResult(req);
|
const errors = validationResult(req);
|
||||||
@@ -714,49 +860,58 @@ router.put(
|
|||||||
return res.status(400).json({ errors: errors.array() });
|
return res.status(400).json({ errors: errors.array() });
|
||||||
}
|
}
|
||||||
|
|
||||||
const { hostIds, hostGroupId } = req.body;
|
const { hostId } = req.params;
|
||||||
|
const { groupIds = [] } = req.body;
|
||||||
|
|
||||||
// If hostGroupId is provided, verify the group exists
|
// Check if host exists
|
||||||
if (hostGroupId) {
|
const host = await prisma.hosts.findUnique({
|
||||||
const hostGroup = await prisma.host_groups.findUnique({
|
where: { id: hostId },
|
||||||
where: { id: hostGroupId },
|
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!hostGroup) {
|
if (!host) {
|
||||||
return res.status(400).json({ error: "Host group not found" });
|
return res.status(404).json({ error: "Host not found" });
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if all hosts exist
|
// Verify all groups exist
|
||||||
const existingHosts = await prisma.hosts.findMany({
|
if (groupIds.length > 0) {
|
||||||
where: { id: { in: hostIds } },
|
const existingGroups = await prisma.host_groups.findMany({
|
||||||
select: { id: true, friendly_name: true },
|
where: { id: { in: groupIds } },
|
||||||
|
select: { id: true },
|
||||||
});
|
});
|
||||||
|
|
||||||
if (existingHosts.length !== hostIds.length) {
|
if (existingGroups.length !== groupIds.length) {
|
||||||
const foundIds = existingHosts.map((h) => h.id);
|
|
||||||
const missingIds = hostIds.filter((id) => !foundIds.includes(id));
|
|
||||||
return res.status(400).json({
|
return res.status(400).json({
|
||||||
error: "Some hosts not found",
|
error: "One or more host groups not found",
|
||||||
missingHostIds: missingIds,
|
provided: groupIds,
|
||||||
|
found: existingGroups.map((g) => g.id),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use transaction to update group memberships
|
||||||
|
const updatedHost = await prisma.$transaction(async (tx) => {
|
||||||
|
// Remove existing memberships
|
||||||
|
await tx.host_group_memberships.deleteMany({
|
||||||
|
where: { host_id: hostId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add new memberships
|
||||||
|
if (groupIds.length > 0) {
|
||||||
|
await tx.host_group_memberships.createMany({
|
||||||
|
data: groupIds.map((groupId) => ({
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
host_id: hostId,
|
||||||
|
host_group_id: groupId,
|
||||||
|
})),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Bulk update host groups
|
// Return updated host with groups
|
||||||
const updateResult = await prisma.hosts.updateMany({
|
return await tx.hosts.findUnique({
|
||||||
where: { id: { in: hostIds } },
|
where: { id: hostId },
|
||||||
data: {
|
include: {
|
||||||
host_group_id: hostGroupId || null,
|
host_group_memberships: {
|
||||||
updated_at: new Date(),
|
include: {
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// Get updated hosts with group information
|
|
||||||
const updatedHosts = await prisma.hosts.findMany({
|
|
||||||
where: { id: { in: hostIds } },
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
friendly_name: true,
|
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
@@ -765,21 +920,23 @@ router.put(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
message: `Successfully updated ${updateResult.count} host${updateResult.count !== 1 ? "s" : ""}`,
|
message: "Host groups updated successfully",
|
||||||
updatedCount: updateResult.count,
|
host: updatedHost,
|
||||||
hosts: updatedHosts,
|
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Bulk host group update error:", error);
|
console.error("Host groups update error:", error);
|
||||||
res.status(500).json({ error: "Failed to update host groups" });
|
res.status(500).json({ error: "Failed to update host groups" });
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Admin endpoint to update host group
|
// Legacy endpoint to update single host group (for backward compatibility)
|
||||||
router.put(
|
router.put(
|
||||||
"/:hostId/group",
|
"/:hostId/group",
|
||||||
authenticateToken,
|
authenticateToken,
|
||||||
@@ -795,6 +952,9 @@ router.put(
|
|||||||
const { hostId } = req.params;
|
const { hostId } = req.params;
|
||||||
const { hostGroupId } = req.body;
|
const { hostGroupId } = req.body;
|
||||||
|
|
||||||
|
// Convert single group to array and use the new endpoint logic
|
||||||
|
const _groupIds = hostGroupId ? [hostGroupId] : [];
|
||||||
|
|
||||||
// Check if host exists
|
// Check if host exists
|
||||||
const host = await prisma.hosts.findUnique({
|
const host = await prisma.hosts.findUnique({
|
||||||
where: { id: hostId },
|
where: { id: hostId },
|
||||||
@@ -804,7 +964,7 @@ router.put(
|
|||||||
return res.status(404).json({ error: "Host not found" });
|
return res.status(404).json({ error: "Host not found" });
|
||||||
}
|
}
|
||||||
|
|
||||||
// If hostGroupId is provided, verify the group exists
|
// Verify group exists if provided
|
||||||
if (hostGroupId) {
|
if (hostGroupId) {
|
||||||
const hostGroup = await prisma.host_groups.findUnique({
|
const hostGroup = await prisma.host_groups.findUnique({
|
||||||
where: { id: hostGroupId },
|
where: { id: hostGroupId },
|
||||||
@@ -815,13 +975,29 @@ router.put(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update host group
|
// Use transaction to update group memberships
|
||||||
const updatedHost = await prisma.hosts.update({
|
const updatedHost = await prisma.$transaction(async (tx) => {
|
||||||
where: { id: hostId },
|
// Remove existing memberships
|
||||||
|
await tx.host_group_memberships.deleteMany({
|
||||||
|
where: { host_id: hostId },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add new membership if group provided
|
||||||
|
if (hostGroupId) {
|
||||||
|
await tx.host_group_memberships.create({
|
||||||
data: {
|
data: {
|
||||||
host_group_id: hostGroupId || null,
|
id: crypto.randomUUID(),
|
||||||
updated_at: new Date(),
|
host_id: hostId,
|
||||||
|
host_group_id: hostGroupId,
|
||||||
},
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return updated host with groups
|
||||||
|
return await tx.hosts.findUnique({
|
||||||
|
where: { id: hostId },
|
||||||
|
include: {
|
||||||
|
host_group_memberships: {
|
||||||
include: {
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
@@ -831,6 +1007,9 @@ router.put(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
@@ -866,8 +1045,9 @@ router.get(
|
|||||||
agent_version: true,
|
agent_version: true,
|
||||||
auto_update: true,
|
auto_update: true,
|
||||||
created_at: true,
|
created_at: true,
|
||||||
host_group_id: true,
|
|
||||||
notes: true,
|
notes: true,
|
||||||
|
host_group_memberships: {
|
||||||
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
@@ -876,6 +1056,8 @@ router.get(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
orderBy: { created_at: "desc" },
|
orderBy: { created_at: "desc" },
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1138,13 +1320,17 @@ router.get("/install", async (req, res) => {
|
|||||||
// Check for --force parameter
|
// Check for --force parameter
|
||||||
const forceInstall = req.query.force === "true" || req.query.force === "1";
|
const forceInstall = req.query.force === "true" || req.query.force === "1";
|
||||||
|
|
||||||
// Inject the API credentials, server URL, curl flags, and force flag into the script
|
// Get architecture parameter (default to amd64)
|
||||||
|
const architecture = req.query.arch || "amd64";
|
||||||
|
|
||||||
|
// Inject the API credentials, server URL, curl flags, force flag, and architecture into the script
|
||||||
const envVars = `#!/bin/bash
|
const envVars = `#!/bin/bash
|
||||||
export PATCHMON_URL="${serverUrl}"
|
export PATCHMON_URL="${serverUrl}"
|
||||||
export API_ID="${host.api_id}"
|
export API_ID="${host.api_id}"
|
||||||
export API_KEY="${host.api_key}"
|
export API_KEY="${host.api_key}"
|
||||||
export CURL_FLAGS="${curlFlags}"
|
export CURL_FLAGS="${curlFlags}"
|
||||||
export FORCE_INSTALL="${forceInstall ? "true" : "false"}"
|
export FORCE_INSTALL="${forceInstall ? "true" : "false"}"
|
||||||
|
export ARCHITECTURE="${architecture}"
|
||||||
|
|
||||||
`;
|
`;
|
||||||
|
|
||||||
@@ -1521,11 +1707,9 @@ router.patch(
|
|||||||
architecture: true,
|
architecture: true,
|
||||||
last_update: true,
|
last_update: true,
|
||||||
status: true,
|
status: true,
|
||||||
host_group_id: true,
|
|
||||||
agent_version: true,
|
|
||||||
auto_update: true,
|
|
||||||
created_at: true,
|
|
||||||
updated_at: true,
|
updated_at: true,
|
||||||
|
host_group_memberships: {
|
||||||
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
@@ -1534,6 +1718,8 @@ router.patch(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
@@ -1594,12 +1780,9 @@ router.patch(
|
|||||||
architecture: true,
|
architecture: true,
|
||||||
last_update: true,
|
last_update: true,
|
||||||
status: true,
|
status: true,
|
||||||
host_group_id: true,
|
|
||||||
agent_version: true,
|
|
||||||
auto_update: true,
|
|
||||||
created_at: true,
|
|
||||||
updated_at: true,
|
|
||||||
notes: true,
|
notes: true,
|
||||||
|
host_group_memberships: {
|
||||||
|
include: {
|
||||||
host_groups: {
|
host_groups: {
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
@@ -1608,6 +1791,8 @@ router.patch(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
|
|||||||
@@ -8,102 +8,9 @@ const { getSettings, updateSettings } = require("../services/settingsService");
|
|||||||
const router = express.Router();
|
const router = express.Router();
|
||||||
const prisma = new PrismaClient();
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
// Function to trigger crontab updates on all hosts with auto-update enabled
|
// WebSocket broadcaster for agent policy updates (no longer used - queue-based delivery preferred)
|
||||||
async function triggerCrontabUpdates() {
|
// const { broadcastSettingsUpdate } = require("../services/agentWs");
|
||||||
try {
|
const { queueManager, QUEUE_NAMES } = require("../services/automation");
|
||||||
console.log(
|
|
||||||
"Triggering crontab updates on all hosts with auto-update enabled...",
|
|
||||||
);
|
|
||||||
|
|
||||||
// Get current settings for server URL
|
|
||||||
const settings = await getSettings();
|
|
||||||
const serverUrl = settings.server_url;
|
|
||||||
|
|
||||||
// Get all hosts that have auto-update enabled
|
|
||||||
const hosts = await prisma.hosts.findMany({
|
|
||||||
where: {
|
|
||||||
auto_update: true,
|
|
||||||
status: "active", // Only update active hosts
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
friendly_name: true,
|
|
||||||
api_id: true,
|
|
||||||
api_key: true,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log(`Found ${hosts.length} hosts with auto-update enabled`);
|
|
||||||
|
|
||||||
// For each host, we'll send a special update command that triggers crontab update
|
|
||||||
// This is done by sending a ping with a special flag
|
|
||||||
for (const host of hosts) {
|
|
||||||
try {
|
|
||||||
console.log(
|
|
||||||
`Triggering crontab update for host: ${host.friendly_name}`,
|
|
||||||
);
|
|
||||||
|
|
||||||
// We'll use the existing ping endpoint but add a special parameter
|
|
||||||
// The agent will detect this and run update-crontab command
|
|
||||||
const http = require("node:http");
|
|
||||||
const https = require("node:https");
|
|
||||||
|
|
||||||
const url = new URL(`${serverUrl}/api/v1/hosts/ping`);
|
|
||||||
const isHttps = url.protocol === "https:";
|
|
||||||
const client = isHttps ? https : http;
|
|
||||||
|
|
||||||
const postData = JSON.stringify({
|
|
||||||
triggerCrontabUpdate: true,
|
|
||||||
message: "Update interval changed, please update your crontab",
|
|
||||||
});
|
|
||||||
|
|
||||||
const options = {
|
|
||||||
hostname: url.hostname,
|
|
||||||
port: url.port || (isHttps ? 443 : 80),
|
|
||||||
path: url.pathname,
|
|
||||||
method: "POST",
|
|
||||||
headers: {
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
"Content-Length": Buffer.byteLength(postData),
|
|
||||||
"X-API-ID": host.api_id,
|
|
||||||
"X-API-KEY": host.api_key,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const req = client.request(options, (res) => {
|
|
||||||
if (res.statusCode === 200) {
|
|
||||||
console.log(
|
|
||||||
`Successfully triggered crontab update for ${host.friendly_name}`,
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
console.error(
|
|
||||||
`Failed to trigger crontab update for ${host.friendly_name}: ${res.statusCode}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
req.on("error", (error) => {
|
|
||||||
console.error(
|
|
||||||
`Error triggering crontab update for ${host.friendly_name}:`,
|
|
||||||
error.message,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
req.write(postData);
|
|
||||||
req.end();
|
|
||||||
} catch (error) {
|
|
||||||
console.error(
|
|
||||||
`Error triggering crontab update for ${host.friendly_name}:`,
|
|
||||||
error.message,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log("Crontab update trigger completed");
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Error in triggerCrontabUpdates:", error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helpers
|
// Helpers
|
||||||
function normalizeUpdateInterval(minutes) {
|
function normalizeUpdateInterval(minutes) {
|
||||||
@@ -290,15 +197,36 @@ router.put(
|
|||||||
|
|
||||||
console.log("Settings updated successfully:", updatedSettings);
|
console.log("Settings updated successfully:", updatedSettings);
|
||||||
|
|
||||||
// If update interval changed, trigger crontab updates on all hosts with auto-update enabled
|
// If update interval changed, enqueue persistent jobs for agents
|
||||||
if (
|
if (
|
||||||
updateInterval !== undefined &&
|
updateInterval !== undefined &&
|
||||||
oldUpdateInterval !== updateData.update_interval
|
oldUpdateInterval !== updateData.update_interval
|
||||||
) {
|
) {
|
||||||
console.log(
|
console.log(
|
||||||
`Update interval changed from ${oldUpdateInterval} to ${updateData.update_interval} minutes. Triggering crontab updates...`,
|
`Update interval changed from ${oldUpdateInterval} to ${updateData.update_interval} minutes. Enqueueing agent settings updates...`,
|
||||||
);
|
);
|
||||||
await triggerCrontabUpdates();
|
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { status: "active" },
|
||||||
|
select: { api_id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
|
||||||
|
const jobs = hosts.map((h) => ({
|
||||||
|
name: "settings_update",
|
||||||
|
data: {
|
||||||
|
api_id: h.api_id,
|
||||||
|
type: "settings_update",
|
||||||
|
update_interval: updateData.update_interval,
|
||||||
|
},
|
||||||
|
opts: { attempts: 10, backoff: { type: "exponential", delay: 5000 } },
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Bulk add jobs
|
||||||
|
await queue.addBulk(jobs);
|
||||||
|
|
||||||
|
// Note: Queue-based delivery handles retries and ensures reliable delivery
|
||||||
|
// No need for immediate broadcast as it would cause duplicate messages
|
||||||
}
|
}
|
||||||
|
|
||||||
res.json({
|
res.json({
|
||||||
|
|||||||
@@ -14,13 +14,13 @@ const router = express.Router();
|
|||||||
function getCurrentVersion() {
|
function getCurrentVersion() {
|
||||||
try {
|
try {
|
||||||
const packageJson = require("../../package.json");
|
const packageJson = require("../../package.json");
|
||||||
return packageJson?.version || "1.2.7";
|
return packageJson?.version || "1.2.9";
|
||||||
} catch (packageError) {
|
} catch (packageError) {
|
||||||
console.warn(
|
console.warn(
|
||||||
"Could not read version from package.json, using fallback:",
|
"Could not read version from package.json, using fallback:",
|
||||||
packageError.message,
|
packageError.message,
|
||||||
);
|
);
|
||||||
return "1.2.7";
|
return "1.2.9";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -126,10 +126,16 @@ async function getLatestCommit(owner, repo) {
|
|||||||
|
|
||||||
// Helper function to get commit count difference
|
// Helper function to get commit count difference
|
||||||
async function getCommitDifference(owner, repo, currentVersion) {
|
async function getCommitDifference(owner, repo, currentVersion) {
|
||||||
|
// Try both with and without 'v' prefix for compatibility
|
||||||
|
const versionTags = [
|
||||||
|
currentVersion, // Try without 'v' first (new format)
|
||||||
|
`v${currentVersion}`, // Try with 'v' prefix (old format)
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const versionTag of versionTags) {
|
||||||
try {
|
try {
|
||||||
const currentVersionTag = `v${currentVersion}`;
|
|
||||||
// Compare main branch with the released version tag
|
// Compare main branch with the released version tag
|
||||||
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${currentVersionTag}...main`;
|
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${versionTag}...main`;
|
||||||
|
|
||||||
const response = await fetch(apiUrl, {
|
const response = await fetch(apiUrl, {
|
||||||
method: "GET",
|
method: "GET",
|
||||||
@@ -147,6 +153,10 @@ async function getCommitDifference(owner, repo, currentVersion) {
|
|||||||
) {
|
) {
|
||||||
throw new Error("GitHub API rate limit exceeded");
|
throw new Error("GitHub API rate limit exceeded");
|
||||||
}
|
}
|
||||||
|
// If 404, try next tag format
|
||||||
|
if (response.status === 404) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`GitHub API error: ${response.status} ${response.statusText}`,
|
`GitHub API error: ${response.status} ${response.statusText}`,
|
||||||
);
|
);
|
||||||
@@ -160,10 +170,18 @@ async function getCommitDifference(owner, repo, currentVersion) {
|
|||||||
branchInfo: "main branch vs release",
|
branchInfo: "main branch vs release",
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error fetching commit difference:", error.message);
|
// If rate limit, throw immediately
|
||||||
|
if (error.message.includes("rate limit")) {
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If all attempts failed, throw error
|
||||||
|
throw new Error(
|
||||||
|
`Could not find tag '${currentVersion}' or 'v${currentVersion}' in repository`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
// Helper function to compare version strings (semantic versioning)
|
// Helper function to compare version strings (semantic versioning)
|
||||||
function compareVersions(version1, version2) {
|
function compareVersions(version1, version2) {
|
||||||
@@ -274,11 +292,11 @@ router.get(
|
|||||||
) {
|
) {
|
||||||
console.log("GitHub API rate limited, providing fallback data");
|
console.log("GitHub API rate limited, providing fallback data");
|
||||||
latestRelease = {
|
latestRelease = {
|
||||||
tagName: "v1.2.7",
|
tagName: "v1.2.8",
|
||||||
version: "1.2.7",
|
version: "1.2.8",
|
||||||
publishedAt: "2025-10-02T17:12:53Z",
|
publishedAt: "2025-10-02T17:12:53Z",
|
||||||
htmlUrl:
|
htmlUrl:
|
||||||
"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.7",
|
"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.8",
|
||||||
};
|
};
|
||||||
latestCommit = {
|
latestCommit = {
|
||||||
sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
|
sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
|
||||||
@@ -296,10 +314,13 @@ router.get(
|
|||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
// Fall back to cached data for other errors
|
// Fall back to cached data for other errors
|
||||||
|
const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
|
||||||
latestRelease = settings.latest_version
|
latestRelease = settings.latest_version
|
||||||
? {
|
? {
|
||||||
version: settings.latest_version,
|
version: settings.latest_version,
|
||||||
tagName: `v${settings.latest_version}`,
|
tagName: `v${settings.latest_version}`,
|
||||||
|
publishedAt: null, // Only use date from GitHub API, not cached data
|
||||||
|
htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/v${settings.latest_version}`,
|
||||||
}
|
}
|
||||||
: null;
|
: null;
|
||||||
}
|
}
|
||||||
|
|||||||
143
backend/src/routes/wsRoutes.js
Normal file
143
backend/src/routes/wsRoutes.js
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
const express = require("express");
|
||||||
|
const { authenticateToken } = require("../middleware/auth");
|
||||||
|
const {
|
||||||
|
getConnectionInfo,
|
||||||
|
subscribeToConnectionChanges,
|
||||||
|
} = require("../services/agentWs");
|
||||||
|
const {
|
||||||
|
validate_session,
|
||||||
|
update_session_activity,
|
||||||
|
} = require("../utils/session_manager");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Get WebSocket connection status by api_id (no database access - pure memory lookup)
|
||||||
|
router.get("/status/:apiId", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { apiId } = req.params;
|
||||||
|
|
||||||
|
// Direct in-memory check - no database query needed
|
||||||
|
const connectionInfo = getConnectionInfo(apiId);
|
||||||
|
|
||||||
|
// Minimal response for maximum speed
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: connectionInfo,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching WebSocket status:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch WebSocket status",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Server-Sent Events endpoint for real-time status updates (no polling needed!)
|
||||||
|
router.get("/status/:apiId/stream", async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { apiId } = req.params;
|
||||||
|
|
||||||
|
// Manual authentication for SSE (EventSource doesn't support custom headers)
|
||||||
|
const token =
|
||||||
|
req.query.token || req.headers.authorization?.replace("Bearer ", "");
|
||||||
|
if (!token) {
|
||||||
|
return res.status(401).json({ error: "Authentication required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify token manually with session validation
|
||||||
|
const jwt = require("jsonwebtoken");
|
||||||
|
try {
|
||||||
|
const decoded = jwt.verify(token, process.env.JWT_SECRET);
|
||||||
|
|
||||||
|
// Validate session (same as regular auth middleware)
|
||||||
|
const validation = await validate_session(decoded.sessionId, token);
|
||||||
|
if (!validation.valid) {
|
||||||
|
console.error("[SSE] Session validation failed:", validation.reason);
|
||||||
|
console.error("[SSE] Invalid session for api_id:", apiId);
|
||||||
|
return res.status(401).json({ error: "Invalid or expired session" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update session activity to prevent inactivity timeout
|
||||||
|
await update_session_activity(decoded.sessionId);
|
||||||
|
|
||||||
|
req.user = validation.user;
|
||||||
|
} catch (err) {
|
||||||
|
console.error("[SSE] JWT verification failed:", err.message);
|
||||||
|
console.error("[SSE] Invalid token for api_id:", apiId);
|
||||||
|
return res.status(401).json({ error: "Invalid or expired token" });
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("[SSE] Client connected for api_id:", apiId);
|
||||||
|
|
||||||
|
// Set headers for SSE
|
||||||
|
res.setHeader("Content-Type", "text/event-stream");
|
||||||
|
res.setHeader("Cache-Control", "no-cache");
|
||||||
|
res.setHeader("Connection", "keep-alive");
|
||||||
|
res.setHeader("X-Accel-Buffering", "no"); // Disable nginx buffering
|
||||||
|
|
||||||
|
// Send initial status immediately
|
||||||
|
const initialInfo = getConnectionInfo(apiId);
|
||||||
|
res.write(`data: ${JSON.stringify(initialInfo)}\n\n`);
|
||||||
|
res.flushHeaders(); // Ensure headers are sent immediately
|
||||||
|
|
||||||
|
// Subscribe to connection changes for this specific api_id
|
||||||
|
const unsubscribe = subscribeToConnectionChanges(apiId, (_connected) => {
|
||||||
|
try {
|
||||||
|
// Push update to client instantly when status changes
|
||||||
|
const connectionInfo = getConnectionInfo(apiId);
|
||||||
|
console.log(
|
||||||
|
`[SSE] Pushing status change for ${apiId}: connected=${connectionInfo.connected} secure=${connectionInfo.secure}`,
|
||||||
|
);
|
||||||
|
res.write(`data: ${JSON.stringify(connectionInfo)}\n\n`);
|
||||||
|
} catch (err) {
|
||||||
|
console.error("[SSE] Error writing to stream:", err);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Heartbeat to keep connection alive (every 30 seconds)
|
||||||
|
const heartbeat = setInterval(() => {
|
||||||
|
try {
|
||||||
|
res.write(": heartbeat\n\n");
|
||||||
|
} catch (err) {
|
||||||
|
console.error("[SSE] Error writing heartbeat:", err);
|
||||||
|
clearInterval(heartbeat);
|
||||||
|
}
|
||||||
|
}, 30000);
|
||||||
|
|
||||||
|
// Cleanup on client disconnect
|
||||||
|
req.on("close", () => {
|
||||||
|
console.log("[SSE] Client disconnected for api_id:", apiId);
|
||||||
|
clearInterval(heartbeat);
|
||||||
|
unsubscribe();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle errors - distinguish between different error types
|
||||||
|
req.on("error", (err) => {
|
||||||
|
// Only log non-connection-reset errors to reduce noise
|
||||||
|
if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
|
||||||
|
console.error("[SSE] Request error:", err);
|
||||||
|
} else {
|
||||||
|
console.log("[SSE] Client connection reset for api_id:", apiId);
|
||||||
|
}
|
||||||
|
clearInterval(heartbeat);
|
||||||
|
unsubscribe();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle response errors
|
||||||
|
res.on("error", (err) => {
|
||||||
|
if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
|
||||||
|
console.error("[SSE] Response error:", err);
|
||||||
|
}
|
||||||
|
clearInterval(heartbeat);
|
||||||
|
unsubscribe();
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("[SSE] Unexpected error:", error);
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.status(500).json({ error: "Internal server error" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
@@ -39,6 +39,7 @@ const express = require("express");
|
|||||||
const cors = require("cors");
|
const cors = require("cors");
|
||||||
const helmet = require("helmet");
|
const helmet = require("helmet");
|
||||||
const rateLimit = require("express-rate-limit");
|
const rateLimit = require("express-rate-limit");
|
||||||
|
const cookieParser = require("cookie-parser");
|
||||||
const {
|
const {
|
||||||
createPrismaClient,
|
createPrismaClient,
|
||||||
waitForDatabase,
|
waitForDatabase,
|
||||||
@@ -62,9 +63,16 @@ const versionRoutes = require("./routes/versionRoutes");
|
|||||||
const tfaRoutes = require("./routes/tfaRoutes");
|
const tfaRoutes = require("./routes/tfaRoutes");
|
||||||
const searchRoutes = require("./routes/searchRoutes");
|
const searchRoutes = require("./routes/searchRoutes");
|
||||||
const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
|
const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
|
||||||
const updateScheduler = require("./services/updateScheduler");
|
const gethomepageRoutes = require("./routes/gethomepageRoutes");
|
||||||
|
const automationRoutes = require("./routes/automationRoutes");
|
||||||
|
const dockerRoutes = require("./routes/dockerRoutes");
|
||||||
|
const wsRoutes = require("./routes/wsRoutes");
|
||||||
const { initSettings } = require("./services/settingsService");
|
const { initSettings } = require("./services/settingsService");
|
||||||
const { cleanup_expired_sessions } = require("./utils/session_manager");
|
const { queueManager } = require("./services/automation");
|
||||||
|
const { authenticateToken, requireAdmin } = require("./middleware/auth");
|
||||||
|
const { createBullBoard } = require("@bull-board/api");
|
||||||
|
const { BullMQAdapter } = require("@bull-board/api/bullMQAdapter");
|
||||||
|
const { ExpressAdapter } = require("@bull-board/express");
|
||||||
|
|
||||||
// Initialize Prisma client with optimized connection pooling for multiple instances
|
// Initialize Prisma client with optimized connection pooling for multiple instances
|
||||||
const prisma = createPrismaClient();
|
const prisma = createPrismaClient();
|
||||||
@@ -251,6 +259,9 @@ if (process.env.ENABLE_LOGGING === "true") {
|
|||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
const PORT = process.env.PORT || 3001;
|
const PORT = process.env.PORT || 3001;
|
||||||
|
const http = require("node:http");
|
||||||
|
const server = http.createServer(app);
|
||||||
|
const { init: initAgentWs } = require("./services/agentWs");
|
||||||
|
|
||||||
// Trust proxy (needed when behind reverse proxy) and remove X-Powered-By
|
// Trust proxy (needed when behind reverse proxy) and remove X-Powered-By
|
||||||
if (process.env.TRUST_PROXY) {
|
if (process.env.TRUST_PROXY) {
|
||||||
@@ -338,12 +349,17 @@ app.use(
|
|||||||
// Allow non-browser/SSR tools with no origin
|
// Allow non-browser/SSR tools with no origin
|
||||||
if (!origin) return callback(null, true);
|
if (!origin) return callback(null, true);
|
||||||
if (allowedOrigins.includes(origin)) return callback(null, true);
|
if (allowedOrigins.includes(origin)) return callback(null, true);
|
||||||
|
// Allow same-origin requests (e.g., Bull Board accessing its own API)
|
||||||
|
// This allows http://hostname:3001 to make requests to http://hostname:3001
|
||||||
|
if (origin?.includes(":3001")) return callback(null, true);
|
||||||
return callback(new Error("Not allowed by CORS"));
|
return callback(new Error("Not allowed by CORS"));
|
||||||
},
|
},
|
||||||
credentials: true,
|
credentials: true,
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
app.use(limiter);
|
app.use(limiter);
|
||||||
|
// Cookie parser for Bull Board sessions
|
||||||
|
app.use(cookieParser());
|
||||||
// Reduce body size limits to reasonable defaults
|
// Reduce body size limits to reasonable defaults
|
||||||
app.use(express.json({ limit: process.env.JSON_BODY_LIMIT || "5mb" }));
|
app.use(express.json({ limit: process.env.JSON_BODY_LIMIT || "5mb" }));
|
||||||
app.use(
|
app.use(
|
||||||
@@ -422,6 +438,126 @@ app.use(
|
|||||||
authLimiter,
|
authLimiter,
|
||||||
autoEnrollmentRoutes,
|
autoEnrollmentRoutes,
|
||||||
);
|
);
|
||||||
|
app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/automation`, automationRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/docker`, dockerRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/ws`, wsRoutes);
|
||||||
|
|
||||||
|
// Bull Board - will be populated after queue manager initializes
|
||||||
|
let bullBoardRouter = null;
|
||||||
|
const bullBoardSessions = new Map(); // Store authenticated sessions
|
||||||
|
|
||||||
|
// Mount Bull Board at /admin instead of /api/v1/admin to avoid path conflicts
|
||||||
|
app.use(`/admin/queues`, (_req, res, next) => {
|
||||||
|
// Relax COOP/COEP for Bull Board in non-production to avoid browser warnings
|
||||||
|
if (process.env.NODE_ENV !== "production") {
|
||||||
|
res.setHeader("Cross-Origin-Opener-Policy", "same-origin-allow-popups");
|
||||||
|
res.setHeader("Cross-Origin-Embedder-Policy", "unsafe-none");
|
||||||
|
}
|
||||||
|
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Authentication middleware for Bull Board
|
||||||
|
app.use(`/admin/queues`, async (req, res, next) => {
|
||||||
|
// Skip authentication for static assets only
|
||||||
|
if (req.path.includes("/static/") || req.path.includes("/favicon")) {
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for bull-board-session cookie first
|
||||||
|
const sessionId = req.cookies["bull-board-session"];
|
||||||
|
if (sessionId) {
|
||||||
|
const session = bullBoardSessions.get(sessionId);
|
||||||
|
if (session && Date.now() - session.timestamp < 3600000) {
|
||||||
|
// 1 hour
|
||||||
|
// Valid session, extend it
|
||||||
|
session.timestamp = Date.now();
|
||||||
|
return next();
|
||||||
|
} else if (session) {
|
||||||
|
// Expired session, remove it
|
||||||
|
bullBoardSessions.delete(sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// No valid session, check for token
|
||||||
|
let token = req.query.token;
|
||||||
|
if (!token && req.headers.authorization) {
|
||||||
|
token = req.headers.authorization.replace("Bearer ", "");
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no token, deny access
|
||||||
|
if (!token) {
|
||||||
|
return res.status(401).json({ error: "Access token required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add token to headers for authentication
|
||||||
|
req.headers.authorization = `Bearer ${token}`;
|
||||||
|
|
||||||
|
// Authenticate the user
|
||||||
|
return authenticateToken(req, res, (err) => {
|
||||||
|
if (err) {
|
||||||
|
return res.status(401).json({ error: "Authentication failed" });
|
||||||
|
}
|
||||||
|
return requireAdmin(req, res, (adminErr) => {
|
||||||
|
if (adminErr) {
|
||||||
|
return res.status(403).json({ error: "Admin access required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication successful - create a session
|
||||||
|
const newSessionId = require("node:crypto")
|
||||||
|
.randomBytes(32)
|
||||||
|
.toString("hex");
|
||||||
|
bullBoardSessions.set(newSessionId, {
|
||||||
|
timestamp: Date.now(),
|
||||||
|
userId: req.user.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set session cookie
|
||||||
|
res.cookie("bull-board-session", newSessionId, {
|
||||||
|
httpOnly: true,
|
||||||
|
secure: process.env.NODE_ENV === "production",
|
||||||
|
sameSite: "lax",
|
||||||
|
maxAge: 3600000, // 1 hour
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clean up old sessions periodically
|
||||||
|
if (bullBoardSessions.size > 100) {
|
||||||
|
const now = Date.now();
|
||||||
|
for (const [sid, session] of bullBoardSessions.entries()) {
|
||||||
|
if (now - session.timestamp > 3600000) {
|
||||||
|
bullBoardSessions.delete(sid);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
app.use(`/admin/queues`, (req, res, next) => {
|
||||||
|
if (bullBoardRouter) {
|
||||||
|
return bullBoardRouter(req, res, next);
|
||||||
|
}
|
||||||
|
return res.status(503).json({ error: "Bull Board not initialized yet" });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Error handler specifically for Bull Board routes
|
||||||
|
app.use("/admin/queues", (err, req, res, _next) => {
|
||||||
|
console.error("Bull Board error on", req.method, req.url);
|
||||||
|
console.error("Error details:", err.message);
|
||||||
|
console.error("Stack:", err.stack);
|
||||||
|
if (process.env.ENABLE_LOGGING === "true") {
|
||||||
|
logger.error(`Bull Board error on ${req.method} ${req.url}:`, err);
|
||||||
|
}
|
||||||
|
res.status(500).json({
|
||||||
|
error: "Internal server error",
|
||||||
|
message: err.message,
|
||||||
|
path: req.path,
|
||||||
|
url: req.url,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
// Error handling middleware
|
// Error handling middleware
|
||||||
app.use((err, _req, res, _next) => {
|
app.use((err, _req, res, _next) => {
|
||||||
@@ -444,10 +580,7 @@ process.on("SIGINT", async () => {
|
|||||||
if (process.env.ENABLE_LOGGING === "true") {
|
if (process.env.ENABLE_LOGGING === "true") {
|
||||||
logger.info("SIGINT received, shutting down gracefully");
|
logger.info("SIGINT received, shutting down gracefully");
|
||||||
}
|
}
|
||||||
if (app.locals.session_cleanup_interval) {
|
await queueManager.shutdown();
|
||||||
clearInterval(app.locals.session_cleanup_interval);
|
|
||||||
}
|
|
||||||
updateScheduler.stop();
|
|
||||||
await disconnectPrisma(prisma);
|
await disconnectPrisma(prisma);
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
});
|
});
|
||||||
@@ -456,10 +589,7 @@ process.on("SIGTERM", async () => {
|
|||||||
if (process.env.ENABLE_LOGGING === "true") {
|
if (process.env.ENABLE_LOGGING === "true") {
|
||||||
logger.info("SIGTERM received, shutting down gracefully");
|
logger.info("SIGTERM received, shutting down gracefully");
|
||||||
}
|
}
|
||||||
if (app.locals.session_cleanup_interval) {
|
await queueManager.shutdown();
|
||||||
clearInterval(app.locals.session_cleanup_interval);
|
|
||||||
}
|
|
||||||
updateScheduler.stop();
|
|
||||||
await disconnectPrisma(prisma);
|
await disconnectPrisma(prisma);
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
});
|
});
|
||||||
@@ -728,34 +858,40 @@ async function startServer() {
|
|||||||
// Initialize dashboard preferences for all users
|
// Initialize dashboard preferences for all users
|
||||||
await initializeDashboardPreferences();
|
await initializeDashboardPreferences();
|
||||||
|
|
||||||
// Initial session cleanup
|
// Initialize BullMQ queue manager
|
||||||
await cleanup_expired_sessions();
|
await queueManager.initialize();
|
||||||
|
|
||||||
// Schedule session cleanup every hour
|
// Schedule recurring jobs
|
||||||
const session_cleanup_interval = setInterval(
|
await queueManager.scheduleAllJobs();
|
||||||
async () => {
|
|
||||||
try {
|
|
||||||
await cleanup_expired_sessions();
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Session cleanup error:", error);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
60 * 60 * 1000,
|
|
||||||
); // Every hour
|
|
||||||
|
|
||||||
app.listen(PORT, () => {
|
// Set up Bull Board for queue monitoring
|
||||||
|
const serverAdapter = new ExpressAdapter();
|
||||||
|
// Set basePath to match where we mount the router
|
||||||
|
serverAdapter.setBasePath("/admin/queues");
|
||||||
|
|
||||||
|
const { QUEUE_NAMES } = require("./services/automation");
|
||||||
|
const bullAdapters = Object.values(QUEUE_NAMES).map(
|
||||||
|
(queueName) => new BullMQAdapter(queueManager.queues[queueName]),
|
||||||
|
);
|
||||||
|
|
||||||
|
createBullBoard({
|
||||||
|
queues: bullAdapters,
|
||||||
|
serverAdapter: serverAdapter,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set the router for the Bull Board middleware (secured middleware above)
|
||||||
|
bullBoardRouter = serverAdapter.getRouter();
|
||||||
|
console.log("✅ Bull Board mounted at /admin/queues (secured)");
|
||||||
|
|
||||||
|
// Initialize WS layer with the underlying HTTP server
|
||||||
|
initAgentWs(server, prisma);
|
||||||
|
|
||||||
|
server.listen(PORT, () => {
|
||||||
if (process.env.ENABLE_LOGGING === "true") {
|
if (process.env.ENABLE_LOGGING === "true") {
|
||||||
logger.info(`Server running on port ${PORT}`);
|
logger.info(`Server running on port ${PORT}`);
|
||||||
logger.info(`Environment: ${process.env.NODE_ENV}`);
|
logger.info(`Environment: ${process.env.NODE_ENV}`);
|
||||||
logger.info("✅ Session cleanup scheduled (every hour)");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Start update scheduler
|
|
||||||
updateScheduler.start();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Store interval for cleanup on shutdown
|
|
||||||
app.locals.session_cleanup_interval = session_cleanup_interval;
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("❌ Failed to start server:", error.message);
|
console.error("❌ Failed to start server:", error.message);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
|
|||||||
190
backend/src/services/agentWs.js
Normal file
190
backend/src/services/agentWs.js
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
// Lightweight WebSocket hub for agent connections
|
||||||
|
// Auth: X-API-ID / X-API-KEY headers on the upgrade request
|
||||||
|
|
||||||
|
const WebSocket = require("ws");
|
||||||
|
const url = require("node:url");
|
||||||
|
|
||||||
|
// Connection registry by api_id
|
||||||
|
const apiIdToSocket = new Map();
|
||||||
|
|
||||||
|
// Connection metadata (secure/insecure)
|
||||||
|
// Map<api_id, { ws: WebSocket, secure: boolean }>
|
||||||
|
const connectionMetadata = new Map();
|
||||||
|
|
||||||
|
// Subscribers for connection status changes (for SSE)
|
||||||
|
// Map<api_id, Set<callback>>
|
||||||
|
const connectionChangeSubscribers = new Map();
|
||||||
|
|
||||||
|
let wss;
|
||||||
|
let prisma;
|
||||||
|
|
||||||
|
function init(server, prismaClient) {
|
||||||
|
prisma = prismaClient;
|
||||||
|
wss = new WebSocket.Server({ noServer: true });
|
||||||
|
|
||||||
|
// Handle HTTP upgrade events and authenticate before accepting WS
|
||||||
|
server.on("upgrade", async (request, socket, head) => {
|
||||||
|
try {
|
||||||
|
const { pathname } = url.parse(request.url);
|
||||||
|
if (!pathname || !pathname.startsWith("/api/")) {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expected path: /api/{v}/agents/ws
|
||||||
|
const parts = pathname.split("/").filter(Boolean); // [api, v1, agents, ws]
|
||||||
|
if (parts.length !== 4 || parts[2] !== "agents" || parts[3] !== "ws") {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const apiId = request.headers["x-api-id"];
|
||||||
|
const apiKey = request.headers["x-api-key"];
|
||||||
|
if (!apiId || !apiKey) {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate credentials
|
||||||
|
const host = await prisma.hosts.findUnique({ where: { api_id: apiId } });
|
||||||
|
if (!host || host.api_key !== apiKey) {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
wss.handleUpgrade(request, socket, head, (ws) => {
|
||||||
|
ws.apiId = apiId;
|
||||||
|
|
||||||
|
// Detect if connection is secure (wss://) or not (ws://)
|
||||||
|
const isSecure =
|
||||||
|
socket.encrypted || request.headers["x-forwarded-proto"] === "https";
|
||||||
|
|
||||||
|
apiIdToSocket.set(apiId, ws);
|
||||||
|
connectionMetadata.set(apiId, { ws, secure: isSecure });
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[agent-ws] connected api_id=${apiId} protocol=${isSecure ? "wss" : "ws"} total=${apiIdToSocket.size}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Notify subscribers of connection
|
||||||
|
notifyConnectionChange(apiId, true);
|
||||||
|
|
||||||
|
ws.on("message", () => {
|
||||||
|
// Currently we don't need to handle agent->server messages
|
||||||
|
});
|
||||||
|
|
||||||
|
ws.on("close", () => {
|
||||||
|
const existing = apiIdToSocket.get(apiId);
|
||||||
|
if (existing === ws) {
|
||||||
|
apiIdToSocket.delete(apiId);
|
||||||
|
connectionMetadata.delete(apiId);
|
||||||
|
// Notify subscribers of disconnection
|
||||||
|
notifyConnectionChange(apiId, false);
|
||||||
|
}
|
||||||
|
console.log(
|
||||||
|
`[agent-ws] disconnected api_id=${apiId} total=${apiIdToSocket.size}`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Optional: greet/ack
|
||||||
|
safeSend(ws, JSON.stringify({ type: "connected" }));
|
||||||
|
});
|
||||||
|
} catch (_err) {
|
||||||
|
try {
|
||||||
|
socket.destroy();
|
||||||
|
} catch {
|
||||||
|
/* ignore */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Send a payload over a WebSocket only when the socket exists and is OPEN.
 * Any error raised by send() is deliberately swallowed — delivery here is
 * best-effort and a broken socket is handled by its own close handler.
 * @param {object|undefined} ws - agent socket (may be undefined)
 * @param {string} data - serialized message
 */
function safeSend(ws, data) {
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    return;
  }
  try {
    ws.send(data);
  } catch {
    /* ignore */
  }
}
|
||||||
|
|
||||||
|
/**
 * Push a settings_update message (new polling interval) to every agent
 * socket currently registered in the connection map.
 * @param {number} newInterval - update interval to broadcast
 */
function broadcastSettingsUpdate(newInterval) {
  const message = JSON.stringify({
    type: "settings_update",
    update_interval: newInterval,
  });
  for (const ws of apiIdToSocket.values()) {
    safeSend(ws, message);
  }
}
|
||||||
|
|
||||||
|
/**
 * Ask a single connected agent to send its report immediately.
 * No-op (via safeSend) when the agent is not connected.
 * @param {string} apiId - agent identifier
 */
function pushReportNow(apiId) {
  safeSend(apiIdToSocket.get(apiId), JSON.stringify({ type: "report_now" }));
}
|
||||||
|
|
||||||
|
/**
 * Push a new update interval to one specific agent, if it is connected.
 * @param {string} apiId - agent identifier
 * @param {number} newInterval - update interval to deliver
 */
function pushSettingsUpdate(apiId, newInterval) {
  const payload = JSON.stringify({
    type: "settings_update",
    update_interval: newInterval,
  });
  safeSend(apiIdToSocket.get(apiId), payload);
}
|
||||||
|
|
||||||
|
/**
 * Notify every subscriber registered for this api_id that its connection
 * state changed. A throwing callback is logged and must not prevent the
 * remaining callbacks from running.
 * @param {string} apiId - agent identifier
 * @param {boolean} connected - true on connect, false on disconnect
 */
function notifyConnectionChange(apiId, connected) {
  const subscribers = connectionChangeSubscribers.get(apiId);
  if (!subscribers) {
    return;
  }
  for (const callback of subscribers) {
    try {
      callback(connected);
    } catch (err) {
      console.error(`[agent-ws] error notifying subscriber:`, err);
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Register a callback fired whenever the given api_id connects or
 * disconnects. Returns an unsubscribe function that removes the callback
 * and drops the per-api_id set once it becomes empty.
 * @param {string} apiId - agent identifier
 * @param {Function} callback - invoked with the boolean connection state
 * @returns {Function} unsubscribe
 */
function subscribeToConnectionChanges(apiId, callback) {
  let subscribers = connectionChangeSubscribers.get(apiId);
  if (!subscribers) {
    subscribers = new Set();
    connectionChangeSubscribers.set(apiId, subscribers);
  }
  subscribers.add(callback);

  return function unsubscribe() {
    const current = connectionChangeSubscribers.get(apiId);
    if (!current) {
      return;
    }
    current.delete(callback);
    if (current.size === 0) {
      connectionChangeSubscribers.delete(apiId);
    }
  };
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
init,
|
||||||
|
broadcastSettingsUpdate,
|
||||||
|
pushReportNow,
|
||||||
|
pushSettingsUpdate,
|
||||||
|
// Expose read-only view of connected agents
|
||||||
|
getConnectedApiIds: () => Array.from(apiIdToSocket.keys()),
|
||||||
|
isConnected: (apiId) => {
|
||||||
|
const ws = apiIdToSocket.get(apiId);
|
||||||
|
return !!ws && ws.readyState === WebSocket.OPEN;
|
||||||
|
},
|
||||||
|
// Get connection info including protocol (ws/wss)
|
||||||
|
getConnectionInfo: (apiId) => {
|
||||||
|
const metadata = connectionMetadata.get(apiId);
|
||||||
|
if (!metadata) {
|
||||||
|
return { connected: false, secure: false };
|
||||||
|
}
|
||||||
|
const connected = metadata.ws.readyState === WebSocket.OPEN;
|
||||||
|
return { connected, secure: metadata.secure };
|
||||||
|
},
|
||||||
|
// Subscribe to connection status changes (for SSE)
|
||||||
|
subscribeToConnectionChanges,
|
||||||
|
};
|
||||||
153
backend/src/services/automation/githubUpdateCheck.js
Normal file
153
backend/src/services/automation/githubUpdateCheck.js
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
const { compareVersions, checkPublicRepo } = require("./shared/utils");
|
||||||
|
|
||||||
|
/**
 * GitHub Update Check Automation
 *
 * Compares the locally installed version (read from package.json) against
 * the latest release reported by the public GitHub API, then records the
 * result (last_update_check / update_available / latest_version) in the
 * settings table.
 */
class GitHubUpdateCheck {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "github-update-check";
  }

  /**
   * Process a GitHub update check job.
   *
   * @param {object} _job - BullMQ job (unused; the check takes no input)
   * @returns {Promise<{success: boolean, currentVersion: string,
   *   latestVersion: string, isUpdateAvailable: boolean,
   *   executionTime: number}>}
   * @throws when the repo URL cannot be parsed or the version lookup fails
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🔍 Starting GitHub update check...");

    try {
      // Settings row may be absent (fresh install) — findFirst can return
      // null, which is why repoUrl falls back to the default below.
      const settings = await prisma.settings.findFirst();
      const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
      const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
      let owner, repo;

      // Parse GitHub repository URL (supports both HTTPS and SSH formats).
      // The trailing ".git" suffix is optional in both forms.
      if (repoUrl.includes("git@github.com:")) {
        const match = repoUrl.match(
          /git@github\.com:([^/]+)\/([^/]+?)(?:\.git)?$/,
        );
        if (match) {
          [, owner, repo] = match;
        }
      } else if (repoUrl.includes("github.com/")) {
        const match = repoUrl.match(
          /github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
        );
        if (match) {
          [, owner, repo] = match;
        }
      }

      if (!owner || !repo) {
        throw new Error("Could not parse GitHub repository URL");
      }

      // Always use HTTPS GitHub API (simpler and more reliable)
      const latestVersion = await checkPublicRepo(owner, repo);

      if (!latestVersion) {
        throw new Error("Could not determine latest version");
      }

      // Read the running version from package.json, with a static fallback.
      let currentVersion = "1.2.7"; // fallback
      try {
        const packageJson = require("../../../package.json");
        if (packageJson?.version) {
          currentVersion = packageJson.version;
        }
      } catch (packageError) {
        console.warn(
          "Could not read version from package.json:",
          packageError.message,
        );
      }

      const isUpdateAvailable =
        compareVersions(latestVersion, currentVersion) > 0;

      // Persist check results. Guarded: previously this dereferenced
      // settings.id unconditionally and threw a TypeError whenever no
      // settings row existed (the catch path below already guards).
      if (settings) {
        await prisma.settings.update({
          where: { id: settings.id },
          data: {
            last_update_check: new Date(),
            update_available: isUpdateAvailable,
            latest_version: latestVersion,
          },
        });
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
      );

      return {
        success: true,
        currentVersion,
        latestVersion,
        isUpdateAvailable,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ GitHub update check failed after ${executionTime}ms:`,
        error.message,
      );

      // Best-effort: still record that a check was attempted.
      try {
        const settings = await prisma.settings.findFirst();
        if (settings) {
          await prisma.settings.update({
            where: { id: settings.id },
            data: {
              last_update_check: new Date(),
              update_available: false,
            },
          });
        }
      } catch (updateError) {
        console.error(
          "❌ Error updating last check time:",
          updateError.message,
        );
      }

      throw error;
    }
  }

  /**
   * Schedule recurring GitHub update check (daily at midnight).
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "github-update-check",
      {},
      {
        repeat: { cron: "0 0 * * *" }, // Daily at midnight
        jobId: "github-update-check-recurring",
      },
    );
    console.log("✅ GitHub update check scheduled");
    return job;
  }

  /**
   * Trigger a one-off, high-priority GitHub update check.
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "github-update-check-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual GitHub update check triggered");
    return job;
  }
}

module.exports = GitHubUpdateCheck;
|
||||||
517
backend/src/services/automation/index.js
Normal file
517
backend/src/services/automation/index.js
Normal file
@@ -0,0 +1,517 @@
|
|||||||
|
const { Queue, Worker } = require("bullmq");
|
||||||
|
const { redis, redisConnection } = require("./shared/redis");
|
||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
const agentWs = require("../agentWs");
|
||||||
|
|
||||||
|
// Import automation classes
|
||||||
|
const GitHubUpdateCheck = require("./githubUpdateCheck");
|
||||||
|
const SessionCleanup = require("./sessionCleanup");
|
||||||
|
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
|
||||||
|
const OrphanedPackageCleanup = require("./orphanedPackageCleanup");
|
||||||
|
|
||||||
|
// Queue names
// Central registry of BullMQ queue identifiers; the values double as the
// keys of QueueManager.queues / .workers / .automations.
const QUEUE_NAMES = {
  GITHUB_UPDATE_CHECK: "github-update-check",
  SESSION_CLEANUP: "session-cleanup",
  ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
  ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup",
  AGENT_COMMANDS: "agent-commands",
};
|
||||||
|
|
||||||
|
/**
 * Main Queue Manager
 * Manages all BullMQ queues and workers
 *
 * Holds three maps keyed by queue name: `queues` (BullMQ Queue instances),
 * `workers` (BullMQ Worker instances) and `automations` (the classes that
 * implement process()/schedule()/triggerManual() for each queue).
 */
class QueueManager {
  constructor() {
    this.queues = {};
    this.workers = {};
    this.automations = {};
    this.isInitialized = false;
  }

  /**
   * Initialize all queues, workers, and automations
   * Order matters: queues are created first, then the automation
   * instances, then the workers that wrap them, then event listeners.
   */
  async initialize() {
    try {
      // NOTE(review): logged unconditionally — no explicit Redis ping is
      // performed here; connectivity problems surface later as queue errors.
      console.log("✅ Redis connection successful");

      // Initialize queues
      await this.initializeQueues();

      // Initialize automation classes
      await this.initializeAutomations();

      // Initialize workers
      await this.initializeWorkers();

      // Setup event listeners
      this.setupEventListeners();

      this.isInitialized = true;
      console.log("✅ Queue manager initialized successfully");
    } catch (error) {
      console.error("❌ Failed to initialize queue manager:", error.message);
      throw error;
    }
  }

  /**
   * Initialize all queues
   * Every queue shares the same Redis connection and default retry policy
   * (3 attempts with exponential backoff starting at 2s).
   */
  async initializeQueues() {
    for (const [_key, queueName] of Object.entries(QUEUE_NAMES)) {
      this.queues[queueName] = new Queue(queueName, {
        connection: redisConnection,
        defaultJobOptions: {
          removeOnComplete: 50, // Keep last 50 completed jobs
          removeOnFail: 20, // Keep last 20 failed jobs
          attempts: 3, // Retry failed jobs 3 times
          backoff: {
            type: "exponential",
            delay: 2000,
          },
        },
      });

      console.log(`✅ Queue '${queueName}' initialized`);
    }
  }

  /**
   * Initialize automation classes
   * Each automation receives this manager so it can reach its own queue.
   */
  async initializeAutomations() {
    this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
      this,
    );
    this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
    this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
      new OrphanedRepoCleanup(this);
    this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] =
      new OrphanedPackageCleanup(this);

    console.log("✅ All automation classes initialized");
  }

  /**
   * Initialize all workers
   * The four automation workers delegate to their automation's process();
   * agent-commands has an inline handler that relays commands over the
   * agent WebSocket and mirrors each attempt into the job_history table.
   */
  async initializeWorkers() {
    // GitHub Update Check Worker
    this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
      QUEUE_NAMES.GITHUB_UPDATE_CHECK,
      this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
        this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Session Cleanup Worker
    this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
      QUEUE_NAMES.SESSION_CLEANUP,
      this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.SESSION_CLEANUP],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Orphaned Repo Cleanup Worker
    this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
      QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
      this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Orphaned Package Cleanup Worker
    this.workers[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] = new Worker(
      QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
      this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Agent Commands Worker
    this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
      QUEUE_NAMES.AGENT_COMMANDS,
      async (job) => {
        const { api_id, type, update_interval } = job.data || {};
        console.log("[agent-commands] processing job", job.id, api_id, type);

        // Log job attempt to history - use job.id as the unique identifier
        const attemptNumber = job.attemptsMade || 1;
        const historyId = job.id; // Single row per job, updated with each attempt

        try {
          if (!api_id || !type) {
            throw new Error("invalid job data");
          }

          // Find host by api_id
          const host = await prisma.hosts.findUnique({
            where: { api_id },
            select: { id: true },
          });

          // Ensure agent is connected; if not, retry later
          // (throwing makes BullMQ re-run the job per the backoff policy).
          if (!agentWs.isConnected(api_id)) {
            const error = new Error("agent not connected");
            // Log failed attempt
            await prisma.job_history.upsert({
              where: { id: historyId },
              create: {
                id: historyId,
                job_id: job.id,
                queue_name: QUEUE_NAMES.AGENT_COMMANDS,
                job_name: type,
                host_id: host?.id,
                api_id,
                status: "failed",
                attempt_number: attemptNumber,
                error_message: error.message,
                created_at: new Date(),
                updated_at: new Date(),
              },
              update: {
                status: "failed",
                attempt_number: attemptNumber,
                error_message: error.message,
                updated_at: new Date(),
              },
            });
            console.log(
              "[agent-commands] agent not connected, will retry",
              api_id,
            );
            throw error;
          }

          // Process the command
          let result;
          if (type === "settings_update") {
            agentWs.pushSettingsUpdate(api_id, update_interval);
            console.log(
              "[agent-commands] delivered settings_update",
              api_id,
              update_interval,
            );
            result = { delivered: true, update_interval };
          } else if (type === "report_now") {
            agentWs.pushReportNow(api_id);
            console.log("[agent-commands] delivered report_now", api_id);
            result = { delivered: true };
          } else {
            throw new Error("unsupported agent command");
          }

          // Log successful completion
          await prisma.job_history.upsert({
            where: { id: historyId },
            create: {
              id: historyId,
              job_id: job.id,
              queue_name: QUEUE_NAMES.AGENT_COMMANDS,
              job_name: type,
              host_id: host?.id,
              api_id,
              status: "completed",
              attempt_number: attemptNumber,
              output: result,
              created_at: new Date(),
              updated_at: new Date(),
              completed_at: new Date(),
            },
            update: {
              status: "completed",
              attempt_number: attemptNumber,
              output: result,
              error_message: null,
              updated_at: new Date(),
              completed_at: new Date(),
            },
          });

          return result;
        } catch (error) {
          // Log error to history (if not already logged above)
          if (error.message !== "agent not connected") {
            const host = await prisma.hosts
              .findUnique({
                where: { api_id },
                select: { id: true },
              })
              .catch(() => null);

            await prisma.job_history
              .upsert({
                where: { id: historyId },
                create: {
                  id: historyId,
                  job_id: job.id,
                  queue_name: QUEUE_NAMES.AGENT_COMMANDS,
                  job_name: type || "unknown",
                  host_id: host?.id,
                  api_id,
                  status: "failed",
                  attempt_number: attemptNumber,
                  error_message: error.message,
                  created_at: new Date(),
                  updated_at: new Date(),
                },
                update: {
                  status: "failed",
                  attempt_number: attemptNumber,
                  error_message: error.message,
                  updated_at: new Date(),
                },
              })
              .catch((err) =>
                console.error("[agent-commands] failed to log error:", err),
              );
          }
          throw error;
        }
      },
      {
        connection: redisConnection,
        concurrency: 10,
      },
    );

    // Add error handling for all workers
    Object.values(this.workers).forEach((worker) => {
      worker.on("error", (error) => {
        console.error("Worker error:", error);
      });
    });

    console.log("✅ All workers initialized");
  }

  /**
   * Setup event listeners for all queues
   * NOTE(review): in BullMQ, 'completed'/'failed' are emitted by Worker /
   * QueueEvents rather than Queue — confirm these two listeners fire.
   */
  setupEventListeners() {
    for (const queueName of Object.values(QUEUE_NAMES)) {
      const queue = this.queues[queueName];
      queue.on("error", (error) => {
        console.error(`❌ Queue '${queueName}' experienced an error:`, error);
      });
      queue.on("failed", (job, err) => {
        console.error(
          `❌ Job '${job.id}' in queue '${queueName}' failed:`,
          err,
        );
      });
      queue.on("completed", (job) => {
        console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
      });
    }
    console.log("✅ Queue events initialized");
  }

  /**
   * Schedule all recurring jobs
   * Registers every automation's repeatable job with its queue.
   */
  async scheduleAllJobs() {
    await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
    await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule();
  }

  /**
   * Manual job triggers
   * Thin pass-throughs to each automation's triggerManual().
   */
  async triggerGitHubUpdateCheck() {
    return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
  }

  async triggerSessionCleanup() {
    return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
  }

  async triggerOrphanedRepoCleanup() {
    return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
  }

  async triggerOrphanedPackageCleanup() {
    return this.automations[
      QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP
    ].triggerManual();
  }

  /**
   * Get queue statistics
   * Returns counts of waiting/active/completed/failed/delayed jobs.
   * @throws when queueName is unknown
   */
  async getQueueStats(queueName) {
    const queue = this.queues[queueName];
    if (!queue) {
      throw new Error(`Queue ${queueName} not found`);
    }

    const [waiting, active, completed, failed, delayed] = await Promise.all([
      queue.getWaiting(),
      queue.getActive(),
      queue.getCompleted(),
      queue.getFailed(),
      queue.getDelayed(),
    ]);

    return {
      waiting: waiting.length,
      active: active.length,
      completed: completed.length,
      failed: failed.length,
      delayed: delayed.length,
    };
  }

  /**
   * Get all queue statistics
   * Aggregates getQueueStats() for every registered queue.
   */
  async getAllQueueStats() {
    const stats = {};
    for (const queueName of Object.values(QUEUE_NAMES)) {
      stats[queueName] = await this.getQueueStats(queueName);
    }
    return stats;
  }

  /**
   * Get recent jobs for a queue
   * Merges completed and failed jobs, newest (by finishedOn) first.
   */
  async getRecentJobs(queueName, limit = 10) {
    const queue = this.queues[queueName];
    if (!queue) {
      throw new Error(`Queue ${queueName} not found`);
    }

    const [completed, failed] = await Promise.all([
      queue.getCompleted(0, limit - 1),
      queue.getFailed(0, limit - 1),
    ]);

    return [...completed, ...failed]
      .sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
      .slice(0, limit);
  }

  /**
   * Get jobs for a specific host (by API ID)
   * Combines live agent-commands queue counts (filtered in-process by
   * job.data.api_id) with the persisted job_history rows for that host.
   */
  async getHostJobs(apiId, limit = 20) {
    const queue = this.queues[QUEUE_NAMES.AGENT_COMMANDS];
    if (!queue) {
      throw new Error(`Queue ${QUEUE_NAMES.AGENT_COMMANDS} not found`);
    }

    console.log(`[getHostJobs] Looking for jobs with api_id: ${apiId}`);

    // Get active queue status (waiting, active, delayed, failed)
    const [waiting, active, delayed, failed] = await Promise.all([
      queue.getWaiting(),
      queue.getActive(),
      queue.getDelayed(),
      queue.getFailed(),
    ]);

    // Filter by API ID
    const filterByApiId = (jobs) =>
      jobs.filter((job) => job.data && job.data.api_id === apiId);

    const waitingCount = filterByApiId(waiting).length;
    const activeCount = filterByApiId(active).length;
    const delayedCount = filterByApiId(delayed).length;
    const failedCount = filterByApiId(failed).length;

    console.log(
      `[getHostJobs] Queue status - Waiting: ${waitingCount}, Active: ${activeCount}, Delayed: ${delayedCount}, Failed: ${failedCount}`,
    );

    // Get job history from database (shows all attempts and status changes)
    const jobHistory = await prisma.job_history.findMany({
      where: {
        api_id: apiId,
      },
      orderBy: {
        created_at: "desc",
      },
      take: limit,
    });

    console.log(
      `[getHostJobs] Found ${jobHistory.length} job history records for api_id: ${apiId}`,
    );

    return {
      waiting: waitingCount,
      active: activeCount,
      delayed: delayedCount,
      failed: failedCount,
      jobHistory: jobHistory.map((job) => ({
        id: job.id,
        job_id: job.job_id,
        job_name: job.job_name,
        status: job.status,
        attempt_number: job.attempt_number,
        error_message: job.error_message,
        output: job.output,
        created_at: job.created_at,
        updated_at: job.updated_at,
        completed_at: job.completed_at,
      })),
    };
  }

  /**
   * Graceful shutdown
   * Closes every queue and its worker (best-effort, logging failures),
   * then quits the shared Redis connection.
   */
  async shutdown() {
    console.log("🛑 Shutting down queue manager...");

    for (const queueName of Object.keys(this.queues)) {
      try {
        await this.queues[queueName].close();
      } catch (e) {
        console.warn(
          `⚠️ Failed to close queue '${queueName}':`,
          e?.message || e,
        );
      }
      if (this.workers?.[queueName]) {
        try {
          await this.workers[queueName].close();
        } catch (e) {
          console.warn(
            `⚠️ Failed to close worker for '${queueName}':`,
            e?.message || e,
          );
        }
      }
    }

    await redis.quit();
    console.log("✅ Queue manager shutdown complete");
  }
}

// Singleton instance shared by the whole backend.
const queueManager = new QueueManager();

module.exports = { queueManager, QUEUE_NAMES };
|
||||||
116
backend/src/services/automation/orphanedPackageCleanup.js
Normal file
116
backend/src/services/automation/orphanedPackageCleanup.js
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
|
||||||
|
/**
 * Orphaned Package Cleanup Automation
 *
 * Deletes every package row that no host currently references
 * (empty host_packages relation).
 */
class OrphanedPackageCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "orphaned-package-cleanup";
  }

  /**
   * Run one cleanup pass.
   * @param {object} _job - BullMQ job (unused)
   * @returns {Promise<{success: boolean, deletedCount: number,
   *   deletedPackages: Array, executionTime: number}>}
   */
  async process(_job) {
    const startedAt = Date.now();
    console.log("🧹 Starting orphaned package cleanup...");

    try {
      // A package is orphaned when its host_packages relation is empty.
      const orphanedPackages = await prisma.packages.findMany({
        where: {
          host_packages: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              host_packages: true,
            },
          },
        },
      });

      const deletedPackages = [];

      // Delete each orphan individually so one failure doesn't stop the rest.
      for (const pkg of orphanedPackages) {
        try {
          await prisma.packages.delete({ where: { id: pkg.id } });
          deletedPackages.push({
            id: pkg.id,
            name: pkg.name,
            description: pkg.description,
            category: pkg.category,
            latest_version: pkg.latest_version,
          });
          console.log(
            `🗑️ Deleted orphaned package: ${pkg.name} (${pkg.latest_version})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete package ${pkg.id}:`,
            deleteError.message,
          );
        }
      }

      const deletedCount = deletedPackages.length;
      const executionTime = Date.now() - startedAt;
      console.log(
        `✅ Orphaned package cleanup completed in ${executionTime}ms - Deleted ${deletedCount} packages`,
      );

      return {
        success: true,
        deletedCount,
        deletedPackages,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startedAt;
      console.error(
        `❌ Orphaned package cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Register the recurring cleanup job (daily at 3 AM).
   */
  async schedule() {
    const queue = this.queueManager.queues[this.queueName];
    const job = await queue.add(
      "orphaned-package-cleanup",
      {},
      {
        repeat: { cron: "0 3 * * *" }, // Daily at 3 AM
        jobId: "orphaned-package-cleanup-recurring",
      },
    );
    console.log("✅ Orphaned package cleanup scheduled");
    return job;
  }

  /**
   * Queue a one-off, high-priority cleanup run.
   */
  async triggerManual() {
    const queue = this.queueManager.queues[this.queueName];
    const job = await queue.add(
      "orphaned-package-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual orphaned package cleanup triggered");
    return job;
  }
}

module.exports = OrphanedPackageCleanup;
|
||||||
114
backend/src/services/automation/orphanedRepoCleanup.js
Normal file
114
backend/src/services/automation/orphanedRepoCleanup.js
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
|
||||||
|
/**
 * Orphaned Repository Cleanup Automation
 *
 * Deletes every repository row that no host currently references
 * (empty host_repositories relation).
 */
class OrphanedRepoCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "orphaned-repo-cleanup";
  }

  /**
   * Run one cleanup pass.
   * @param {object} _job - BullMQ job (unused)
   * @returns {Promise<{success: boolean, deletedCount: number,
   *   deletedRepos: Array, executionTime: number}>}
   */
  async process(_job) {
    const startedAt = Date.now();
    console.log("🧹 Starting orphaned repository cleanup...");

    try {
      // A repository is orphaned when its host_repositories relation is empty.
      const orphanedRepos = await prisma.repositories.findMany({
        where: {
          host_repositories: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              host_repositories: true,
            },
          },
        },
      });

      const deletedRepos = [];

      // Delete each orphan individually so one failure doesn't stop the rest.
      for (const repo of orphanedRepos) {
        try {
          await prisma.repositories.delete({ where: { id: repo.id } });
          deletedRepos.push({
            id: repo.id,
            name: repo.name,
            url: repo.url,
          });
          console.log(
            `🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete repository ${repo.id}:`,
            deleteError.message,
          );
        }
      }

      const deletedCount = deletedRepos.length;
      const executionTime = Date.now() - startedAt;
      console.log(
        `✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
      );

      return {
        success: true,
        deletedCount,
        deletedRepos,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startedAt;
      console.error(
        `❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Register the recurring cleanup job (daily at 2 AM).
   */
  async schedule() {
    const queue = this.queueManager.queues[this.queueName];
    const job = await queue.add(
      "orphaned-repo-cleanup",
      {},
      {
        repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
        jobId: "orphaned-repo-cleanup-recurring",
      },
    );
    console.log("✅ Orphaned repository cleanup scheduled");
    return job;
  }

  /**
   * Queue a one-off, high-priority cleanup run.
   */
  async triggerManual() {
    const queue = this.queueManager.queues[this.queueName];
    const job = await queue.add(
      "orphaned-repo-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual orphaned repository cleanup triggered");
    return job;
  }
}

module.exports = OrphanedRepoCleanup;
|
||||||
77
backend/src/services/automation/sessionCleanup.js
Normal file
77
backend/src/services/automation/sessionCleanup.js
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session Cleanup Automation
|
||||||
|
* Cleans up expired user sessions
|
||||||
|
*/
|
||||||
|
class SessionCleanup {
|
||||||
|
constructor(queueManager) {
|
||||||
|
this.queueManager = queueManager;
|
||||||
|
this.queueName = "session-cleanup";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process session cleanup job
|
||||||
|
*/
|
||||||
|
async process(_job) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
console.log("🧹 Starting session cleanup...");
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await prisma.user_sessions.deleteMany({
|
||||||
|
where: {
|
||||||
|
OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.log(
|
||||||
|
`✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
sessionsCleaned: result.count,
|
||||||
|
executionTime,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.error(
|
||||||
|
`❌ Session cleanup failed after ${executionTime}ms:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule recurring session cleanup (every hour)
|
||||||
|
*/
|
||||||
|
async schedule() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"session-cleanup",
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
repeat: { cron: "0 * * * *" }, // Every hour
|
||||||
|
jobId: "session-cleanup-recurring",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
console.log("✅ Session cleanup scheduled");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger manual session cleanup
|
||||||
|
*/
|
||||||
|
async triggerManual() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"session-cleanup-manual",
|
||||||
|
{},
|
||||||
|
{ priority: 1 },
|
||||||
|
);
|
||||||
|
console.log("✅ Manual session cleanup triggered");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = SessionCleanup;
|
||||||
5
backend/src/services/automation/shared/prisma.js
Normal file
5
backend/src/services/automation/shared/prisma.js
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
const { PrismaClient } = require("@prisma/client");
|
||||||
|
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
|
module.exports = { prisma };
|
||||||
16
backend/src/services/automation/shared/redis.js
Normal file
16
backend/src/services/automation/shared/redis.js
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
const IORedis = require("ioredis");
|
||||||
|
|
||||||
|
// Redis connection configuration
|
||||||
|
const redisConnection = {
|
||||||
|
host: process.env.REDIS_HOST || "localhost",
|
||||||
|
port: parseInt(process.env.REDIS_PORT, 10) || 6379,
|
||||||
|
password: process.env.REDIS_PASSWORD || undefined,
|
||||||
|
db: parseInt(process.env.REDIS_DB, 10) || 0,
|
||||||
|
retryDelayOnFailover: 100,
|
||||||
|
maxRetriesPerRequest: null, // BullMQ requires this to be null
|
||||||
|
};
|
||||||
|
|
||||||
|
// Create Redis connection
|
||||||
|
const redis = new IORedis(redisConnection);
|
||||||
|
|
||||||
|
module.exports = { redis, redisConnection };
|
||||||
82
backend/src/services/automation/shared/utils.js
Normal file
82
backend/src/services/automation/shared/utils.js
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
// Common utilities for automation jobs
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compare two semantic versions
|
||||||
|
* @param {string} version1 - First version
|
||||||
|
* @param {string} version2 - Second version
|
||||||
|
* @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
|
||||||
|
*/
|
||||||
|
function compareVersions(version1, version2) {
|
||||||
|
const v1parts = version1.split(".").map(Number);
|
||||||
|
const v2parts = version2.split(".").map(Number);
|
||||||
|
|
||||||
|
const maxLength = Math.max(v1parts.length, v2parts.length);
|
||||||
|
|
||||||
|
for (let i = 0; i < maxLength; i++) {
|
||||||
|
const v1part = v1parts[i] || 0;
|
||||||
|
const v2part = v2parts[i] || 0;
|
||||||
|
|
||||||
|
if (v1part > v2part) return 1;
|
||||||
|
if (v1part < v2part) return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check public GitHub repository for latest release
|
||||||
|
* @param {string} owner - Repository owner
|
||||||
|
* @param {string} repo - Repository name
|
||||||
|
* @returns {Promise<string|null>} - Latest version or null
|
||||||
|
*/
|
||||||
|
async function checkPublicRepo(owner, repo) {
|
||||||
|
try {
|
||||||
|
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
|
||||||
|
|
||||||
|
let currentVersion = "1.2.7"; // fallback
|
||||||
|
try {
|
||||||
|
const packageJson = require("../../../package.json");
|
||||||
|
if (packageJson?.version) {
|
||||||
|
currentVersion = packageJson.version;
|
||||||
|
}
|
||||||
|
} catch (packageError) {
|
||||||
|
console.warn(
|
||||||
|
"Could not read version from package.json for User-Agent, using fallback:",
|
||||||
|
packageError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(httpsRepoUrl, {
|
||||||
|
method: "GET",
|
||||||
|
headers: {
|
||||||
|
Accept: "application/vnd.github.v3+json",
|
||||||
|
"User-Agent": `PatchMon-Server/${currentVersion}`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text();
|
||||||
|
if (
|
||||||
|
errorText.includes("rate limit") ||
|
||||||
|
errorText.includes("API rate limit")
|
||||||
|
) {
|
||||||
|
console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw new Error(
|
||||||
|
`GitHub API error: ${response.status} ${response.statusText}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const releaseData = await response.json();
|
||||||
|
return releaseData.tag_name.replace("v", "");
|
||||||
|
} catch (error) {
|
||||||
|
console.error("GitHub API error:", error.message);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
compareVersions,
|
||||||
|
checkPublicRepo,
|
||||||
|
};
|
||||||
@@ -1,295 +0,0 @@
|
|||||||
const { PrismaClient } = require("@prisma/client");
|
|
||||||
const { exec } = require("node:child_process");
|
|
||||||
const { promisify } = require("node:util");
|
|
||||||
|
|
||||||
const prisma = new PrismaClient();
|
|
||||||
const execAsync = promisify(exec);
|
|
||||||
|
|
||||||
class UpdateScheduler {
|
|
||||||
constructor() {
|
|
||||||
this.isRunning = false;
|
|
||||||
this.intervalId = null;
|
|
||||||
this.checkInterval = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
|
|
||||||
}
|
|
||||||
|
|
||||||
// Start the scheduler
|
|
||||||
start() {
|
|
||||||
if (this.isRunning) {
|
|
||||||
console.log("Update scheduler is already running");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log("🔄 Starting update scheduler...");
|
|
||||||
this.isRunning = true;
|
|
||||||
|
|
||||||
// Run initial check
|
|
||||||
this.checkForUpdates();
|
|
||||||
|
|
||||||
// Schedule regular checks
|
|
||||||
this.intervalId = setInterval(() => {
|
|
||||||
this.checkForUpdates();
|
|
||||||
}, this.checkInterval);
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
`✅ Update scheduler started - checking every ${this.checkInterval / (60 * 60 * 1000)} hours`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Stop the scheduler
|
|
||||||
stop() {
|
|
||||||
if (!this.isRunning) {
|
|
||||||
console.log("Update scheduler is not running");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log("🛑 Stopping update scheduler...");
|
|
||||||
this.isRunning = false;
|
|
||||||
|
|
||||||
if (this.intervalId) {
|
|
||||||
clearInterval(this.intervalId);
|
|
||||||
this.intervalId = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log("✅ Update scheduler stopped");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for updates
|
|
||||||
async checkForUpdates() {
|
|
||||||
try {
|
|
||||||
console.log("🔍 Checking for updates...");
|
|
||||||
|
|
||||||
// Get settings
|
|
||||||
const settings = await prisma.settings.findFirst();
|
|
||||||
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
|
|
||||||
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
|
|
||||||
let owner, repo;
|
|
||||||
|
|
||||||
if (repoUrl.includes("git@github.com:")) {
|
|
||||||
const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
|
|
||||||
if (match) {
|
|
||||||
[, owner, repo] = match;
|
|
||||||
}
|
|
||||||
} else if (repoUrl.includes("github.com/")) {
|
|
||||||
const match = repoUrl.match(
|
|
||||||
/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
|
|
||||||
);
|
|
||||||
if (match) {
|
|
||||||
[, owner, repo] = match;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!owner || !repo) {
|
|
||||||
console.log(
|
|
||||||
"⚠️ Could not parse GitHub repository URL, skipping update check",
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let latestVersion;
|
|
||||||
const isPrivate = settings.repositoryType === "private";
|
|
||||||
|
|
||||||
if (isPrivate) {
|
|
||||||
// Use SSH for private repositories
|
|
||||||
latestVersion = await this.checkPrivateRepo(settings, owner, repo);
|
|
||||||
} else {
|
|
||||||
// Use GitHub API for public repositories
|
|
||||||
latestVersion = await this.checkPublicRepo(owner, repo);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!latestVersion) {
|
|
||||||
console.log(
|
|
||||||
"⚠️ Could not determine latest version, skipping update check",
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read version from package.json dynamically
|
|
||||||
let currentVersion = "1.2.7"; // fallback
|
|
||||||
try {
|
|
||||||
const packageJson = require("../../package.json");
|
|
||||||
if (packageJson?.version) {
|
|
||||||
currentVersion = packageJson.version;
|
|
||||||
}
|
|
||||||
} catch (packageError) {
|
|
||||||
console.warn(
|
|
||||||
"Could not read version from package.json, using fallback:",
|
|
||||||
packageError.message,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const isUpdateAvailable =
|
|
||||||
this.compareVersions(latestVersion, currentVersion) > 0;
|
|
||||||
|
|
||||||
// Update settings with check results
|
|
||||||
await prisma.settings.update({
|
|
||||||
where: { id: settings.id },
|
|
||||||
data: {
|
|
||||||
last_update_check: new Date(),
|
|
||||||
update_available: isUpdateAvailable,
|
|
||||||
latest_version: latestVersion,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
`✅ Update check completed - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
console.error("❌ Error checking for updates:", error.message);
|
|
||||||
|
|
||||||
// Update last check time even on error
|
|
||||||
try {
|
|
||||||
const settings = await prisma.settings.findFirst();
|
|
||||||
if (settings) {
|
|
||||||
await prisma.settings.update({
|
|
||||||
where: { id: settings.id },
|
|
||||||
data: {
|
|
||||||
last_update_check: new Date(),
|
|
||||||
update_available: false,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (updateError) {
|
|
||||||
console.error(
|
|
||||||
"❌ Error updating last check time:",
|
|
||||||
updateError.message,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check private repository using SSH
|
|
||||||
async checkPrivateRepo(settings, owner, repo) {
|
|
||||||
try {
|
|
||||||
let sshKeyPath = settings.sshKeyPath;
|
|
||||||
|
|
||||||
// Try to find SSH key if not configured
|
|
||||||
if (!sshKeyPath) {
|
|
||||||
const possibleKeyPaths = [
|
|
||||||
"/root/.ssh/id_ed25519",
|
|
||||||
"/root/.ssh/id_rsa",
|
|
||||||
"/home/patchmon/.ssh/id_ed25519",
|
|
||||||
"/home/patchmon/.ssh/id_rsa",
|
|
||||||
"/var/www/.ssh/id_ed25519",
|
|
||||||
"/var/www/.ssh/id_rsa",
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const path of possibleKeyPaths) {
|
|
||||||
try {
|
|
||||||
require("node:fs").accessSync(path);
|
|
||||||
sshKeyPath = path;
|
|
||||||
break;
|
|
||||||
} catch {
|
|
||||||
// Key not found at this path, try next
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sshKeyPath) {
|
|
||||||
throw new Error("No SSH deploy key found");
|
|
||||||
}
|
|
||||||
|
|
||||||
const sshRepoUrl = `git@github.com:${owner}/${repo}.git`;
|
|
||||||
const env = {
|
|
||||||
...process.env,
|
|
||||||
GIT_SSH_COMMAND: `ssh -i ${sshKeyPath} -o StrictHostKeyChecking=no -o IdentitiesOnly=yes`,
|
|
||||||
};
|
|
||||||
|
|
||||||
const { stdout: sshLatestTag } = await execAsync(
|
|
||||||
`git ls-remote --tags --sort=-version:refname ${sshRepoUrl} | head -n 1 | sed 's/.*refs\\/tags\\///' | sed 's/\\^{}//'`,
|
|
||||||
{
|
|
||||||
timeout: 10000,
|
|
||||||
env: env,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
return sshLatestTag.trim().replace("v", "");
|
|
||||||
} catch (error) {
|
|
||||||
console.error("SSH Git error:", error.message);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check public repository using GitHub API
|
|
||||||
async checkPublicRepo(owner, repo) {
|
|
||||||
try {
|
|
||||||
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
|
|
||||||
|
|
||||||
// Get current version for User-Agent
|
|
||||||
let currentVersion = "1.2.7"; // fallback
|
|
||||||
try {
|
|
||||||
const packageJson = require("../../package.json");
|
|
||||||
if (packageJson?.version) {
|
|
||||||
currentVersion = packageJson.version;
|
|
||||||
}
|
|
||||||
} catch (packageError) {
|
|
||||||
console.warn(
|
|
||||||
"Could not read version from package.json for User-Agent, using fallback:",
|
|
||||||
packageError.message,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(httpsRepoUrl, {
|
|
||||||
method: "GET",
|
|
||||||
headers: {
|
|
||||||
Accept: "application/vnd.github.v3+json",
|
|
||||||
"User-Agent": `PatchMon-Server/${currentVersion}`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text();
|
|
||||||
if (
|
|
||||||
errorText.includes("rate limit") ||
|
|
||||||
errorText.includes("API rate limit")
|
|
||||||
) {
|
|
||||||
console.log(
|
|
||||||
"⚠️ GitHub API rate limit exceeded, skipping update check",
|
|
||||||
);
|
|
||||||
return null; // Return null instead of throwing error
|
|
||||||
}
|
|
||||||
throw new Error(
|
|
||||||
`GitHub API error: ${response.status} ${response.statusText}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const releaseData = await response.json();
|
|
||||||
return releaseData.tag_name.replace("v", "");
|
|
||||||
} catch (error) {
|
|
||||||
console.error("GitHub API error:", error.message);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Compare version strings (semantic versioning)
|
|
||||||
compareVersions(version1, version2) {
|
|
||||||
const v1parts = version1.split(".").map(Number);
|
|
||||||
const v2parts = version2.split(".").map(Number);
|
|
||||||
|
|
||||||
const maxLength = Math.max(v1parts.length, v2parts.length);
|
|
||||||
|
|
||||||
for (let i = 0; i < maxLength; i++) {
|
|
||||||
const v1part = v1parts[i] || 0;
|
|
||||||
const v2part = v2parts[i] || 0;
|
|
||||||
|
|
||||||
if (v1part > v2part) return 1;
|
|
||||||
if (v1part < v2part) return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get scheduler status
|
|
||||||
getStatus() {
|
|
||||||
return {
|
|
||||||
isRunning: this.isRunning,
|
|
||||||
checkInterval: this.checkInterval,
|
|
||||||
nextCheck: this.isRunning
|
|
||||||
? new Date(Date.now() + this.checkInterval)
|
|
||||||
: null,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create singleton instance
|
|
||||||
const updateScheduler = new UpdateScheduler();
|
|
||||||
|
|
||||||
module.exports = updateScheduler;
|
|
||||||
@@ -2,9 +2,10 @@
|
|||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
PatchMon is a containerised application that monitors system patches and updates. The application consists of three main services:
|
PatchMon is a containerised application that monitors system patches and updates. The application consists of four main services:
|
||||||
|
|
||||||
- **Database**: PostgreSQL 17
|
- **Database**: PostgreSQL 17
|
||||||
|
- **Redis**: Redis 7 for BullMQ job queues and caching
|
||||||
- **Backend**: Node.js API server
|
- **Backend**: Node.js API server
|
||||||
- **Frontend**: React application served via NGINX
|
- **Frontend**: React application served via NGINX
|
||||||
|
|
||||||
@@ -38,21 +39,31 @@ These tags are available for both backend and frontend images as they are versio
|
|||||||
environment:
|
environment:
|
||||||
DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
|
DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
|
||||||
```
|
```
|
||||||
4. Generate a strong JWT secret. You can do this like so:
|
4. Set a Redis password in the Redis service where it says:
|
||||||
|
```yaml
|
||||||
|
environment:
|
||||||
|
REDIS_PASSWORD: # CREATE A STRONG REDIS PASSWORD AND PUT IT HERE
|
||||||
|
```
|
||||||
|
5. Update the corresponding `REDIS_PASSWORD` in the backend service where it says:
|
||||||
|
```yaml
|
||||||
|
environment:
|
||||||
|
REDIS_PASSWORD: REPLACE_YOUR_REDIS_PASSWORD_HERE
|
||||||
|
```
|
||||||
|
6. Generate a strong JWT secret. You can do this like so:
|
||||||
```bash
|
```bash
|
||||||
openssl rand -hex 64
|
openssl rand -hex 64
|
||||||
```
|
```
|
||||||
5. Set a JWT secret in the backend service where it says:
|
7. Set a JWT secret in the backend service where it says:
|
||||||
```yaml
|
```yaml
|
||||||
environment:
|
environment:
|
||||||
JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE
|
JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE
|
||||||
```
|
```
|
||||||
6. Configure environment variables (see [Configuration](#configuration) section)
|
8. Configure environment variables (see [Configuration](#configuration) section)
|
||||||
7. Start the application:
|
9. Start the application:
|
||||||
```bash
|
```bash
|
||||||
docker compose up -d
|
docker compose up -d
|
||||||
```
|
```
|
||||||
8. Access the application at `http://localhost:3000`
|
10. Access the application at `http://localhost:3000`
|
||||||
|
|
||||||
## Updating
|
## Updating
|
||||||
|
|
||||||
@@ -106,6 +117,12 @@ When you do this, updating to a new version requires manually updating the image
|
|||||||
| `POSTGRES_USER` | Database user | `patchmon_user` |
|
| `POSTGRES_USER` | Database user | `patchmon_user` |
|
||||||
| `POSTGRES_PASSWORD` | Database password | **MUST BE SET!** |
|
| `POSTGRES_PASSWORD` | Database password | **MUST BE SET!** |
|
||||||
|
|
||||||
|
#### Redis Service
|
||||||
|
|
||||||
|
| Variable | Description | Default |
|
||||||
|
| -------------- | ------------------ | ---------------- |
|
||||||
|
| `REDIS_PASSWORD` | Redis password | **MUST BE SET!** |
|
||||||
|
|
||||||
#### Backend Service
|
#### Backend Service
|
||||||
|
|
||||||
##### Database Configuration
|
##### Database Configuration
|
||||||
@@ -116,6 +133,15 @@ When you do this, updating to a new version requires manually updating the image
|
|||||||
| `PM_DB_CONN_MAX_ATTEMPTS` | Maximum database connection attempts | `30` |
|
| `PM_DB_CONN_MAX_ATTEMPTS` | Maximum database connection attempts | `30` |
|
||||||
| `PM_DB_CONN_WAIT_INTERVAL` | Wait interval between connection attempts in seconds | `2` |
|
| `PM_DB_CONN_WAIT_INTERVAL` | Wait interval between connection attempts in seconds | `2` |
|
||||||
|
|
||||||
|
##### Redis Configuration
|
||||||
|
|
||||||
|
| Variable | Description | Default |
|
||||||
|
| --------------- | ------------------------------ | ------- |
|
||||||
|
| `REDIS_HOST` | Redis server hostname | `redis` |
|
||||||
|
| `REDIS_PORT` | Redis server port | `6379` |
|
||||||
|
| `REDIS_PASSWORD` | Redis authentication password | **MUST BE UPDATED WITH YOUR REDIS_PASSWORD!** |
|
||||||
|
| `REDIS_DB` | Redis database number | `0` |
|
||||||
|
|
||||||
##### Authentication & Security
|
##### Authentication & Security
|
||||||
|
|
||||||
| Variable | Description | Default |
|
| Variable | Description | Default |
|
||||||
@@ -165,9 +191,10 @@ When you do this, updating to a new version requires manually updating the image
|
|||||||
|
|
||||||
### Volumes
|
### Volumes
|
||||||
|
|
||||||
The compose file creates two Docker volumes:
|
The compose file creates three Docker volumes:
|
||||||
|
|
||||||
* `postgres_data`: PostgreSQL's data directory.
|
* `postgres_data`: PostgreSQL's data directory.
|
||||||
|
* `redis_data`: Redis's data directory.
|
||||||
* `agent_files`: PatchMon's agent files.
|
* `agent_files`: PatchMon's agent files.
|
||||||
|
|
||||||
If you wish to bind either if their respective container paths to a host path rather than a Docker volume, you can do so in the Docker Compose file.
|
If you wish to bind either if their respective container paths to a host path rather than a Docker volume, you can do so in the Docker Compose file.
|
||||||
@@ -201,6 +228,7 @@ For development with live reload and source code mounting:
|
|||||||
- Frontend: `http://localhost:3000`
|
- Frontend: `http://localhost:3000`
|
||||||
- Backend API: `http://localhost:3001`
|
- Backend API: `http://localhost:3001`
|
||||||
- Database: `localhost:5432`
|
- Database: `localhost:5432`
|
||||||
|
- Redis: `localhost:6379`
|
||||||
|
|
||||||
## Development Docker Compose
|
## Development Docker Compose
|
||||||
|
|
||||||
@@ -254,6 +282,7 @@ docker compose -f docker/docker-compose.dev.yml up -d --build
|
|||||||
### Development Ports
|
### Development Ports
|
||||||
The development setup exposes additional ports for debugging:
|
The development setup exposes additional ports for debugging:
|
||||||
- **Database**: `5432` - Direct PostgreSQL access
|
- **Database**: `5432` - Direct PostgreSQL access
|
||||||
|
- **Redis**: `6379` - Direct Redis access
|
||||||
- **Backend**: `3001` - API server with development features
|
- **Backend**: `3001` - API server with development features
|
||||||
- **Frontend**: `3000` - React development server with hot reload
|
- **Frontend**: `3000` - React development server with hot reload
|
||||||
|
|
||||||
@@ -277,8 +306,8 @@ The development setup exposes additional ports for debugging:
|
|||||||
- **Prisma Schema Changes**: Backend service restarts automatically
|
- **Prisma Schema Changes**: Backend service restarts automatically
|
||||||
|
|
||||||
4. **Database Access**: Connect database client directly to `localhost:5432`
|
4. **Database Access**: Connect database client directly to `localhost:5432`
|
||||||
|
5. **Redis Access**: Connect Redis client directly to `localhost:6379`
|
||||||
5. **Debug**: If started with `docker compose [...] up -d` or `docker compose [...] watch`, check logs manually:
|
6. **Debug**: If started with `docker compose [...] up -d` or `docker compose [...] watch`, check logs manually:
|
||||||
```bash
|
```bash
|
||||||
docker compose -f docker/docker-compose.dev.yml logs -f
|
docker compose -f docker/docker-compose.dev.yml logs -f
|
||||||
```
|
```
|
||||||
@@ -288,6 +317,6 @@ The development setup exposes additional ports for debugging:
|
|||||||
|
|
||||||
- **Hot Reload**: Automatic code synchronization and service restarts
|
- **Hot Reload**: Automatic code synchronization and service restarts
|
||||||
- **Enhanced Logging**: Detailed logs for debugging
|
- **Enhanced Logging**: Detailed logs for debugging
|
||||||
- **Direct Access**: Exposed ports for database and API debugging
|
- **Direct Access**: Exposed ports for database, Redis, and API debugging
|
||||||
- **Health Checks**: Built-in health monitoring for services
|
- **Health Checks**: Built-in health monitoring for services
|
||||||
- **Volume Persistence**: Development data persists between restarts
|
- **Volume Persistence**: Development data persists between restarts
|
||||||
|
|||||||
@@ -8,19 +8,94 @@ log() {
|
|||||||
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
|
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
|
||||||
}
|
}
|
||||||
|
|
||||||
# Copy files from agents_backup to agents if agents directory is empty and no .sh files are present
|
# Function to extract version from agent script
|
||||||
if [ -d "/app/agents" ] && [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' | head -n 1)" ]; then
|
get_agent_version() {
|
||||||
if [ -d "/app/agents_backup" ]; then
|
local file="$1"
|
||||||
log "Agents directory is empty, copying from backup..."
|
if [ -f "$file" ]; then
|
||||||
cp -r /app/agents_backup/* /app/agents/
|
grep -m 1 '^AGENT_VERSION=' "$file" | cut -d'"' -f2 2>/dev/null || echo "0.0.0"
|
||||||
else
|
else
|
||||||
log "Warning: agents_backup directory not found"
|
echo "0.0.0"
|
||||||
fi
|
fi
|
||||||
else
|
}
|
||||||
log "Agents directory already contains files, skipping copy"
|
|
||||||
|
# Function to compare versions (returns 0 if $1 > $2)
|
||||||
|
version_greater() {
|
||||||
|
# Use sort -V for version comparison
|
||||||
|
test "$(printf '%s\n' "$1" "$2" | sort -V | tail -n1)" = "$1" && test "$1" != "$2"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check and update agent files if necessary
|
||||||
|
update_agents() {
|
||||||
|
local backup_agent="/app/agents_backup/patchmon-agent.sh"
|
||||||
|
local current_agent="/app/agents/patchmon-agent.sh"
|
||||||
|
|
||||||
|
# Check if agents directory exists
|
||||||
|
if [ ! -d "/app/agents" ]; then
|
||||||
|
log "ERROR: /app/agents directory not found"
|
||||||
|
return 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
log "Starting PatchMon Backend (${NODE_ENV:-production})..."
|
# Check if backup exists
|
||||||
|
if [ ! -d "/app/agents_backup" ]; then
|
||||||
|
log "WARNING: agents_backup directory not found, skipping agent update"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Get versions
|
||||||
|
local backup_version=$(get_agent_version "$backup_agent")
|
||||||
|
local current_version=$(get_agent_version "$current_agent")
|
||||||
|
|
||||||
|
log "Agent version check:"
|
||||||
|
log " Image version: ${backup_version}"
|
||||||
|
log " Volume version: ${current_version}"
|
||||||
|
|
||||||
|
# Determine if update is needed
|
||||||
|
local needs_update=0
|
||||||
|
|
||||||
|
# Case 1: No agents in volume (first time setup)
|
||||||
|
if [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' 2>/dev/null | head -n 1)" ]; then
|
||||||
|
log "Agents directory is empty - performing initial copy"
|
||||||
|
needs_update=1
|
||||||
|
# Case 2: Backup version is newer
|
||||||
|
elif version_greater "$backup_version" "$current_version"; then
|
||||||
|
log "Newer agent version available (${backup_version} > ${current_version})"
|
||||||
|
needs_update=1
|
||||||
|
else
|
||||||
|
log "Agents are up to date"
|
||||||
|
needs_update=0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Perform update if needed
|
||||||
|
if [ $needs_update -eq 1 ]; then
|
||||||
|
log "Updating agents to version ${backup_version}..."
|
||||||
|
|
||||||
|
# Create backup of existing agents if they exist
|
||||||
|
if [ -f "$current_agent" ]; then
|
||||||
|
local backup_timestamp=$(date +%Y%m%d_%H%M%S)
|
||||||
|
local backup_name="/app/agents/patchmon-agent.sh.backup.${backup_timestamp}"
|
||||||
|
cp "$current_agent" "$backup_name" 2>/dev/null || true
|
||||||
|
log "Previous agent backed up to: $(basename $backup_name)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Copy new agents
|
||||||
|
cp -r /app/agents_backup/* /app/agents/
|
||||||
|
|
||||||
|
# Verify update
|
||||||
|
local new_version=$(get_agent_version "$current_agent")
|
||||||
|
if [ "$new_version" = "$backup_version" ]; then
|
||||||
|
log "✅ Agents successfully updated to version ${new_version}"
|
||||||
|
else
|
||||||
|
log "⚠️ Warning: Agent update may have failed (expected: ${backup_version}, got: ${new_version})"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main execution
|
||||||
|
log "PatchMon Backend Container Starting..."
|
||||||
|
log "Environment: ${NODE_ENV:-production}"
|
||||||
|
|
||||||
|
# Update agents (version-aware)
|
||||||
|
update_agents
|
||||||
|
|
||||||
log "Running database migrations..."
|
log "Running database migrations..."
|
||||||
npx prisma migrate deploy
|
npx prisma migrate deploy
|
||||||
|
|||||||
@@ -18,6 +18,22 @@ services:
|
|||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 7
|
retries: 7
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: redis:7-alpine
|
||||||
|
restart: unless-stopped
|
||||||
|
command: redis-server --requirepass 1NS3CU6E_DEV_R3DIS_PASSW0RD
|
||||||
|
environment:
|
||||||
|
REDIS_PASSWORD: 1NS3CU6E_DEV_R3DIS_PASSW0RD
|
||||||
|
ports:
|
||||||
|
- "6379:6379"
|
||||||
|
volumes:
|
||||||
|
- ./compose_dev_data/redis:/data
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "1NS3CU6E_DEV_R3DIS_PASSW0RD", "ping"]
|
||||||
|
interval: 3s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 7
|
||||||
|
|
||||||
backend:
|
backend:
|
||||||
build:
|
build:
|
||||||
context: ..
|
context: ..
|
||||||
@@ -34,6 +50,11 @@ services:
|
|||||||
SERVER_HOST: localhost
|
SERVER_HOST: localhost
|
||||||
SERVER_PORT: 3000
|
SERVER_PORT: 3000
|
||||||
CORS_ORIGIN: http://localhost:3000
|
CORS_ORIGIN: http://localhost:3000
|
||||||
|
# Redis Configuration
|
||||||
|
REDIS_HOST: redis
|
||||||
|
REDIS_PORT: 6379
|
||||||
|
REDIS_PASSWORD: 1NS3CU6E_DEV_R3DIS_PASSW0RD
|
||||||
|
REDIS_DB: 0
|
||||||
ports:
|
ports:
|
||||||
- "3001:3001"
|
- "3001:3001"
|
||||||
volumes:
|
volumes:
|
||||||
@@ -41,6 +62,8 @@ services:
|
|||||||
depends_on:
|
depends_on:
|
||||||
database:
|
database:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
develop:
|
develop:
|
||||||
watch:
|
watch:
|
||||||
- action: sync
|
- action: sync
|
||||||
|
|||||||
@@ -16,6 +16,21 @@ services:
|
|||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 7
|
retries: 7
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: redis:7-alpine
|
||||||
|
restart: unless-stopped
|
||||||
|
command: redis-server /usr/local/etc/redis/redis.conf
|
||||||
|
environment:
|
||||||
|
REDIS_PASSWORD: # CREATE A STRONG REDIS PASSWORD AND PUT IT HERE
|
||||||
|
volumes:
|
||||||
|
- redis_data:/data
|
||||||
|
- ./docker/redis.conf:/usr/local/etc/redis/redis.conf:ro
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "${REDIS_PASSWORD}", "ping"]
|
||||||
|
interval: 3s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 7
|
||||||
|
|
||||||
backend:
|
backend:
|
||||||
image: ghcr.io/patchmon/patchmon-backend:latest
|
image: ghcr.io/patchmon/patchmon-backend:latest
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
@@ -28,11 +43,18 @@ services:
|
|||||||
SERVER_HOST: localhost
|
SERVER_HOST: localhost
|
||||||
SERVER_PORT: 3000
|
SERVER_PORT: 3000
|
||||||
CORS_ORIGIN: http://localhost:3000
|
CORS_ORIGIN: http://localhost:3000
|
||||||
|
# Redis Configuration
|
||||||
|
REDIS_HOST: redis
|
||||||
|
REDIS_PORT: 6379
|
||||||
|
REDIS_PASSWORD: REPLACE_YOUR_REDIS_PASSWORD_HERE
|
||||||
|
REDIS_DB: 0
|
||||||
volumes:
|
volumes:
|
||||||
- agent_files:/app/agents
|
- agent_files:/app/agents
|
||||||
depends_on:
|
depends_on:
|
||||||
database:
|
database:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
|
||||||
frontend:
|
frontend:
|
||||||
image: ghcr.io/patchmon/patchmon-frontend:latest
|
image: ghcr.io/patchmon/patchmon-frontend:latest
|
||||||
@@ -45,4 +67,5 @@ services:
|
|||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
postgres_data:
|
postgres_data:
|
||||||
|
redis_data:
|
||||||
agent_files:
|
agent_files:
|
||||||
|
|||||||
@@ -52,6 +52,64 @@ server {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# SSE (Server-Sent Events) specific configuration
|
||||||
|
location /api/v1/ws/status/ {
|
||||||
|
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header X-Forwarded-Host $host;
|
||||||
|
|
||||||
|
# Critical SSE settings
|
||||||
|
proxy_buffering off;
|
||||||
|
proxy_cache off;
|
||||||
|
proxy_set_header Connection '';
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
chunked_transfer_encoding off;
|
||||||
|
|
||||||
|
# Timeout settings for long-lived connections
|
||||||
|
proxy_read_timeout 24h;
|
||||||
|
proxy_send_timeout 24h;
|
||||||
|
proxy_connect_timeout 60s;
|
||||||
|
|
||||||
|
# Disable nginx buffering for real-time streaming
|
||||||
|
proxy_request_buffering off;
|
||||||
|
proxy_max_temp_file_size 0;
|
||||||
|
|
||||||
|
# CORS headers for SSE
|
||||||
|
add_header Access-Control-Allow-Origin * always;
|
||||||
|
add_header Access-Control-Allow-Methods "GET, OPTIONS" always;
|
||||||
|
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
|
||||||
|
|
||||||
|
# Handle preflight requests
|
||||||
|
if ($request_method = 'OPTIONS') {
|
||||||
|
return 204;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# WebSocket upgrade handling
|
||||||
|
location /api/v1/agents/ws {
|
||||||
|
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header X-Forwarded-Host $host;
|
||||||
|
|
||||||
|
# WebSocket timeout settings
|
||||||
|
proxy_read_timeout 24h;
|
||||||
|
proxy_send_timeout 24h;
|
||||||
|
proxy_connect_timeout 60s;
|
||||||
|
|
||||||
|
# Disable buffering for WebSocket
|
||||||
|
proxy_buffering off;
|
||||||
|
proxy_cache off;
|
||||||
|
}
|
||||||
|
|
||||||
# Static assets caching
|
# Static assets caching
|
||||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
||||||
expires 1y;
|
expires 1y;
|
||||||
|
|||||||
35
docker/redis.conf
Normal file
35
docker/redis.conf
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# Redis Configuration for PatchMon Production
|
||||||
|
# Security settings
|
||||||
|
requirepass ${REDIS_PASSWORD}
|
||||||
|
rename-command FLUSHDB ""
|
||||||
|
rename-command FLUSHALL ""
|
||||||
|
rename-command DEBUG ""
|
||||||
|
rename-command CONFIG "CONFIG_${REDIS_PASSWORD}"
|
||||||
|
|
||||||
|
# Memory management
|
||||||
|
maxmemory 256mb
|
||||||
|
maxmemory-policy allkeys-lru
|
||||||
|
|
||||||
|
# Persistence settings
|
||||||
|
save 900 1
|
||||||
|
save 300 10
|
||||||
|
save 60 10000
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
loglevel notice
|
||||||
|
logfile ""
|
||||||
|
|
||||||
|
# Network security
|
||||||
|
bind 127.0.0.1
|
||||||
|
protected-mode yes
|
||||||
|
|
||||||
|
# Performance tuning
|
||||||
|
tcp-keepalive 300
|
||||||
|
timeout 0
|
||||||
|
|
||||||
|
# Disable dangerous commands
|
||||||
|
rename-command SHUTDOWN "SHUTDOWN_${REDIS_PASSWORD}"
|
||||||
|
rename-command KEYS ""
|
||||||
|
rename-command MONITOR ""
|
||||||
|
rename-command SLAVEOF ""
|
||||||
|
rename-command REPLICAOF ""
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "patchmon-frontend",
|
"name": "patchmon-frontend",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "1.2.7",
|
"version": "1.2.9",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
@@ -20,7 +20,7 @@
|
|||||||
"clsx": "^2.1.1",
|
"clsx": "^2.1.1",
|
||||||
"cors": "^2.8.5",
|
"cors": "^2.8.5",
|
||||||
"date-fns": "^4.1.0",
|
"date-fns": "^4.1.0",
|
||||||
"express": "^4.21.2",
|
"express": "^5.0.0",
|
||||||
"http-proxy-middleware": "^3.0.3",
|
"http-proxy-middleware": "^3.0.3",
|
||||||
"lucide-react": "^0.468.0",
|
"lucide-react": "^0.468.0",
|
||||||
"react": "^18.3.1",
|
"react": "^18.3.1",
|
||||||
|
|||||||
23
frontend/public/assets/bull-board-logo.svg
Normal file
23
frontend/public/assets/bull-board-logo.svg
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36">
|
||||||
|
<circle fill="#DD2E44" cx="18" cy="18" r="18" />
|
||||||
|
<circle fill="#FFF" cx="18" cy="18" r="13.5" />
|
||||||
|
<circle fill="#DD2E44" cx="18" cy="18" r="10" />
|
||||||
|
<circle fill="#FFF" cx="18" cy="18" r="6" />
|
||||||
|
<circle fill="#DD2E44" cx="18" cy="18" r="3" />
|
||||||
|
<path
|
||||||
|
opacity=".2"
|
||||||
|
d="M18.24 18.282l13.144 11.754s-2.647 3.376-7.89 5.109L17.579 18.42l.661-.138z"
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill="#FFAC33"
|
||||||
|
d="M18.294 19a.994.994 0 01-.704-1.699l.563-.563a.995.995 0 011.408 1.407l-.564.563a.987.987 0 01-.703.292z"
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill="#55ACEE"
|
||||||
|
d="M24.016 6.981c-.403 2.079 0 4.691 0 4.691l7.054-7.388c.291-1.454-.528-3.932-1.718-4.238-1.19-.306-4.079.803-5.336 6.935zm5.003 5.003c-2.079.403-4.691 0-4.691 0l7.388-7.054c1.454-.291 3.932.528 4.238 1.718.306 1.19-.803 4.079-6.935 5.336z"
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill="#3A87C2"
|
||||||
|
d="M32.798 4.485L21.176 17.587c-.362.362-1.673.882-2.51.046-.836-.836-.419-2.08-.057-2.443L31.815 3.501s.676-.635 1.159-.152-.176 1.136-.176 1.136z"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 1.1 KiB |
@@ -18,9 +18,15 @@ const Login = lazy(() => import("./pages/Login"));
|
|||||||
const PackageDetail = lazy(() => import("./pages/PackageDetail"));
|
const PackageDetail = lazy(() => import("./pages/PackageDetail"));
|
||||||
const Packages = lazy(() => import("./pages/Packages"));
|
const Packages = lazy(() => import("./pages/Packages"));
|
||||||
const Profile = lazy(() => import("./pages/Profile"));
|
const Profile = lazy(() => import("./pages/Profile"));
|
||||||
const Queue = lazy(() => import("./pages/Queue"));
|
const Automation = lazy(() => import("./pages/Automation"));
|
||||||
const Repositories = lazy(() => import("./pages/Repositories"));
|
const Repositories = lazy(() => import("./pages/Repositories"));
|
||||||
const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
|
const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
|
||||||
|
const Docker = lazy(() => import("./pages/Docker"));
|
||||||
|
const DockerContainerDetail = lazy(
|
||||||
|
() => import("./pages/docker/ContainerDetail"),
|
||||||
|
);
|
||||||
|
const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
|
||||||
|
const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
|
||||||
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
|
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
|
||||||
const Integrations = lazy(() => import("./pages/settings/Integrations"));
|
const Integrations = lazy(() => import("./pages/settings/Integrations"));
|
||||||
const Notifications = lazy(() => import("./pages/settings/Notifications"));
|
const Notifications = lazy(() => import("./pages/settings/Notifications"));
|
||||||
@@ -137,11 +143,51 @@ function AppRoutes() {
|
|||||||
}
|
}
|
||||||
/>
|
/>
|
||||||
<Route
|
<Route
|
||||||
path="/queue"
|
path="/automation"
|
||||||
element={
|
element={
|
||||||
<ProtectedRoute requirePermission="can_view_hosts">
|
<ProtectedRoute requirePermission="can_view_hosts">
|
||||||
<Layout>
|
<Layout>
|
||||||
<Queue />
|
<Automation />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="/docker"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_view_reports">
|
||||||
|
<Layout>
|
||||||
|
<Docker />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="/docker/containers/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_view_reports">
|
||||||
|
<Layout>
|
||||||
|
<DockerContainerDetail />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="/docker/images/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_view_reports">
|
||||||
|
<Layout>
|
||||||
|
<DockerImageDetail />
|
||||||
|
</Layout>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="/docker/hosts/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute requirePermission="can_view_reports">
|
||||||
|
<Layout>
|
||||||
|
<DockerHostDetail />
|
||||||
</Layout>
|
</Layout>
|
||||||
</ProtectedRoute>
|
</ProtectedRoute>
|
||||||
}
|
}
|
||||||
|
|||||||
283
frontend/src/components/InlineMultiGroupEdit.jsx
Normal file
283
frontend/src/components/InlineMultiGroupEdit.jsx
Normal file
@@ -0,0 +1,283 @@
|
|||||||
|
import { Check, ChevronDown, Edit2, X } from "lucide-react";
|
||||||
|
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||||
|
|
||||||
|
const InlineMultiGroupEdit = ({
|
||||||
|
value = [], // Array of group IDs
|
||||||
|
onSave,
|
||||||
|
onCancel,
|
||||||
|
options = [],
|
||||||
|
className = "",
|
||||||
|
disabled = false,
|
||||||
|
}) => {
|
||||||
|
const [isEditing, setIsEditing] = useState(false);
|
||||||
|
const [selectedValues, setSelectedValues] = useState(value);
|
||||||
|
const [isLoading, setIsLoading] = useState(false);
|
||||||
|
const [error, setError] = useState("");
|
||||||
|
const [isOpen, setIsOpen] = useState(false);
|
||||||
|
const [dropdownPosition, setDropdownPosition] = useState({
|
||||||
|
top: 0,
|
||||||
|
left: 0,
|
||||||
|
width: 0,
|
||||||
|
});
|
||||||
|
const dropdownRef = useRef(null);
|
||||||
|
const buttonRef = useRef(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (isEditing && dropdownRef.current) {
|
||||||
|
dropdownRef.current.focus();
|
||||||
|
}
|
||||||
|
}, [isEditing]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
setSelectedValues(value);
|
||||||
|
// Force re-render when value changes
|
||||||
|
if (!isEditing) {
|
||||||
|
setIsOpen(false);
|
||||||
|
}
|
||||||
|
}, [value, isEditing]);
|
||||||
|
|
||||||
|
// Calculate dropdown position
|
||||||
|
const calculateDropdownPosition = useCallback(() => {
|
||||||
|
if (buttonRef.current) {
|
||||||
|
const rect = buttonRef.current.getBoundingClientRect();
|
||||||
|
setDropdownPosition({
|
||||||
|
top: rect.bottom + window.scrollY + 4,
|
||||||
|
left: rect.left + window.scrollX,
|
||||||
|
width: rect.width,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Close dropdown when clicking outside
|
||||||
|
useEffect(() => {
|
||||||
|
const handleClickOutside = (event) => {
|
||||||
|
if (dropdownRef.current && !dropdownRef.current.contains(event.target)) {
|
||||||
|
setIsOpen(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if (isOpen) {
|
||||||
|
calculateDropdownPosition();
|
||||||
|
document.addEventListener("mousedown", handleClickOutside);
|
||||||
|
window.addEventListener("resize", calculateDropdownPosition);
|
||||||
|
window.addEventListener("scroll", calculateDropdownPosition);
|
||||||
|
return () => {
|
||||||
|
document.removeEventListener("mousedown", handleClickOutside);
|
||||||
|
window.removeEventListener("resize", calculateDropdownPosition);
|
||||||
|
window.removeEventListener("scroll", calculateDropdownPosition);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}, [isOpen, calculateDropdownPosition]);
|
||||||
|
|
||||||
|
const handleEdit = () => {
|
||||||
|
if (disabled) return;
|
||||||
|
setIsEditing(true);
|
||||||
|
setSelectedValues(value);
|
||||||
|
setError("");
|
||||||
|
// Automatically open dropdown when editing starts
|
||||||
|
setTimeout(() => {
|
||||||
|
setIsOpen(true);
|
||||||
|
}, 0);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleCancel = () => {
|
||||||
|
setIsEditing(false);
|
||||||
|
setSelectedValues(value);
|
||||||
|
setError("");
|
||||||
|
setIsOpen(false);
|
||||||
|
if (onCancel) onCancel();
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSave = async () => {
|
||||||
|
if (disabled || isLoading) return;
|
||||||
|
|
||||||
|
// Check if values actually changed
|
||||||
|
const sortedCurrent = [...value].sort();
|
||||||
|
const sortedSelected = [...selectedValues].sort();
|
||||||
|
if (JSON.stringify(sortedCurrent) === JSON.stringify(sortedSelected)) {
|
||||||
|
setIsEditing(false);
|
||||||
|
setIsOpen(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
setIsLoading(true);
|
||||||
|
setError("");
|
||||||
|
|
||||||
|
try {
|
||||||
|
await onSave(selectedValues);
|
||||||
|
setIsEditing(false);
|
||||||
|
setIsOpen(false);
|
||||||
|
} catch (err) {
|
||||||
|
setError(err.message || "Failed to save");
|
||||||
|
} finally {
|
||||||
|
setIsLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleKeyDown = (e) => {
|
||||||
|
if (e.key === "Enter") {
|
||||||
|
e.preventDefault();
|
||||||
|
handleSave();
|
||||||
|
} else if (e.key === "Escape") {
|
||||||
|
e.preventDefault();
|
||||||
|
handleCancel();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const toggleGroup = (groupId) => {
|
||||||
|
setSelectedValues((prev) => {
|
||||||
|
if (prev.includes(groupId)) {
|
||||||
|
return prev.filter((id) => id !== groupId);
|
||||||
|
} else {
|
||||||
|
return [...prev, groupId];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const _displayValue = useMemo(() => {
|
||||||
|
if (!value || value.length === 0) {
|
||||||
|
return "Ungrouped";
|
||||||
|
}
|
||||||
|
if (value.length === 1) {
|
||||||
|
const option = options.find((opt) => opt.id === value[0]);
|
||||||
|
return option ? option.name : "Unknown Group";
|
||||||
|
}
|
||||||
|
return `${value.length} groups`;
|
||||||
|
}, [value, options]);
|
||||||
|
|
||||||
|
const displayGroups = useMemo(() => {
|
||||||
|
if (!value || value.length === 0) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
.map((groupId) => options.find((opt) => opt.id === groupId))
|
||||||
|
.filter(Boolean);
|
||||||
|
}, [value, options]);
|
||||||
|
|
||||||
|
if (isEditing) {
|
||||||
|
return (
|
||||||
|
<div className={`relative ${className}`} ref={dropdownRef}>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<div className="relative flex-1">
|
||||||
|
<button
|
||||||
|
ref={buttonRef}
|
||||||
|
type="button"
|
||||||
|
onClick={() => setIsOpen(!isOpen)}
|
||||||
|
onKeyDown={handleKeyDown}
|
||||||
|
disabled={isLoading}
|
||||||
|
className={`w-full px-3 py-1 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent flex items-center justify-between ${
|
||||||
|
error ? "border-red-500" : ""
|
||||||
|
} ${isLoading ? "opacity-50" : ""}`}
|
||||||
|
>
|
||||||
|
<span className="truncate">
|
||||||
|
{selectedValues.length === 0
|
||||||
|
? "Ungrouped"
|
||||||
|
: selectedValues.length === 1
|
||||||
|
? options.find((opt) => opt.id === selectedValues[0])
|
||||||
|
?.name || "Unknown Group"
|
||||||
|
: `${selectedValues.length} groups selected`}
|
||||||
|
</span>
|
||||||
|
<ChevronDown className="h-4 w-4 flex-shrink-0" />
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{isOpen && (
|
||||||
|
<div
|
||||||
|
className="fixed z-50 bg-white dark:bg-secondary-800 border border-secondary-300 dark:border-secondary-600 rounded-md shadow-lg max-h-60 overflow-auto"
|
||||||
|
style={{
|
||||||
|
top: `${dropdownPosition.top}px`,
|
||||||
|
left: `${dropdownPosition.left}px`,
|
||||||
|
width: `${dropdownPosition.width}px`,
|
||||||
|
minWidth: "200px",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<div className="py-1">
|
||||||
|
{options.map((option) => (
|
||||||
|
<label
|
||||||
|
key={option.id}
|
||||||
|
className="w-full px-3 py-2 text-left text-sm hover:bg-secondary-100 dark:hover:bg-secondary-700 flex items-center cursor-pointer"
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={selectedValues.includes(option.id)}
|
||||||
|
onChange={() => toggleGroup(option.id)}
|
||||||
|
className="mr-2 h-4 w-4 text-primary-600 focus:ring-primary-500 border-secondary-300 rounded"
|
||||||
|
/>
|
||||||
|
<span
|
||||||
|
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium text-white"
|
||||||
|
style={{ backgroundColor: option.color }}
|
||||||
|
>
|
||||||
|
{option.name}
|
||||||
|
</span>
|
||||||
|
</label>
|
||||||
|
))}
|
||||||
|
{options.length === 0 && (
|
||||||
|
<div className="px-3 py-2 text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
|
No groups available
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={handleSave}
|
||||||
|
disabled={isLoading}
|
||||||
|
className="p-1 text-green-600 hover:text-green-700 hover:bg-green-50 dark:hover:bg-green-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
|
||||||
|
title="Save"
|
||||||
|
>
|
||||||
|
<Check className="h-4 w-4" />
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={handleCancel}
|
||||||
|
disabled={isLoading}
|
||||||
|
className="p-1 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
|
||||||
|
title="Cancel"
|
||||||
|
>
|
||||||
|
<X className="h-4 w-4" />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
{error && (
|
||||||
|
<span className="text-xs text-red-600 dark:text-red-400 mt-1 block">
|
||||||
|
{error}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={`flex items-center gap-1 group ${className}`}>
|
||||||
|
{displayGroups.length === 0 ? (
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800">
|
||||||
|
Ungrouped
|
||||||
|
</span>
|
||||||
|
) : (
|
||||||
|
<div className="flex items-center gap-1 flex-wrap">
|
||||||
|
{displayGroups.map((group) => (
|
||||||
|
<span
|
||||||
|
key={group.id}
|
||||||
|
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium text-white"
|
||||||
|
style={{ backgroundColor: group.color }}
|
||||||
|
>
|
||||||
|
{group.name}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{!disabled && (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={handleEdit}
|
||||||
|
className="p-1 text-secondary-400 hover:text-secondary-600 dark:hover:text-secondary-300 hover:bg-secondary-100 dark:hover:bg-secondary-700 rounded transition-colors opacity-0 group-hover:opacity-100"
|
||||||
|
title="Edit groups"
|
||||||
|
>
|
||||||
|
<Edit2 className="h-3 w-3" />
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default InlineMultiGroupEdit;
|
||||||
@@ -11,7 +11,6 @@ import {
|
|||||||
Github,
|
Github,
|
||||||
Globe,
|
Globe,
|
||||||
Home,
|
Home,
|
||||||
List,
|
|
||||||
LogOut,
|
LogOut,
|
||||||
Mail,
|
Mail,
|
||||||
Menu,
|
Menu,
|
||||||
@@ -113,18 +112,26 @@ const Layout = ({ children }) => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Add Automation item (available to all users with inventory access)
|
||||||
|
inventoryItems.push({
|
||||||
|
name: "Automation",
|
||||||
|
href: "/automation",
|
||||||
|
icon: RefreshCw,
|
||||||
|
beta: true,
|
||||||
|
});
|
||||||
|
|
||||||
if (canViewReports()) {
|
if (canViewReports()) {
|
||||||
inventoryItems.push(
|
inventoryItems.push(
|
||||||
{
|
|
||||||
name: "Services",
|
|
||||||
href: "/services",
|
|
||||||
icon: Activity,
|
|
||||||
comingSoon: true,
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
name: "Docker",
|
name: "Docker",
|
||||||
href: "/docker",
|
href: "/docker",
|
||||||
icon: Container,
|
icon: Container,
|
||||||
|
beta: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Services",
|
||||||
|
href: "/services",
|
||||||
|
icon: Activity,
|
||||||
comingSoon: true,
|
comingSoon: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -136,21 +143,13 @@ const Layout = ({ children }) => {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add Pro-Action and Queue items (available to all users with inventory access)
|
// Add Pro-Action item (available to all users with inventory access)
|
||||||
inventoryItems.push(
|
inventoryItems.push({
|
||||||
{
|
|
||||||
name: "Pro-Action",
|
name: "Pro-Action",
|
||||||
href: "/pro-action",
|
href: "/pro-action",
|
||||||
icon: Zap,
|
icon: Zap,
|
||||||
comingSoon: true,
|
comingSoon: true,
|
||||||
},
|
});
|
||||||
{
|
|
||||||
name: "Queue",
|
|
||||||
href: "/queue",
|
|
||||||
icon: List,
|
|
||||||
comingSoon: true,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
if (inventoryItems.length > 0) {
|
if (inventoryItems.length > 0) {
|
||||||
nav.push({
|
nav.push({
|
||||||
@@ -210,7 +209,7 @@ const Layout = ({ children }) => {
|
|||||||
if (path === "/services") return "Services";
|
if (path === "/services") return "Services";
|
||||||
if (path === "/docker") return "Docker";
|
if (path === "/docker") return "Docker";
|
||||||
if (path === "/pro-action") return "Pro-Action";
|
if (path === "/pro-action") return "Pro-Action";
|
||||||
if (path === "/queue") return "Queue";
|
if (path === "/automation") return "Automation";
|
||||||
if (path === "/users") return "Users";
|
if (path === "/users") return "Users";
|
||||||
if (path === "/permissions") return "Permissions";
|
if (path === "/permissions") return "Permissions";
|
||||||
if (path === "/settings") return "Settings";
|
if (path === "/settings") return "Settings";
|
||||||
@@ -436,6 +435,11 @@ const Layout = ({ children }) => {
|
|||||||
Soon
|
Soon
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
|
{subItem.beta && (
|
||||||
|
<span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
|
||||||
|
Beta
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
</span>
|
</span>
|
||||||
</Link>
|
</Link>
|
||||||
)}
|
)}
|
||||||
@@ -707,6 +711,11 @@ const Layout = ({ children }) => {
|
|||||||
Soon
|
Soon
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
|
{subItem.beta && (
|
||||||
|
<span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
|
||||||
|
Beta
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
{subItem.showUpgradeIcon && (
|
{subItem.showUpgradeIcon && (
|
||||||
<UpgradeNotificationIcon className="h-3 w-3" />
|
<UpgradeNotificationIcon className="h-3 w-3" />
|
||||||
)}
|
)}
|
||||||
@@ -929,11 +938,17 @@ const Layout = ({ children }) => {
|
|||||||
<div className="h-6 w-px bg-secondary-200 dark:bg-secondary-600 lg:hidden" />
|
<div className="h-6 w-px bg-secondary-200 dark:bg-secondary-600 lg:hidden" />
|
||||||
|
|
||||||
<div className="flex flex-1 gap-x-4 self-stretch lg:gap-x-6">
|
<div className="flex flex-1 gap-x-4 self-stretch lg:gap-x-6">
|
||||||
{/* Page title - hidden on dashboard, hosts, repositories, packages, and host details to give more space to search */}
|
{/* Page title - hidden on dashboard, hosts, repositories, packages, automation, docker, and host details to give more space to search */}
|
||||||
{!["/", "/hosts", "/repositories", "/packages"].includes(
|
{![
|
||||||
location.pathname,
|
"/",
|
||||||
) &&
|
"/hosts",
|
||||||
!location.pathname.startsWith("/hosts/") && (
|
"/repositories",
|
||||||
|
"/packages",
|
||||||
|
"/automation",
|
||||||
|
"/docker",
|
||||||
|
].includes(location.pathname) &&
|
||||||
|
!location.pathname.startsWith("/hosts/") &&
|
||||||
|
!location.pathname.startsWith("/docker/") && (
|
||||||
<div className="relative flex items-center">
|
<div className="relative flex items-center">
|
||||||
<h2 className="text-lg font-semibold text-secondary-900 dark:text-secondary-100 whitespace-nowrap">
|
<h2 className="text-lg font-semibold text-secondary-900 dark:text-secondary-100 whitespace-nowrap">
|
||||||
{getPageTitle()}
|
{getPageTitle()}
|
||||||
@@ -943,7 +958,7 @@ const Layout = ({ children }) => {
|
|||||||
|
|
||||||
{/* Global Search Bar */}
|
{/* Global Search Bar */}
|
||||||
<div
|
<div
|
||||||
className={`flex items-center ${["/", "/hosts", "/repositories", "/packages"].includes(location.pathname) || location.pathname.startsWith("/hosts/") ? "flex-1 max-w-none" : "max-w-sm"}`}
|
className={`flex items-center ${["/", "/hosts", "/repositories", "/packages", "/automation", "/docker"].includes(location.pathname) || location.pathname.startsWith("/hosts/") || location.pathname.startsWith("/docker/") ? "flex-1 max-w-none" : "max-w-sm"}`}
|
||||||
>
|
>
|
||||||
<GlobalSearch />
|
<GlobalSearch />
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ const AgentManagementTab = () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Helper function to get curl flags based on settings
|
// Helper function to get curl flags based on settings
|
||||||
const getCurlFlags = () => {
|
const _getCurlFlags = () => {
|
||||||
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
|
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -177,29 +177,40 @@ const AgentManagementTab = () => {
|
|||||||
Agent Uninstall Command
|
Agent Uninstall Command
|
||||||
</h3>
|
</h3>
|
||||||
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
|
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
<p className="mb-2">
|
<p className="mb-3">
|
||||||
To completely remove PatchMon from a host:
|
To completely remove PatchMon from a host:
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
|
{/* Go Agent Uninstall */}
|
||||||
|
<div className="mb-3">
|
||||||
|
<div className="space-y-2">
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
|
<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
|
||||||
curl {getCurlFlags()} {window.location.origin}
|
sudo patchmon-agent uninstall
|
||||||
/api/v1/hosts/remove | sudo bash
|
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
const command = `curl ${getCurlFlags()} ${window.location.origin}/api/v1/hosts/remove | sudo bash`;
|
navigator.clipboard.writeText(
|
||||||
navigator.clipboard.writeText(command);
|
"sudo patchmon-agent uninstall",
|
||||||
// You could add a toast notification here
|
);
|
||||||
}}
|
}}
|
||||||
className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
|
className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
|
||||||
>
|
>
|
||||||
Copy
|
Copy
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
<div className="text-xs text-red-600 dark:text-red-400">
|
||||||
|
Options: <code>--remove-config</code>,{" "}
|
||||||
|
<code>--remove-logs</code>, <code>--remove-all</code>,{" "}
|
||||||
|
<code>--force</code>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<p className="mt-2 text-xs">
|
<p className="mt-2 text-xs">
|
||||||
⚠️ This will remove all PatchMon files, configuration, and
|
⚠️ This command will remove all PatchMon files,
|
||||||
crontab entries
|
configuration, and crontab entries
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -446,6 +446,53 @@ const AgentUpdatesTab = () => {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
|
{/* Uninstall Instructions */}
|
||||||
|
<div className="bg-red-50 dark:bg-red-900 border border-red-200 dark:border-red-700 rounded-md p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<Shield className="h-5 w-5 text-red-400 dark:text-red-300" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Agent Uninstall Command
|
||||||
|
</h3>
|
||||||
|
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
<p className="mb-3">To completely remove PatchMon from a host:</p>
|
||||||
|
|
||||||
|
{/* Go Agent Uninstall */}
|
||||||
|
<div className="mb-3">
|
||||||
|
<div className="space-y-2">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
|
||||||
|
sudo patchmon-agent uninstall
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
navigator.clipboard.writeText(
|
||||||
|
"sudo patchmon-agent uninstall",
|
||||||
|
);
|
||||||
|
}}
|
||||||
|
className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
|
||||||
|
>
|
||||||
|
Copy
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div className="text-xs text-red-600 dark:text-red-400">
|
||||||
|
Options: <code>--remove-config</code>,{" "}
|
||||||
|
<code>--remove-logs</code>, <code>--remove-all</code>,{" "}
|
||||||
|
<code>--force</code>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p className="mt-2 text-xs">
|
||||||
|
⚠️ This command will remove all PatchMon files, configuration,
|
||||||
|
and crontab entries
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -128,12 +128,14 @@ const VersionUpdateTab = () => {
|
|||||||
<span className="text-lg font-mono text-secondary-900 dark:text-white">
|
<span className="text-lg font-mono text-secondary-900 dark:text-white">
|
||||||
{versionInfo.github.latestRelease.tagName}
|
{versionInfo.github.latestRelease.tagName}
|
||||||
</span>
|
</span>
|
||||||
|
{versionInfo.github.latestRelease.publishedAt && (
|
||||||
<div className="text-xs text-secondary-500 dark:text-secondary-400">
|
<div className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
Published:{" "}
|
Published:{" "}
|
||||||
{new Date(
|
{new Date(
|
||||||
versionInfo.github.latestRelease.publishedAt,
|
versionInfo.github.latestRelease.publishedAt,
|
||||||
).toLocaleDateString()}
|
).toLocaleDateString()}
|
||||||
</div>
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|||||||
613
frontend/src/pages/Automation.jsx
Normal file
613
frontend/src/pages/Automation.jsx
Normal file
@@ -0,0 +1,613 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
Activity,
|
||||||
|
ArrowDown,
|
||||||
|
ArrowUp,
|
||||||
|
ArrowUpDown,
|
||||||
|
CheckCircle,
|
||||||
|
Clock,
|
||||||
|
Play,
|
||||||
|
Settings,
|
||||||
|
XCircle,
|
||||||
|
Zap,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { useState } from "react";
|
||||||
|
import api from "../utils/api";
|
||||||
|
|
||||||
|
const Automation = () => {
|
||||||
|
const [activeTab, setActiveTab] = useState("overview");
|
||||||
|
const [sortField, setSortField] = useState("nextRunTimestamp");
|
||||||
|
const [sortDirection, setSortDirection] = useState("asc");
|
||||||
|
|
||||||
|
// Fetch automation overview data
|
||||||
|
const { data: overview, isLoading: overviewLoading } = useQuery({
|
||||||
|
queryKey: ["automation-overview"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get("/automation/overview");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000, // Refresh every 30 seconds
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch queue statistics
|
||||||
|
useQuery({
|
||||||
|
queryKey: ["automation-stats"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get("/automation/stats");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch recent jobs
|
||||||
|
useQuery({
|
||||||
|
queryKey: ["automation-jobs"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const jobs = await Promise.all([
|
||||||
|
api
|
||||||
|
.get("/automation/jobs/github-update-check?limit=5")
|
||||||
|
.then((r) => r.data.data || []),
|
||||||
|
api
|
||||||
|
.get("/automation/jobs/session-cleanup?limit=5")
|
||||||
|
.then((r) => r.data.data || []),
|
||||||
|
]);
|
||||||
|
return {
|
||||||
|
githubUpdate: jobs[0],
|
||||||
|
sessionCleanup: jobs[1],
|
||||||
|
};
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const _getStatusIcon = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return <CheckCircle className="h-4 w-4 text-green-500" />;
|
||||||
|
case "failed":
|
||||||
|
return <XCircle className="h-4 w-4 text-red-500" />;
|
||||||
|
case "active":
|
||||||
|
return <Activity className="h-4 w-4 text-blue-500 animate-pulse" />;
|
||||||
|
default:
|
||||||
|
return <Clock className="h-4 w-4 text-gray-500" />;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const _getStatusColor = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return "bg-green-100 text-green-800";
|
||||||
|
case "failed":
|
||||||
|
return "bg-red-100 text-red-800";
|
||||||
|
case "active":
|
||||||
|
return "bg-blue-100 text-blue-800";
|
||||||
|
default:
|
||||||
|
return "bg-gray-100 text-gray-800";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const _formatDate = (dateString) => {
|
||||||
|
if (!dateString) return "N/A";
|
||||||
|
return new Date(dateString).toLocaleString();
|
||||||
|
};
|
||||||
|
|
||||||
|
const _formatDuration = (ms) => {
|
||||||
|
if (!ms) return "N/A";
|
||||||
|
return `${ms}ms`;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusBadge = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "Success":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-green-100 text-green-800">
|
||||||
|
Success
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
case "Failed":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-red-100 text-red-800">
|
||||||
|
Failed
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
case "Never run":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
|
||||||
|
Never run
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getNextRunTime = (schedule, _lastRun) => {
|
||||||
|
if (schedule === "Manual only") return "Manual trigger only";
|
||||||
|
if (schedule.includes("Agent-driven")) return "Agent-driven (automatic)";
|
||||||
|
if (schedule === "Daily at midnight") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(0, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 2 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(2, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 3 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(3, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Every hour") {
|
||||||
|
const now = new Date();
|
||||||
|
const nextHour = new Date(now);
|
||||||
|
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
|
||||||
|
return nextHour.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return "Unknown";
|
||||||
|
};
|
||||||
|
|
||||||
|
const getNextRunTimestamp = (schedule) => {
|
||||||
|
if (schedule === "Manual only") return Number.MAX_SAFE_INTEGER; // Manual tasks go to bottom
|
||||||
|
if (schedule.includes("Agent-driven")) return Number.MAX_SAFE_INTEGER - 1; // Agent-driven tasks near bottom but above manual
|
||||||
|
if (schedule === "Daily at midnight") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(0, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 2 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(2, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 3 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(3, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Every hour") {
|
||||||
|
const now = new Date();
|
||||||
|
const nextHour = new Date(now);
|
||||||
|
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
|
||||||
|
return nextHour.getTime();
|
||||||
|
}
|
||||||
|
return Number.MAX_SAFE_INTEGER; // Unknown schedules go to bottom
|
||||||
|
};
|
||||||
|
|
||||||
|
const openBullBoard = () => {
|
||||||
|
const token = localStorage.getItem("token");
|
||||||
|
if (!token) {
|
||||||
|
alert("Please log in to access the Queue Monitor");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the proxied URL through the frontend (port 3000)
|
||||||
|
// This avoids CORS issues as everything goes through the same origin
|
||||||
|
const url = `/admin/queues?token=${encodeURIComponent(token)}`;
|
||||||
|
window.open(url, "_blank", "width=1200,height=800");
|
||||||
|
};
|
||||||
|
|
||||||
|
const triggerManualJob = async (jobType, data = {}) => {
|
||||||
|
try {
|
||||||
|
let endpoint;
|
||||||
|
|
||||||
|
if (jobType === "github") {
|
||||||
|
endpoint = "/automation/trigger/github-update";
|
||||||
|
} else if (jobType === "sessions") {
|
||||||
|
endpoint = "/automation/trigger/session-cleanup";
|
||||||
|
} else if (jobType === "orphaned-repos") {
|
||||||
|
endpoint = "/automation/trigger/orphaned-repo-cleanup";
|
||||||
|
} else if (jobType === "orphaned-packages") {
|
||||||
|
endpoint = "/automation/trigger/orphaned-package-cleanup";
|
||||||
|
} else if (jobType === "agent-collection") {
|
||||||
|
endpoint = "/automation/trigger/agent-collection";
|
||||||
|
}
|
||||||
|
|
||||||
|
const _response = await api.post(endpoint, data);
|
||||||
|
|
||||||
|
// Refresh data
|
||||||
|
window.location.reload();
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering job:", error);
|
||||||
|
alert(
|
||||||
|
"Failed to trigger job: " +
|
||||||
|
(error.response?.data?.error || error.message),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSort = (field) => {
|
||||||
|
if (sortField === field) {
|
||||||
|
setSortDirection(sortDirection === "asc" ? "desc" : "asc");
|
||||||
|
} else {
|
||||||
|
setSortField(field);
|
||||||
|
setSortDirection("asc");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getSortIcon = (field) => {
|
||||||
|
if (sortField !== field) return <ArrowUpDown className="h-4 w-4" />;
|
||||||
|
return sortDirection === "asc" ? (
|
||||||
|
<ArrowUp className="h-4 w-4" />
|
||||||
|
) : (
|
||||||
|
<ArrowDown className="h-4 w-4" />
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Sort automations based on current sort settings
|
||||||
|
const sortedAutomations = overview?.automations
|
||||||
|
? [...overview.automations].sort((a, b) => {
|
||||||
|
let aValue, bValue;
|
||||||
|
|
||||||
|
switch (sortField) {
|
||||||
|
case "name":
|
||||||
|
aValue = a.name.toLowerCase();
|
||||||
|
bValue = b.name.toLowerCase();
|
||||||
|
break;
|
||||||
|
case "schedule":
|
||||||
|
aValue = a.schedule.toLowerCase();
|
||||||
|
bValue = b.schedule.toLowerCase();
|
||||||
|
break;
|
||||||
|
case "lastRun":
|
||||||
|
// Convert "Never" to empty string for proper sorting
|
||||||
|
aValue = a.lastRun === "Never" ? "" : a.lastRun;
|
||||||
|
bValue = b.lastRun === "Never" ? "" : b.lastRun;
|
||||||
|
break;
|
||||||
|
case "lastRunTimestamp":
|
||||||
|
aValue = a.lastRunTimestamp || 0;
|
||||||
|
bValue = b.lastRunTimestamp || 0;
|
||||||
|
break;
|
||||||
|
case "nextRunTimestamp":
|
||||||
|
aValue = getNextRunTimestamp(a.schedule);
|
||||||
|
bValue = getNextRunTimestamp(b.schedule);
|
||||||
|
break;
|
||||||
|
case "status":
|
||||||
|
aValue = a.status.toLowerCase();
|
||||||
|
bValue = b.status.toLowerCase();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
aValue = a[sortField];
|
||||||
|
bValue = b[sortField];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (aValue < bValue) return sortDirection === "asc" ? -1 : 1;
|
||||||
|
if (aValue > bValue) return sortDirection === "asc" ? 1 : -1;
|
||||||
|
return 0;
|
||||||
|
})
|
||||||
|
: [];
|
||||||
|
|
||||||
|
const tabs = [{ id: "overview", name: "Overview", icon: Settings }];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Page Header */}
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
Automation Management
|
||||||
|
</h1>
|
||||||
|
<p className="text-sm text-secondary-600 dark:text-secondary-400 mt-1">
|
||||||
|
Monitor and manage automated server operations, agent
|
||||||
|
communications, and patch deployments
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={openBullBoard}
|
||||||
|
className="btn-outline flex items-center gap-2"
|
||||||
|
title="Open Bull Board Queue Monitor"
|
||||||
|
>
|
||||||
|
<svg
|
||||||
|
className="h-4 w-4"
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
viewBox="0 0 36 36"
|
||||||
|
role="img"
|
||||||
|
aria-label="Bull Board"
|
||||||
|
>
|
||||||
|
<circle fill="#DD2E44" cx="18" cy="18" r="18" />
|
||||||
|
<circle fill="#FFF" cx="18" cy="18" r="13.5" />
|
||||||
|
<circle fill="#DD2E44" cx="18" cy="18" r="10" />
|
||||||
|
<circle fill="#FFF" cx="18" cy="18" r="6" />
|
||||||
|
<circle fill="#DD2E44" cx="18" cy="18" r="3" />
|
||||||
|
<path
|
||||||
|
opacity=".2"
|
||||||
|
d="M18.24 18.282l13.144 11.754s-2.647 3.376-7.89 5.109L17.579 18.42l.661-.138z"
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill="#FFAC33"
|
||||||
|
d="M18.294 19a.994.994 0 01-.704-1.699l.563-.563a.995.995 0 011.408 1.407l-.564.563a.987.987 0 01-.703.292z"
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill="#55ACEE"
|
||||||
|
d="M24.016 6.981c-.403 2.079 0 4.691 0 4.691l7.054-7.388c.291-1.454-.528-3.932-1.718-4.238-1.19-.306-4.079.803-5.336 6.935zm5.003 5.003c-2.079.403-4.691 0-4.691 0l7.388-7.054c1.454-.291 3.932.528 4.238 1.718.306 1.19-.803 4.079-6.935 5.336z"
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill="#3A87C2"
|
||||||
|
d="M32.798 4.485L21.176 17.587c-.362.362-1.673.882-2.51.046-.836-.836-.419-2.08-.057-2.443L31.815 3.501s.676-.635 1.159-.152-.176 1.136-.176 1.136z"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
Queue Monitor
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Stats Cards */}
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||||
|
{/* Scheduled Tasks Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Clock className="h-5 w-5 text-warning-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Scheduled Tasks
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.scheduledTasks || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Running Tasks Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Play className="h-5 w-5 text-success-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Running Tasks
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.runningTasks || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Failed Tasks Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<XCircle className="h-5 w-5 text-red-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Failed Tasks
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.failedTasks || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Total Task Runs Card */}
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Zap className="h-5 w-5 text-secondary-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Total Task Runs
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{overviewLoading ? "..." : overview?.totalAutomations || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tabs */}
|
||||||
|
<div className="mb-6">
|
||||||
|
<div className="border-b border-gray-200 dark:border-gray-700">
|
||||||
|
<nav className="-mb-px flex space-x-8">
|
||||||
|
{tabs.map((tab) => (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
key={tab.id}
|
||||||
|
onClick={() => setActiveTab(tab.id)}
|
||||||
|
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
|
||||||
|
activeTab === tab.id
|
||||||
|
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
||||||
|
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<tab.icon className="h-4 w-4" />
|
||||||
|
{tab.name}
|
||||||
|
</button>
|
||||||
|
))}
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Tab Content */}
|
||||||
|
{activeTab === "overview" && (
|
||||||
|
<div className="card p-6">
|
||||||
|
{overviewLoading ? (
|
||||||
|
<div className="text-center py-8">
|
||||||
|
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600 mx-auto"></div>
|
||||||
|
<p className="mt-2 text-sm text-secondary-500">
|
||||||
|
Loading automations...
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-700">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Run
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("name")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Task
|
||||||
|
{getSortIcon("name")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("schedule")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Frequency
|
||||||
|
{getSortIcon("schedule")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("lastRunTimestamp")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Last Run
|
||||||
|
{getSortIcon("lastRunTimestamp")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("nextRunTimestamp")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Next Run
|
||||||
|
{getSortIcon("nextRunTimestamp")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
|
||||||
|
onClick={() => handleSort("status")}
|
||||||
|
>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
Status
|
||||||
|
{getSortIcon("status")}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
{sortedAutomations.map((automation) => (
|
||||||
|
<tr
|
||||||
|
key={automation.queue}
|
||||||
|
className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
||||||
|
>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
{automation.schedule !== "Manual only" ? (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
if (automation.queue.includes("github")) {
|
||||||
|
triggerManualJob("github");
|
||||||
|
} else if (automation.queue.includes("session")) {
|
||||||
|
triggerManualJob("sessions");
|
||||||
|
} else if (
|
||||||
|
automation.queue.includes("orphaned-repo")
|
||||||
|
) {
|
||||||
|
triggerManualJob("orphaned-repos");
|
||||||
|
} else if (
|
||||||
|
automation.queue.includes("orphaned-package")
|
||||||
|
) {
|
||||||
|
triggerManualJob("orphaned-packages");
|
||||||
|
} else if (
|
||||||
|
automation.queue.includes("agent-commands")
|
||||||
|
) {
|
||||||
|
triggerManualJob("agent-collection");
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
|
||||||
|
title="Run Now"
|
||||||
|
>
|
||||||
|
<Play className="h-3 w-3" />
|
||||||
|
</button>
|
||||||
|
) : (
|
||||||
|
<span className="text-gray-400 text-xs">Manual</span>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
<div>
|
||||||
|
<div className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{automation.name}
|
||||||
|
</div>
|
||||||
|
<div className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
{automation.description}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
|
||||||
|
{automation.schedule}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
|
||||||
|
{automation.lastRun}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
|
||||||
|
{getNextRunTime(
|
||||||
|
automation.schedule,
|
||||||
|
automation.lastRun,
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
{getStatusBadge(automation.status)}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default Automation;
|
||||||
@@ -200,6 +200,8 @@ const Dashboard = () => {
|
|||||||
data: packageTrendsData,
|
data: packageTrendsData,
|
||||||
isLoading: packageTrendsLoading,
|
isLoading: packageTrendsLoading,
|
||||||
error: _packageTrendsError,
|
error: _packageTrendsError,
|
||||||
|
refetch: refetchPackageTrends,
|
||||||
|
isFetching: packageTrendsFetching,
|
||||||
} = useQuery({
|
} = useQuery({
|
||||||
queryKey: ["packageTrends", packageTrendsPeriod, packageTrendsHost],
|
queryKey: ["packageTrends", packageTrendsPeriod, packageTrendsHost],
|
||||||
queryFn: () => {
|
queryFn: () => {
|
||||||
@@ -771,6 +773,20 @@ const Dashboard = () => {
|
|||||||
Package Trends Over Time
|
Package Trends Over Time
|
||||||
</h3>
|
</h3>
|
||||||
<div className="flex items-center gap-3">
|
<div className="flex items-center gap-3">
|
||||||
|
{/* Refresh Button */}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => refetchPackageTrends()}
|
||||||
|
disabled={packageTrendsFetching}
|
||||||
|
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white hover:bg-secondary-50 dark:hover:bg-secondary-700 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
|
||||||
|
title="Refresh data"
|
||||||
|
>
|
||||||
|
<RefreshCw
|
||||||
|
className={`h-4 w-4 ${packageTrendsFetching ? "animate-spin" : ""}`}
|
||||||
|
/>
|
||||||
|
Refresh
|
||||||
|
</button>
|
||||||
|
|
||||||
{/* Period Selector */}
|
{/* Period Selector */}
|
||||||
<select
|
<select
|
||||||
value={packageTrendsPeriod}
|
value={packageTrendsPeriod}
|
||||||
@@ -1161,7 +1177,7 @@ const Dashboard = () => {
|
|||||||
try {
|
try {
|
||||||
const date = new Date(`${label}:00:00`);
|
const date = new Date(`${label}:00:00`);
|
||||||
// Check if date is valid
|
// Check if date is valid
|
||||||
if (isNaN(date.getTime())) {
|
if (Number.isNaN(date.getTime())) {
|
||||||
return label; // Return original label if date is invalid
|
return label; // Return original label if date is invalid
|
||||||
}
|
}
|
||||||
return date.toLocaleDateString("en-US", {
|
return date.toLocaleDateString("en-US", {
|
||||||
@@ -1171,7 +1187,7 @@ const Dashboard = () => {
|
|||||||
minute: "2-digit",
|
minute: "2-digit",
|
||||||
hour12: true,
|
hour12: true,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (_error) {
|
||||||
return label; // Return original label if parsing fails
|
return label; // Return original label if parsing fails
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1180,17 +1196,24 @@ const Dashboard = () => {
|
|||||||
try {
|
try {
|
||||||
const date = new Date(label);
|
const date = new Date(label);
|
||||||
// Check if date is valid
|
// Check if date is valid
|
||||||
if (isNaN(date.getTime())) {
|
if (Number.isNaN(date.getTime())) {
|
||||||
return label; // Return original label if date is invalid
|
return label; // Return original label if date is invalid
|
||||||
}
|
}
|
||||||
return date.toLocaleDateString("en-US", {
|
return date.toLocaleDateString("en-US", {
|
||||||
month: "short",
|
month: "short",
|
||||||
day: "numeric",
|
day: "numeric",
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (_error) {
|
||||||
return label; // Return original label if parsing fails
|
return label; // Return original label if parsing fails
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
label: (context) => {
|
||||||
|
const value = context.parsed.y;
|
||||||
|
if (value === null || value === undefined) {
|
||||||
|
return `${context.dataset.label}: No data`;
|
||||||
|
}
|
||||||
|
return `${context.dataset.label}: ${value}`;
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -1222,7 +1245,7 @@ const Dashboard = () => {
|
|||||||
const hourNum = parseInt(hour, 10);
|
const hourNum = parseInt(hour, 10);
|
||||||
|
|
||||||
// Validate hour number
|
// Validate hour number
|
||||||
if (isNaN(hourNum) || hourNum < 0 || hourNum > 23) {
|
if (Number.isNaN(hourNum) || hourNum < 0 || hourNum > 23) {
|
||||||
return hour; // Return original hour if invalid
|
return hour; // Return original hour if invalid
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1233,7 +1256,7 @@ const Dashboard = () => {
|
|||||||
: hourNum === 12
|
: hourNum === 12
|
||||||
? "12 PM"
|
? "12 PM"
|
||||||
: `${hourNum - 12} PM`;
|
: `${hourNum - 12} PM`;
|
||||||
} catch (error) {
|
} catch (_error) {
|
||||||
return label; // Return original label if parsing fails
|
return label; // Return original label if parsing fails
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1242,14 +1265,14 @@ const Dashboard = () => {
|
|||||||
try {
|
try {
|
||||||
const date = new Date(label);
|
const date = new Date(label);
|
||||||
// Check if date is valid
|
// Check if date is valid
|
||||||
if (isNaN(date.getTime())) {
|
if (Number.isNaN(date.getTime())) {
|
||||||
return label; // Return original label if date is invalid
|
return label; // Return original label if date is invalid
|
||||||
}
|
}
|
||||||
return date.toLocaleDateString("en-US", {
|
return date.toLocaleDateString("en-US", {
|
||||||
month: "short",
|
month: "short",
|
||||||
day: "numeric",
|
day: "numeric",
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (_error) {
|
||||||
return label; // Return original label if parsing fails
|
return label; // Return original label if parsing fails
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -1411,7 +1434,6 @@ const Dashboard = () => {
|
|||||||
title="Customize dashboard layout"
|
title="Customize dashboard layout"
|
||||||
>
|
>
|
||||||
<Settings className="h-4 w-4" />
|
<Settings className="h-4 w-4" />
|
||||||
Customize Dashboard
|
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -1423,7 +1445,6 @@ const Dashboard = () => {
|
|||||||
<RefreshCw
|
<RefreshCw
|
||||||
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
||||||
/>
|
/>
|
||||||
{isFetching ? "Refreshing..." : "Refresh"}
|
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
1003
frontend/src/pages/Docker.jsx
Normal file
1003
frontend/src/pages/Docker.jsx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,14 @@
|
|||||||
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
|
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
|
||||||
import {
|
import {
|
||||||
Activity,
|
Activity,
|
||||||
|
AlertCircle,
|
||||||
AlertTriangle,
|
AlertTriangle,
|
||||||
ArrowLeft,
|
ArrowLeft,
|
||||||
Calendar,
|
Calendar,
|
||||||
CheckCircle,
|
CheckCircle,
|
||||||
|
CheckCircle2,
|
||||||
Clock,
|
Clock,
|
||||||
|
Clock3,
|
||||||
Copy,
|
Copy,
|
||||||
Cpu,
|
Cpu,
|
||||||
Database,
|
Database,
|
||||||
@@ -27,11 +30,13 @@ import {
|
|||||||
import { useEffect, useId, useState } from "react";
|
import { useEffect, useId, useState } from "react";
|
||||||
import { Link, useNavigate, useParams } from "react-router-dom";
|
import { Link, useNavigate, useParams } from "react-router-dom";
|
||||||
import InlineEdit from "../components/InlineEdit";
|
import InlineEdit from "../components/InlineEdit";
|
||||||
|
import InlineMultiGroupEdit from "../components/InlineMultiGroupEdit";
|
||||||
import {
|
import {
|
||||||
adminHostsAPI,
|
adminHostsAPI,
|
||||||
dashboardAPI,
|
dashboardAPI,
|
||||||
formatDate,
|
formatDate,
|
||||||
formatRelativeTime,
|
formatRelativeTime,
|
||||||
|
hostGroupsAPI,
|
||||||
repositoryAPI,
|
repositoryAPI,
|
||||||
settingsAPI,
|
settingsAPI,
|
||||||
} from "../utils/api";
|
} from "../utils/api";
|
||||||
@@ -46,6 +51,7 @@ const HostDetail = () => {
|
|||||||
const [activeTab, setActiveTab] = useState("host");
|
const [activeTab, setActiveTab] = useState("host");
|
||||||
const [historyPage, setHistoryPage] = useState(0);
|
const [historyPage, setHistoryPage] = useState(0);
|
||||||
const [historyLimit] = useState(10);
|
const [historyLimit] = useState(10);
|
||||||
|
const [notes, setNotes] = useState("");
|
||||||
|
|
||||||
const {
|
const {
|
||||||
data: host,
|
data: host,
|
||||||
@@ -66,6 +72,64 @@ const HostDetail = () => {
|
|||||||
refetchOnWindowFocus: false, // Don't refetch when window regains focus
|
refetchOnWindowFocus: false, // Don't refetch when window regains focus
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// WebSocket connection status using Server-Sent Events (SSE) for real-time push updates
|
||||||
|
const [wsStatus, setWsStatus] = useState(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!host?.api_id) return;
|
||||||
|
|
||||||
|
const token = localStorage.getItem("token");
|
||||||
|
if (!token) return;
|
||||||
|
|
||||||
|
let eventSource = null;
|
||||||
|
let reconnectTimeout = null;
|
||||||
|
let isMounted = true;
|
||||||
|
|
||||||
|
const connect = () => {
|
||||||
|
if (!isMounted) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Create EventSource for SSE connection
|
||||||
|
eventSource = new EventSource(
|
||||||
|
`/api/v1/ws/status/${host.api_id}/stream?token=${encodeURIComponent(token)}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
eventSource.onmessage = (event) => {
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(event.data);
|
||||||
|
setWsStatus(data);
|
||||||
|
} catch (_err) {
|
||||||
|
// Silently handle parse errors
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
eventSource.onerror = (_error) => {
|
||||||
|
console.log(`[SSE] Connection error for ${host.api_id}, retrying...`);
|
||||||
|
eventSource?.close();
|
||||||
|
|
||||||
|
// Automatic reconnection after 5 seconds
|
||||||
|
if (isMounted) {
|
||||||
|
reconnectTimeout = setTimeout(connect, 5000);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} catch (_err) {
|
||||||
|
// Silently handle connection errors
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initial connection
|
||||||
|
connect();
|
||||||
|
|
||||||
|
// Cleanup on unmount or when api_id changes
|
||||||
|
return () => {
|
||||||
|
isMounted = false;
|
||||||
|
if (reconnectTimeout) clearTimeout(reconnectTimeout);
|
||||||
|
if (eventSource) {
|
||||||
|
eventSource.close();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}, [host?.api_id]);
|
||||||
|
|
||||||
// Fetch repository count for this host
|
// Fetch repository count for this host
|
||||||
const { data: repositories, isLoading: isLoadingRepos } = useQuery({
|
const { data: repositories, isLoading: isLoadingRepos } = useQuery({
|
||||||
queryKey: ["host-repositories", hostId],
|
queryKey: ["host-repositories", hostId],
|
||||||
@@ -75,6 +139,14 @@ const HostDetail = () => {
|
|||||||
enabled: !!hostId,
|
enabled: !!hostId,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Fetch host groups for multi-select
|
||||||
|
const { data: hostGroups } = useQuery({
|
||||||
|
queryKey: ["host-groups"],
|
||||||
|
queryFn: () => hostGroupsAPI.list().then((res) => res.data),
|
||||||
|
staleTime: 5 * 60 * 1000, // 5 minutes - data stays fresh longer
|
||||||
|
refetchOnWindowFocus: false, // Don't refetch when window regains focus
|
||||||
|
});
|
||||||
|
|
||||||
// Tab change handler
|
// Tab change handler
|
||||||
const handleTabChange = (tabName) => {
|
const handleTabChange = (tabName) => {
|
||||||
setActiveTab(tabName);
|
setActiveTab(tabName);
|
||||||
@@ -87,6 +159,13 @@ const HostDetail = () => {
|
|||||||
}
|
}
|
||||||
}, [host]);
|
}, [host]);
|
||||||
|
|
||||||
|
// Sync notes state with host data
|
||||||
|
useEffect(() => {
|
||||||
|
if (host) {
|
||||||
|
setNotes(host.notes || "");
|
||||||
|
}
|
||||||
|
}, [host]);
|
||||||
|
|
||||||
const deleteHostMutation = useMutation({
|
const deleteHostMutation = useMutation({
|
||||||
mutationFn: (hostId) => adminHostsAPI.delete(hostId),
|
mutationFn: (hostId) => adminHostsAPI.delete(hostId),
|
||||||
onSuccess: () => {
|
onSuccess: () => {
|
||||||
@@ -118,6 +197,15 @@ const HostDetail = () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const updateHostGroupsMutation = useMutation({
|
||||||
|
mutationFn: ({ hostId, groupIds }) =>
|
||||||
|
adminHostsAPI.updateGroups(hostId, groupIds).then((res) => res.data),
|
||||||
|
onSuccess: () => {
|
||||||
|
queryClient.invalidateQueries(["host", hostId]);
|
||||||
|
queryClient.invalidateQueries(["hosts"]);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const updateNotesMutation = useMutation({
|
const updateNotesMutation = useMutation({
|
||||||
mutationFn: ({ hostId, notes }) =>
|
mutationFn: ({ hostId, notes }) =>
|
||||||
adminHostsAPI.updateNotes(hostId, notes).then((res) => res.data),
|
adminHostsAPI.updateNotes(hostId, notes).then((res) => res.data),
|
||||||
@@ -238,29 +326,40 @@ const HostDetail = () => {
|
|||||||
return (
|
return (
|
||||||
<div className="h-screen flex flex-col">
|
<div className="h-screen flex flex-col">
|
||||||
{/* Header */}
|
{/* Header */}
|
||||||
<div className="flex items-center justify-between mb-4 pb-4 border-b border-secondary-200 dark:border-secondary-600">
|
<div className="flex items-start justify-between mb-4 pb-4 border-b border-secondary-200 dark:border-secondary-600">
|
||||||
<div className="flex items-center gap-3">
|
<div className="flex items-start gap-3">
|
||||||
<Link
|
<Link
|
||||||
to="/hosts"
|
to="/hosts"
|
||||||
className="text-secondary-500 hover:text-secondary-700 dark:text-secondary-400 dark:hover:text-secondary-200"
|
className="text-secondary-500 hover:text-secondary-700 dark:text-secondary-400 dark:hover:text-secondary-200 mt-1"
|
||||||
>
|
>
|
||||||
<ArrowLeft className="h-5 w-5" />
|
<ArrowLeft className="h-5 w-5" />
|
||||||
</Link>
|
</Link>
|
||||||
<h1 className="text-xl font-semibold text-secondary-900 dark:text-white">
|
<div className="flex flex-col gap-2">
|
||||||
|
{/* Title row with friendly name, badge, and status */}
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
|
||||||
{host.friendly_name}
|
{host.friendly_name}
|
||||||
</h1>
|
</h1>
|
||||||
{host.system_uptime && (
|
{wsStatus && (
|
||||||
<div className="flex items-center gap-1 text-sm text-secondary-600 dark:text-secondary-400">
|
<span
|
||||||
<Clock className="h-4 w-4" />
|
className={`inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase ${
|
||||||
<span className="text-xs font-medium">Uptime:</span>
|
wsStatus.connected
|
||||||
<span>{host.system_uptime}</span>
|
? "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200 animate-pulse"
|
||||||
</div>
|
: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200"
|
||||||
|
}`}
|
||||||
|
title={
|
||||||
|
wsStatus.connected
|
||||||
|
? `Agent connected via ${wsStatus.secure ? "WSS (secure)" : "WS"}`
|
||||||
|
: "Agent not connected"
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{wsStatus.connected
|
||||||
|
? wsStatus.secure
|
||||||
|
? "WSS"
|
||||||
|
: "WS"
|
||||||
|
: "Offline"}
|
||||||
|
</span>
|
||||||
)}
|
)}
|
||||||
<div className="flex items-center gap-1 text-sm text-secondary-600 dark:text-secondary-400">
|
|
||||||
<Clock className="h-4 w-4" />
|
|
||||||
<span className="text-xs font-medium">Last updated:</span>
|
|
||||||
<span>{formatRelativeTime(host.last_update)}</span>
|
|
||||||
</div>
|
|
||||||
<div
|
<div
|
||||||
className={`flex items-center gap-2 px-2 py-1 rounded-full text-xs font-medium ${getStatusColor(isStale, host.stats.outdated_packages > 0)}`}
|
className={`flex items-center gap-2 px-2 py-1 rounded-full text-xs font-medium ${getStatusColor(isStale, host.stats.outdated_packages > 0)}`}
|
||||||
>
|
>
|
||||||
@@ -268,19 +367,26 @@ const HostDetail = () => {
|
|||||||
{getStatusText(isStale, host.stats.outdated_packages > 0)}
|
{getStatusText(isStale, host.stats.outdated_packages > 0)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
{/* Info row with uptime and last updated */}
|
||||||
|
<div className="flex items-center gap-4 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
{host.system_uptime && (
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
<Clock className="h-3.5 w-3.5" />
|
||||||
|
<span className="text-xs font-medium">Uptime:</span>
|
||||||
|
<span className="text-xs">{host.system_uptime}</span>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
<Clock className="h-3.5 w-3.5" />
|
||||||
|
<span className="text-xs font-medium">Last updated:</span>
|
||||||
|
<span className="text-xs">
|
||||||
|
{formatRelativeTime(host.last_update)}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => refetch()}
|
|
||||||
disabled={isFetching}
|
|
||||||
className="btn-outline flex items-center gap-2 text-sm"
|
|
||||||
title="Refresh host data"
|
|
||||||
>
|
|
||||||
<RefreshCw
|
|
||||||
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
|
||||||
/>
|
|
||||||
{isFetching ? "Refreshing..." : "Refresh"}
|
|
||||||
</button>
|
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => setShowCredentialsModal(true)}
|
onClick={() => setShowCredentialsModal(true)}
|
||||||
@@ -289,13 +395,24 @@ const HostDetail = () => {
|
|||||||
<Key className="h-4 w-4" />
|
<Key className="h-4 w-4" />
|
||||||
Deploy Agent
|
Deploy Agent
|
||||||
</button>
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => refetch()}
|
||||||
|
disabled={isFetching}
|
||||||
|
className="btn-outline flex items-center justify-center p-2 text-sm"
|
||||||
|
title="Refresh host data"
|
||||||
|
>
|
||||||
|
<RefreshCw
|
||||||
|
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
||||||
|
/>
|
||||||
|
</button>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => setShowDeleteModal(true)}
|
onClick={() => setShowDeleteModal(true)}
|
||||||
className="btn-danger flex items-center gap-2 text-sm"
|
className="btn-danger flex items-center justify-center p-2 text-sm"
|
||||||
|
title="Delete host"
|
||||||
>
|
>
|
||||||
<Trash2 className="h-4 w-4" />
|
<Trash2 className="h-4 w-4" />
|
||||||
Delete
|
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -426,7 +543,18 @@ const HostDetail = () => {
|
|||||||
: "text-secondary-500 dark:text-secondary-400 hover:text-secondary-700 dark:hover:text-secondary-300"
|
: "text-secondary-500 dark:text-secondary-400 hover:text-secondary-700 dark:hover:text-secondary-300"
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
Agent History
|
Package Reports
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => handleTabChange("queue")}
|
||||||
|
className={`px-4 py-2 text-sm font-medium ${
|
||||||
|
activeTab === "queue"
|
||||||
|
? "text-primary-600 dark:text-primary-400 border-b-2 border-primary-500"
|
||||||
|
: "text-secondary-500 dark:text-secondary-400 hover:text-secondary-700 dark:hover:text-secondary-300"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
Agent Queue
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -493,20 +621,30 @@ const HostDetail = () => {
|
|||||||
|
|
||||||
<div>
|
<div>
|
||||||
<p className="text-xs text-secondary-500 dark:text-secondary-300 mb-1.5">
|
<p className="text-xs text-secondary-500 dark:text-secondary-300 mb-1.5">
|
||||||
Host Group
|
Host Groups
|
||||||
</p>
|
</p>
|
||||||
{host.host_groups ? (
|
{/* Extract group IDs from the new many-to-many structure */}
|
||||||
<span
|
{(() => {
|
||||||
className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium text-white"
|
const groupIds =
|
||||||
style={{ backgroundColor: host.host_groups.color }}
|
host.host_group_memberships?.map(
|
||||||
>
|
(membership) => membership.host_groups.id,
|
||||||
{host.host_groups.name}
|
) || [];
|
||||||
</span>
|
return (
|
||||||
) : (
|
<InlineMultiGroupEdit
|
||||||
<span className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium bg-secondary-100 dark:bg-secondary-700 text-secondary-800 dark:text-secondary-200">
|
key={`${host.id}-${groupIds.join(",")}`}
|
||||||
Ungrouped
|
value={groupIds}
|
||||||
</span>
|
onSave={(newGroupIds) =>
|
||||||
)}
|
updateHostGroupsMutation.mutate({
|
||||||
|
hostId: host.id,
|
||||||
|
groupIds: newGroupIds,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
options={hostGroups || []}
|
||||||
|
placeholder="Select groups..."
|
||||||
|
className="w-full"
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
})()}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div>
|
<div>
|
||||||
@@ -1097,12 +1235,8 @@ const HostDetail = () => {
|
|||||||
</div>
|
</div>
|
||||||
<div className="bg-secondary-50 dark:bg-secondary-700 rounded-lg p-4">
|
<div className="bg-secondary-50 dark:bg-secondary-700 rounded-lg p-4">
|
||||||
<textarea
|
<textarea
|
||||||
value={host.notes || ""}
|
value={notes}
|
||||||
onChange={(e) => {
|
onChange={(e) => setNotes(e.target.value)}
|
||||||
// Update local state immediately for better UX
|
|
||||||
const updatedHost = { ...host, notes: e.target.value };
|
|
||||||
queryClient.setQueryData(["host", hostId], updatedHost);
|
|
||||||
}}
|
|
||||||
placeholder="Add notes about this host... (e.g., purpose, special configurations, maintenance notes)"
|
placeholder="Add notes about this host... (e.g., purpose, special configurations, maintenance notes)"
|
||||||
className="w-full h-32 p-3 border border-secondary-200 dark:border-secondary-600 rounded-lg bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white placeholder-secondary-500 dark:placeholder-secondary-400 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 resize-none"
|
className="w-full h-32 p-3 border border-secondary-200 dark:border-secondary-600 rounded-lg bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white placeholder-secondary-500 dark:placeholder-secondary-400 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 resize-none"
|
||||||
maxLength={1000}
|
maxLength={1000}
|
||||||
@@ -1114,14 +1248,14 @@ const HostDetail = () => {
|
|||||||
</p>
|
</p>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<span className="text-xs text-secondary-400 dark:text-secondary-500">
|
<span className="text-xs text-secondary-400 dark:text-secondary-500">
|
||||||
{(host.notes || "").length}/1000
|
{notes.length}/1000
|
||||||
</span>
|
</span>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
updateNotesMutation.mutate({
|
updateNotesMutation.mutate({
|
||||||
hostId: host.id,
|
hostId: host.id,
|
||||||
notes: host.notes || "",
|
notes: notes,
|
||||||
});
|
});
|
||||||
}}
|
}}
|
||||||
disabled={updateNotesMutation.isPending}
|
disabled={updateNotesMutation.isPending}
|
||||||
@@ -1136,6 +1270,9 @@ const HostDetail = () => {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Agent Queue */}
|
||||||
|
{activeTab === "queue" && <AgentQueueTab hostId={hostId} />}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -1168,8 +1305,10 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
const [showApiKey, setShowApiKey] = useState(false);
|
const [showApiKey, setShowApiKey] = useState(false);
|
||||||
const [activeTab, setActiveTab] = useState("quick-install");
|
const [activeTab, setActiveTab] = useState("quick-install");
|
||||||
const [forceInstall, setForceInstall] = useState(false);
|
const [forceInstall, setForceInstall] = useState(false);
|
||||||
|
const [architecture, setArchitecture] = useState("amd64");
|
||||||
const apiIdInputId = useId();
|
const apiIdInputId = useId();
|
||||||
const apiKeyInputId = useId();
|
const apiKeyInputId = useId();
|
||||||
|
const architectureSelectId = useId();
|
||||||
|
|
||||||
const { data: serverUrlData } = useQuery({
|
const { data: serverUrlData } = useQuery({
|
||||||
queryKey: ["serverUrl"],
|
queryKey: ["serverUrl"],
|
||||||
@@ -1189,10 +1328,13 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
|
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
|
||||||
};
|
};
|
||||||
|
|
||||||
// Helper function to build installation URL with optional force flag
|
// Helper function to build installation URL with optional force flag and architecture
|
||||||
const getInstallUrl = () => {
|
const getInstallUrl = () => {
|
||||||
const baseUrl = `${serverUrl}/api/v1/hosts/install`;
|
const baseUrl = `${serverUrl}/api/v1/hosts/install`;
|
||||||
return forceInstall ? `${baseUrl}?force=true` : baseUrl;
|
const params = new URLSearchParams();
|
||||||
|
if (forceInstall) params.append("force", "true");
|
||||||
|
params.append("arch", architecture);
|
||||||
|
return `${baseUrl}?${params.toString()}`;
|
||||||
};
|
};
|
||||||
|
|
||||||
const copyToClipboard = async (text) => {
|
const copyToClipboard = async (text) => {
|
||||||
@@ -1308,6 +1450,29 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Architecture Selection */}
|
||||||
|
<div className="mb-3">
|
||||||
|
<label
|
||||||
|
htmlFor={architectureSelectId}
|
||||||
|
className="block text-sm font-medium text-primary-800 dark:text-primary-200 mb-2"
|
||||||
|
>
|
||||||
|
Target Architecture
|
||||||
|
</label>
|
||||||
|
<select
|
||||||
|
id={architectureSelectId}
|
||||||
|
value={architecture}
|
||||||
|
onChange={(e) => setArchitecture(e.target.value)}
|
||||||
|
className="px-3 py-2 border border-primary-300 dark:border-primary-600 rounded-md bg-white dark:bg-secondary-800 text-sm text-secondary-900 dark:text-white focus:ring-primary-500 focus:border-primary-500"
|
||||||
|
>
|
||||||
|
<option value="amd64">AMD64 (x86_64) - Default</option>
|
||||||
|
<option value="386">386 (i386) - 32-bit</option>
|
||||||
|
<option value="arm64">ARM64 (aarch64) - ARM</option>
|
||||||
|
</select>
|
||||||
|
<p className="text-xs text-primary-600 dark:text-primary-400 mt-1">
|
||||||
|
Select the architecture of the target host
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
@@ -1364,12 +1529,12 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
|
|
||||||
<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
|
<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
|
||||||
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
|
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
|
||||||
2. Download and Install Agent Script
|
2. Download and Install Agent Binary
|
||||||
</h5>
|
</h5>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
value={`curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent.sh ${serverUrl}/api/v1/hosts/agent/download -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent.sh`}
|
value={`curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent ${serverUrl}/api/v1/hosts/agent/download?arch=${architecture} -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent`}
|
||||||
readOnly
|
readOnly
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
/>
|
/>
|
||||||
@@ -1377,7 +1542,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
copyToClipboard(
|
copyToClipboard(
|
||||||
`curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent.sh ${serverUrl}/api/v1/hosts/agent/download -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent.sh`,
|
`curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent ${serverUrl}/api/v1/hosts/agent/download?arch=${architecture} -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent`,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
className="btn-secondary flex items-center gap-1"
|
className="btn-secondary flex items-center gap-1"
|
||||||
@@ -1395,7 +1560,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
value={`sudo /usr/local/bin/patchmon-agent.sh configure "${host.api_id}" "${host.api_key}" "${serverUrl}"`}
|
value={`sudo /usr/local/bin/patchmon-agent config set-api "${host.api_id}" "${host.api_key}" "${serverUrl}"`}
|
||||||
readOnly
|
readOnly
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
/>
|
/>
|
||||||
@@ -1403,7 +1568,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
copyToClipboard(
|
copyToClipboard(
|
||||||
`sudo /usr/local/bin/patchmon-agent.sh configure "${host.api_id}" "${host.api_key}" "${serverUrl}"`,
|
`sudo /usr/local/bin/patchmon-agent config set-api "${host.api_id}" "${host.api_key}" "${serverUrl}"`,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
className="btn-secondary flex items-center gap-1"
|
className="btn-secondary flex items-center gap-1"
|
||||||
@@ -1421,7 +1586,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
value="sudo /usr/local/bin/patchmon-agent.sh test"
|
value="sudo /usr/local/bin/patchmon-agent ping"
|
||||||
readOnly
|
readOnly
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
/>
|
/>
|
||||||
@@ -1429,7 +1594,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
copyToClipboard(
|
copyToClipboard(
|
||||||
"sudo /usr/local/bin/patchmon-agent.sh test",
|
"sudo /usr/local/bin/patchmon-agent ping",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
className="btn-secondary flex items-center gap-1"
|
className="btn-secondary flex items-center gap-1"
|
||||||
@@ -1447,7 +1612,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
value="sudo /usr/local/bin/patchmon-agent.sh update"
|
value="sudo /usr/local/bin/patchmon-agent report"
|
||||||
readOnly
|
readOnly
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
/>
|
/>
|
||||||
@@ -1455,7 +1620,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
copyToClipboard(
|
copyToClipboard(
|
||||||
"sudo /usr/local/bin/patchmon-agent.sh update",
|
"sudo /usr/local/bin/patchmon-agent report",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
className="btn-secondary flex items-center gap-1"
|
className="btn-secondary flex items-center gap-1"
|
||||||
@@ -1468,12 +1633,33 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
|
|
||||||
<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
|
<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
|
||||||
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
|
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
|
||||||
6. Setup Crontab (Optional)
|
6. Create Systemd Service File
|
||||||
</h5>
|
</h5>
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
value={`(sudo crontab -l 2>/dev/null | grep -v "patchmon-agent.sh update"; echo "${new Date().getMinutes()} * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1") | sudo crontab -`}
|
value={`sudo tee /etc/systemd/system/patchmon-agent.service > /dev/null << 'EOF'
|
||||||
|
[Unit]
|
||||||
|
Description=PatchMon Agent Service
|
||||||
|
After=network.target
|
||||||
|
Wants=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=root
|
||||||
|
ExecStart=/usr/local/bin/patchmon-agent serve
|
||||||
|
Restart=always
|
||||||
|
RestartSec=10
|
||||||
|
WorkingDirectory=/etc/patchmon
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
StandardOutput=journal
|
||||||
|
StandardError=journal
|
||||||
|
SyslogIdentifier=patchmon-agent
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF`}
|
||||||
readOnly
|
readOnly
|
||||||
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
/>
|
/>
|
||||||
@@ -1481,7 +1667,28 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
copyToClipboard(
|
copyToClipboard(
|
||||||
`(sudo crontab -l 2>/dev/null | grep -v "patchmon-agent.sh update"; echo "${new Date().getMinutes()} * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1") | sudo crontab -`,
|
`sudo tee /etc/systemd/system/patchmon-agent.service > /dev/null << 'EOF'
|
||||||
|
[Unit]
|
||||||
|
Description=PatchMon Agent Service
|
||||||
|
After=network.target
|
||||||
|
Wants=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=root
|
||||||
|
ExecStart=/usr/local/bin/patchmon-agent serve
|
||||||
|
Restart=always
|
||||||
|
RestartSec=10
|
||||||
|
WorkingDirectory=/etc/patchmon
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
StandardOutput=journal
|
||||||
|
StandardError=journal
|
||||||
|
SyslogIdentifier=patchmon-agent
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF`,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
className="btn-secondary flex items-center gap-1"
|
className="btn-secondary flex items-center gap-1"
|
||||||
@@ -1491,6 +1698,64 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
|
|||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
|
||||||
|
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
|
||||||
|
7. Enable and Start Service
|
||||||
|
</h5>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value="sudo systemctl daemon-reload && sudo systemctl enable patchmon-agent && sudo systemctl start patchmon-agent"
|
||||||
|
readOnly
|
||||||
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
|
/>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() =>
|
||||||
|
copyToClipboard(
|
||||||
|
"sudo systemctl daemon-reload && sudo systemctl enable patchmon-agent && sudo systemctl start patchmon-agent",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
className="btn-secondary flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Copy className="h-4 w-4" />
|
||||||
|
Copy
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-secondary-600 dark:text-secondary-400 mt-2">
|
||||||
|
This will start the agent service and establish WebSocket
|
||||||
|
connection for real-time communication
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
|
||||||
|
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
|
||||||
|
8. Verify Service Status
|
||||||
|
</h5>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value="sudo systemctl status patchmon-agent"
|
||||||
|
readOnly
|
||||||
|
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
|
||||||
|
/>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() =>
|
||||||
|
copyToClipboard("sudo systemctl status patchmon-agent")
|
||||||
|
}
|
||||||
|
className="btn-secondary flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Copy className="h-4 w-4" />
|
||||||
|
Copy
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-secondary-600 dark:text-secondary-400 mt-2">
|
||||||
|
Check that the service is running and WebSocket connection
|
||||||
|
is established
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -1659,4 +1924,249 @@ const DeleteConfirmationModal = ({
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Agent Queue Tab Component
|
||||||
|
const AgentQueueTab = ({ hostId }) => {
|
||||||
|
const {
|
||||||
|
data: queueData,
|
||||||
|
isLoading,
|
||||||
|
error,
|
||||||
|
refetch,
|
||||||
|
} = useQuery({
|
||||||
|
queryKey: ["host-queue", hostId],
|
||||||
|
queryFn: () => dashboardAPI.getHostQueue(hostId).then((res) => res.data),
|
||||||
|
staleTime: 30 * 1000, // 30 seconds
|
||||||
|
refetchInterval: 30 * 1000, // Auto-refresh every 30 seconds
|
||||||
|
});
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center h-32">
|
||||||
|
<RefreshCw className="h-6 w-6 animate-spin text-primary-600" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error) {
|
||||||
|
return (
|
||||||
|
<div className="text-center py-8">
|
||||||
|
<AlertCircle className="h-12 w-12 text-red-500 mx-auto mb-4" />
|
||||||
|
<p className="text-red-600 dark:text-red-400">
|
||||||
|
Failed to load queue data
|
||||||
|
</p>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => refetch()}
|
||||||
|
className="mt-2 px-4 py-2 text-sm bg-primary-600 text-white rounded-md hover:bg-primary-700"
|
||||||
|
>
|
||||||
|
Retry
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const { waiting, active, delayed, failed, jobHistory } = queueData.data;
|
||||||
|
|
||||||
|
const getStatusIcon = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return <CheckCircle2 className="h-4 w-4 text-green-500" />;
|
||||||
|
case "failed":
|
||||||
|
return <AlertCircle className="h-4 w-4 text-red-500" />;
|
||||||
|
case "active":
|
||||||
|
return <Clock3 className="h-4 w-4 text-blue-500" />;
|
||||||
|
default:
|
||||||
|
return <Clock className="h-4 w-4 text-gray-500" />;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusColor = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return "text-green-600 dark:text-green-400";
|
||||||
|
case "failed":
|
||||||
|
return "text-red-600 dark:text-red-400";
|
||||||
|
case "active":
|
||||||
|
return "text-blue-600 dark:text-blue-400";
|
||||||
|
default:
|
||||||
|
return "text-gray-600 dark:text-gray-400";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const formatJobType = (type) => {
|
||||||
|
switch (type) {
|
||||||
|
case "settings_update":
|
||||||
|
return "Settings Update";
|
||||||
|
case "report_now":
|
||||||
|
return "Report Now";
|
||||||
|
case "update_agent":
|
||||||
|
return "Agent Update";
|
||||||
|
default:
|
||||||
|
return type;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
|
||||||
|
Live Agent Queue Status
|
||||||
|
</h3>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => refetch()}
|
||||||
|
className="btn-outline flex items-center gap-2"
|
||||||
|
title="Refresh queue data"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4" />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Queue Summary */}
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Server className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Waiting
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{waiting}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Clock3 className="h-5 w-5 text-warning-600 mr-2" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Active
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{active}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Clock className="h-5 w-5 text-primary-600 mr-2" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Delayed
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{delayed}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<AlertCircle className="h-5 w-5 text-danger-600 mr-2" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Failed
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{failed}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Job History */}
|
||||||
|
<div>
|
||||||
|
{jobHistory.length === 0 ? (
|
||||||
|
<div className="text-center py-8">
|
||||||
|
<Server className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
||||||
|
<p className="text-gray-500 dark:text-gray-400">
|
||||||
|
No job history found
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-700">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Job ID
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Job Name
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Status
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Attempt
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Date/Time
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Error/Output
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
{jobHistory.map((job) => (
|
||||||
|
<tr
|
||||||
|
key={job.id}
|
||||||
|
className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
||||||
|
>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs font-mono text-secondary-900 dark:text-white">
|
||||||
|
{job.job_id}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs text-secondary-900 dark:text-white">
|
||||||
|
{formatJobType(job.job_name)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
{getStatusIcon(job.status)}
|
||||||
|
<span
|
||||||
|
className={`text-xs font-medium ${getStatusColor(job.status)}`}
|
||||||
|
>
|
||||||
|
{job.status.charAt(0).toUpperCase() +
|
||||||
|
job.status.slice(1)}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs text-secondary-900 dark:text-white">
|
||||||
|
{job.attempt_number}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs text-secondary-900 dark:text-white">
|
||||||
|
{new Date(job.created_at).toLocaleString()}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 text-xs">
|
||||||
|
{job.error_message ? (
|
||||||
|
<span className="text-red-600 dark:text-red-400">
|
||||||
|
{job.error_message}
|
||||||
|
</span>
|
||||||
|
) : job.output ? (
|
||||||
|
<span className="text-green-600 dark:text-green-400">
|
||||||
|
{JSON.stringify(job.output)}
|
||||||
|
</span>
|
||||||
|
) : (
|
||||||
|
<span className="text-secondary-500 dark:text-secondary-400">
|
||||||
|
-
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
export default HostDetail;
|
export default HostDetail;
|
||||||
|
|||||||
@@ -21,12 +21,13 @@ import {
|
|||||||
Square,
|
Square,
|
||||||
Trash2,
|
Trash2,
|
||||||
Users,
|
Users,
|
||||||
|
Wifi,
|
||||||
X,
|
X,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { useEffect, useId, useMemo, useState } from "react";
|
import { useEffect, useId, useMemo, useState } from "react";
|
||||||
import { Link, useNavigate, useSearchParams } from "react-router-dom";
|
import { Link, useNavigate, useSearchParams } from "react-router-dom";
|
||||||
import InlineEdit from "../components/InlineEdit";
|
import InlineEdit from "../components/InlineEdit";
|
||||||
import InlineGroupEdit from "../components/InlineGroupEdit";
|
import InlineMultiGroupEdit from "../components/InlineMultiGroupEdit";
|
||||||
import InlineToggle from "../components/InlineToggle";
|
import InlineToggle from "../components/InlineToggle";
|
||||||
import {
|
import {
|
||||||
adminHostsAPI,
|
adminHostsAPI,
|
||||||
@@ -34,14 +35,14 @@ import {
|
|||||||
formatRelativeTime,
|
formatRelativeTime,
|
||||||
hostGroupsAPI,
|
hostGroupsAPI,
|
||||||
} from "../utils/api";
|
} from "../utils/api";
|
||||||
import { OSIcon } from "../utils/osIcons.jsx";
|
import { getOSDisplayName, OSIcon } from "../utils/osIcons.jsx";
|
||||||
|
|
||||||
// Add Host Modal Component
|
// Add Host Modal Component
|
||||||
const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
|
const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
|
||||||
const friendlyNameId = useId();
|
const friendlyNameId = useId();
|
||||||
const [formData, setFormData] = useState({
|
const [formData, setFormData] = useState({
|
||||||
friendly_name: "",
|
friendly_name: "",
|
||||||
hostGroupId: "",
|
hostGroupIds: [], // Changed to array for multiple selection
|
||||||
});
|
});
|
||||||
const [isSubmitting, setIsSubmitting] = useState(false);
|
const [isSubmitting, setIsSubmitting] = useState(false);
|
||||||
const [error, setError] = useState("");
|
const [error, setError] = useState("");
|
||||||
@@ -64,7 +65,7 @@ const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
|
|||||||
const response = await adminHostsAPI.create(formData);
|
const response = await adminHostsAPI.create(formData);
|
||||||
console.log("Host created successfully:", formData.friendly_name);
|
console.log("Host created successfully:", formData.friendly_name);
|
||||||
onSuccess(response.data);
|
onSuccess(response.data);
|
||||||
setFormData({ friendly_name: "", hostGroupId: "" });
|
setFormData({ friendly_name: "", hostGroupIds: [] });
|
||||||
onClose();
|
onClose();
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error("Full error object:", err);
|
console.error("Full error object:", err);
|
||||||
@@ -134,68 +135,56 @@ const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
|
|||||||
|
|
||||||
<div>
|
<div>
|
||||||
<span className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-3">
|
<span className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-3">
|
||||||
Host Group
|
Host Groups
|
||||||
</span>
|
</span>
|
||||||
<div className="grid grid-cols-3 gap-2">
|
<div className="space-y-2 max-h-48 overflow-y-auto">
|
||||||
{/* No Group Option */}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => setFormData({ ...formData, hostGroupId: "" })}
|
|
||||||
className={`flex flex-col items-center justify-center px-2 py-3 text-center border-2 rounded-lg transition-all duration-200 relative min-h-[80px] ${
|
|
||||||
formData.hostGroupId === ""
|
|
||||||
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30 text-primary-700 dark:text-primary-300"
|
|
||||||
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 text-secondary-700 dark:text-secondary-200 hover:border-secondary-400 dark:hover:border-secondary-500"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
<div className="text-xs font-medium">No Group</div>
|
|
||||||
<div className="text-xs text-secondary-500 dark:text-secondary-400 mt-1">
|
|
||||||
Ungrouped
|
|
||||||
</div>
|
|
||||||
{formData.hostGroupId === "" && (
|
|
||||||
<div className="absolute top-2 right-2 w-3 h-3 rounded-full bg-primary-500 flex items-center justify-center">
|
|
||||||
<div className="w-1.5 h-1.5 rounded-full bg-white"></div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
|
|
||||||
{/* Host Group Options */}
|
{/* Host Group Options */}
|
||||||
{hostGroups?.map((group) => (
|
{hostGroups?.map((group) => (
|
||||||
<button
|
<label
|
||||||
key={group.id}
|
key={group.id}
|
||||||
type="button"
|
className={`flex items-center gap-3 p-3 border-2 rounded-lg transition-all duration-200 cursor-pointer ${
|
||||||
onClick={() =>
|
formData.hostGroupIds.includes(group.id)
|
||||||
setFormData({ ...formData, hostGroupId: group.id })
|
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30"
|
||||||
}
|
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 hover:border-secondary-400 dark:hover:border-secondary-500"
|
||||||
className={`flex flex-col items-center justify-center px-2 py-3 text-center border-2 rounded-lg transition-all duration-200 relative min-h-[80px] ${
|
|
||||||
formData.hostGroupId === group.id
|
|
||||||
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30 text-primary-700 dark:text-primary-300"
|
|
||||||
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 text-secondary-700 dark:text-secondary-200 hover:border-secondary-400 dark:hover:border-secondary-500"
|
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
<div className="flex items-center gap-1 mb-1 w-full justify-center">
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={formData.hostGroupIds.includes(group.id)}
|
||||||
|
onChange={(e) => {
|
||||||
|
if (e.target.checked) {
|
||||||
|
setFormData({
|
||||||
|
...formData,
|
||||||
|
hostGroupIds: [...formData.hostGroupIds, group.id],
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
setFormData({
|
||||||
|
...formData,
|
||||||
|
hostGroupIds: formData.hostGroupIds.filter(
|
||||||
|
(id) => id !== group.id,
|
||||||
|
),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className="w-4 h-4 text-primary-600 bg-gray-100 border-gray-300 rounded focus:ring-primary-500 dark:focus:ring-primary-600 dark:ring-offset-gray-800 focus:ring-2 dark:bg-gray-700 dark:border-gray-600"
|
||||||
|
/>
|
||||||
|
<div className="flex items-center gap-2 flex-1">
|
||||||
{group.color && (
|
{group.color && (
|
||||||
<div
|
<div
|
||||||
className="w-3 h-3 rounded-full border border-secondary-300 dark:border-secondary-500 flex-shrink-0"
|
className="w-3 h-3 rounded-full border border-secondary-300 dark:border-secondary-500 flex-shrink-0"
|
||||||
style={{ backgroundColor: group.color }}
|
style={{ backgroundColor: group.color }}
|
||||||
></div>
|
></div>
|
||||||
)}
|
)}
|
||||||
<div className="text-xs font-medium truncate max-w-full">
|
<div className="text-sm font-medium text-secondary-700 dark:text-secondary-200">
|
||||||
{group.name}
|
{group.name}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="text-xs text-secondary-500 dark:text-secondary-400">
|
</label>
|
||||||
Group
|
|
||||||
</div>
|
|
||||||
{formData.hostGroupId === group.id && (
|
|
||||||
<div className="absolute top-2 right-2 w-3 h-3 rounded-full bg-primary-500 flex items-center justify-center">
|
|
||||||
<div className="w-1.5 h-1.5 rounded-full bg-white"></div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
<p className="mt-2 text-sm text-secondary-500 dark:text-secondary-400">
|
<p className="mt-2 text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
Optional: Assign this host to a group for better organization.
|
Optional: Select one or more groups to assign this host to for
|
||||||
|
better organization.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -328,22 +317,24 @@ const Hosts = () => {
|
|||||||
const defaultConfig = [
|
const defaultConfig = [
|
||||||
{ id: "select", label: "Select", visible: true, order: 0 },
|
{ id: "select", label: "Select", visible: true, order: 0 },
|
||||||
{ id: "host", label: "Friendly Name", visible: true, order: 1 },
|
{ id: "host", label: "Friendly Name", visible: true, order: 1 },
|
||||||
{ id: "ip", label: "IP Address", visible: false, order: 2 },
|
{ id: "hostname", label: "System Hostname", visible: true, order: 2 },
|
||||||
{ id: "group", label: "Group", visible: true, order: 3 },
|
{ id: "ip", label: "IP Address", visible: false, order: 3 },
|
||||||
{ id: "os", label: "OS", visible: true, order: 4 },
|
{ id: "group", label: "Group", visible: true, order: 4 },
|
||||||
{ id: "os_version", label: "OS Version", visible: false, order: 5 },
|
{ id: "os", label: "OS", visible: true, order: 5 },
|
||||||
{ id: "agent_version", label: "Agent Version", visible: true, order: 6 },
|
{ id: "os_version", label: "OS Version", visible: false, order: 6 },
|
||||||
|
{ id: "agent_version", label: "Agent Version", visible: true, order: 7 },
|
||||||
{
|
{
|
||||||
id: "auto_update",
|
id: "auto_update",
|
||||||
label: "Agent Auto-Update",
|
label: "Agent Auto-Update",
|
||||||
visible: true,
|
visible: true,
|
||||||
order: 7,
|
order: 8,
|
||||||
},
|
},
|
||||||
{ id: "status", label: "Status", visible: true, order: 8 },
|
{ id: "ws_status", label: "Connection", visible: true, order: 9 },
|
||||||
{ id: "updates", label: "Updates", visible: true, order: 9 },
|
{ id: "status", label: "Status", visible: true, order: 10 },
|
||||||
{ id: "notes", label: "Notes", visible: false, order: 10 },
|
{ id: "updates", label: "Updates", visible: true, order: 11 },
|
||||||
{ id: "last_update", label: "Last Update", visible: true, order: 11 },
|
{ id: "notes", label: "Notes", visible: false, order: 12 },
|
||||||
{ id: "actions", label: "Actions", visible: true, order: 12 },
|
{ id: "last_update", label: "Last Update", visible: true, order: 13 },
|
||||||
|
{ id: "actions", label: "Actions", visible: true, order: 14 },
|
||||||
];
|
];
|
||||||
|
|
||||||
const saved = localStorage.getItem("hosts-column-config");
|
const saved = localStorage.getItem("hosts-column-config");
|
||||||
@@ -365,8 +356,11 @@ const Hosts = () => {
|
|||||||
localStorage.removeItem("hosts-column-config");
|
localStorage.removeItem("hosts-column-config");
|
||||||
return defaultConfig;
|
return defaultConfig;
|
||||||
} else {
|
} else {
|
||||||
// Use the existing configuration
|
// Ensure ws_status column is visible in saved config
|
||||||
return savedConfig;
|
const updatedConfig = savedConfig.map((col) =>
|
||||||
|
col.id === "ws_status" ? { ...col, visible: true } : col,
|
||||||
|
);
|
||||||
|
return updatedConfig;
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
// If there's an error parsing the config, clear it and use default
|
// If there's an error parsing the config, clear it and use default
|
||||||
@@ -398,6 +392,118 @@ const Hosts = () => {
|
|||||||
queryFn: () => hostGroupsAPI.list().then((res) => res.data),
|
queryFn: () => hostGroupsAPI.list().then((res) => res.data),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Track WebSocket status for all hosts
|
||||||
|
const [wsStatusMap, setWsStatusMap] = useState({});
|
||||||
|
|
||||||
|
// Fetch initial WebSocket status for all hosts
|
||||||
|
useEffect(() => {
|
||||||
|
if (!hosts || hosts.length === 0) return;
|
||||||
|
|
||||||
|
const token = localStorage.getItem("token");
|
||||||
|
if (!token) return;
|
||||||
|
|
||||||
|
// Fetch initial WebSocket status for all hosts
|
||||||
|
const fetchInitialStatus = async () => {
|
||||||
|
const statusPromises = hosts
|
||||||
|
.filter((host) => host.api_id)
|
||||||
|
.map(async (host) => {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/v1/ws/status/${host.api_id}`, {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${token}`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
return { apiId: host.api_id, status: data.data };
|
||||||
|
}
|
||||||
|
} catch (_error) {
|
||||||
|
// Silently handle errors
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
apiId: host.api_id,
|
||||||
|
status: { connected: false, secure: false },
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
const results = await Promise.all(statusPromises);
|
||||||
|
const initialStatusMap = {};
|
||||||
|
results.forEach(({ apiId, status }) => {
|
||||||
|
initialStatusMap[apiId] = status;
|
||||||
|
});
|
||||||
|
|
||||||
|
setWsStatusMap(initialStatusMap);
|
||||||
|
};
|
||||||
|
|
||||||
|
fetchInitialStatus();
|
||||||
|
}, [hosts]);
|
||||||
|
|
||||||
|
// Subscribe to WebSocket status changes for all hosts via SSE
|
||||||
|
useEffect(() => {
|
||||||
|
if (!hosts || hosts.length === 0) return;
|
||||||
|
|
||||||
|
const token = localStorage.getItem("token");
|
||||||
|
if (!token) return;
|
||||||
|
|
||||||
|
const eventSources = new Map();
|
||||||
|
let isMounted = true;
|
||||||
|
|
||||||
|
const connectHost = (apiId) => {
|
||||||
|
if (!isMounted || eventSources.has(apiId)) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const es = new EventSource(
|
||||||
|
`/api/v1/ws/status/${apiId}/stream?token=${encodeURIComponent(token)}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
es.onmessage = (event) => {
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(event.data);
|
||||||
|
if (isMounted) {
|
||||||
|
setWsStatusMap((prev) => {
|
||||||
|
const newMap = { ...prev, [apiId]: data };
|
||||||
|
return newMap;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (_err) {
|
||||||
|
// Silently handle parse errors
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
es.onerror = (_error) => {
|
||||||
|
console.log(`[SSE] Connection error for ${apiId}, retrying...`);
|
||||||
|
es?.close();
|
||||||
|
eventSources.delete(apiId);
|
||||||
|
if (isMounted) {
|
||||||
|
// Retry connection after 5 seconds with exponential backoff
|
||||||
|
setTimeout(() => connectHost(apiId), 5000);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
eventSources.set(apiId, es);
|
||||||
|
} catch (_err) {
|
||||||
|
// Silently handle connection errors
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Connect to all hosts
|
||||||
|
for (const host of hosts) {
|
||||||
|
if (host.api_id) {
|
||||||
|
connectHost(host.api_id);
|
||||||
|
} else {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cleanup function
|
||||||
|
return () => {
|
||||||
|
isMounted = false;
|
||||||
|
for (const es of eventSources.values()) {
|
||||||
|
es.close();
|
||||||
|
}
|
||||||
|
eventSources.clear();
|
||||||
|
};
|
||||||
|
}, [hosts]);
|
||||||
|
|
||||||
const bulkUpdateGroupMutation = useMutation({
|
const bulkUpdateGroupMutation = useMutation({
|
||||||
mutationFn: ({ hostIds, hostGroupId }) =>
|
mutationFn: ({ hostIds, hostGroupId }) =>
|
||||||
adminHostsAPI.bulkUpdateGroup(hostIds, hostGroupId),
|
adminHostsAPI.bulkUpdateGroup(hostIds, hostGroupId),
|
||||||
@@ -439,7 +545,7 @@ const Hosts = () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const updateHostGroupMutation = useMutation({
|
const _updateHostGroupMutation = useMutation({
|
||||||
mutationFn: ({ hostId, hostGroupId }) => {
|
mutationFn: ({ hostId, hostGroupId }) => {
|
||||||
console.log("updateHostGroupMutation called with:", {
|
console.log("updateHostGroupMutation called with:", {
|
||||||
hostId,
|
hostId,
|
||||||
@@ -485,6 +591,46 @@ const Hosts = () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const updateHostGroupsMutation = useMutation({
|
||||||
|
mutationFn: ({ hostId, groupIds }) => {
|
||||||
|
console.log("updateHostGroupsMutation called with:", {
|
||||||
|
hostId,
|
||||||
|
groupIds,
|
||||||
|
});
|
||||||
|
return adminHostsAPI.updateGroups(hostId, groupIds).then((res) => {
|
||||||
|
console.log("updateGroups API response:", res);
|
||||||
|
return res.data;
|
||||||
|
});
|
||||||
|
},
|
||||||
|
onSuccess: (data) => {
|
||||||
|
// Update the cache with the new host data
|
||||||
|
queryClient.setQueryData(["hosts"], (oldData) => {
|
||||||
|
console.log("Old cache data before update:", oldData);
|
||||||
|
if (!oldData) return oldData;
|
||||||
|
const updatedData = oldData.map((host) => {
|
||||||
|
if (host.id === data.host.id) {
|
||||||
|
console.log(
|
||||||
|
"Updating host in cache:",
|
||||||
|
host.id,
|
||||||
|
"with new data:",
|
||||||
|
data.host,
|
||||||
|
);
|
||||||
|
return data.host;
|
||||||
|
}
|
||||||
|
return host;
|
||||||
|
});
|
||||||
|
console.log("New cache data after update:", updatedData);
|
||||||
|
return updatedData;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Also invalidate to ensure consistency
|
||||||
|
queryClient.invalidateQueries(["hosts"]);
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
console.error("updateHostGroupsMutation error:", error);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const toggleAutoUpdateMutation = useMutation({
|
const toggleAutoUpdateMutation = useMutation({
|
||||||
mutationFn: ({ hostId, autoUpdate }) =>
|
mutationFn: ({ hostId, autoUpdate }) =>
|
||||||
adminHostsAPI
|
adminHostsAPI
|
||||||
@@ -562,7 +708,7 @@ const Hosts = () => {
|
|||||||
osFilter === "all" ||
|
osFilter === "all" ||
|
||||||
host.os_type?.toLowerCase() === osFilter.toLowerCase();
|
host.os_type?.toLowerCase() === osFilter.toLowerCase();
|
||||||
|
|
||||||
// URL filter for hosts needing updates, inactive hosts, up-to-date hosts, or stale hosts
|
// URL filter for hosts needing updates, inactive hosts, up-to-date hosts, stale hosts, or offline hosts
|
||||||
const filter = searchParams.get("filter");
|
const filter = searchParams.get("filter");
|
||||||
const matchesUrlFilter =
|
const matchesUrlFilter =
|
||||||
(filter !== "needsUpdates" ||
|
(filter !== "needsUpdates" ||
|
||||||
@@ -570,7 +716,8 @@ const Hosts = () => {
|
|||||||
(filter !== "inactive" ||
|
(filter !== "inactive" ||
|
||||||
(host.effectiveStatus || host.status) === "inactive") &&
|
(host.effectiveStatus || host.status) === "inactive") &&
|
||||||
(filter !== "upToDate" || (!host.isStale && host.updatesCount === 0)) &&
|
(filter !== "upToDate" || (!host.isStale && host.updatesCount === 0)) &&
|
||||||
(filter !== "stale" || host.isStale);
|
(filter !== "stale" || host.isStale) &&
|
||||||
|
(filter !== "offline" || wsStatusMap[host.api_id]?.connected !== true);
|
||||||
|
|
||||||
// Hide stale filter
|
// Hide stale filter
|
||||||
const matchesHideStale = !hideStale || !host.isStale;
|
const matchesHideStale = !hideStale || !host.isStale;
|
||||||
@@ -655,6 +802,7 @@ const Hosts = () => {
|
|||||||
sortDirection,
|
sortDirection,
|
||||||
searchParams,
|
searchParams,
|
||||||
hideStale,
|
hideStale,
|
||||||
|
wsStatusMap,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
// Get unique OS types from hosts for dynamic dropdown
|
// Get unique OS types from hosts for dynamic dropdown
|
||||||
@@ -756,10 +904,19 @@ const Hosts = () => {
|
|||||||
{ id: "group", label: "Group", visible: true, order: 4 },
|
{ id: "group", label: "Group", visible: true, order: 4 },
|
||||||
{ id: "os", label: "OS", visible: true, order: 5 },
|
{ id: "os", label: "OS", visible: true, order: 5 },
|
||||||
{ id: "os_version", label: "OS Version", visible: false, order: 6 },
|
{ id: "os_version", label: "OS Version", visible: false, order: 6 },
|
||||||
{ id: "status", label: "Status", visible: true, order: 7 },
|
{ id: "agent_version", label: "Agent Version", visible: true, order: 7 },
|
||||||
{ id: "updates", label: "Updates", visible: true, order: 8 },
|
{
|
||||||
{ id: "last_update", label: "Last Update", visible: true, order: 9 },
|
id: "auto_update",
|
||||||
{ id: "actions", label: "Actions", visible: true, order: 10 },
|
label: "Agent Auto-Update",
|
||||||
|
visible: true,
|
||||||
|
order: 8,
|
||||||
|
},
|
||||||
|
{ id: "ws_status", label: "Connection", visible: true, order: 9 },
|
||||||
|
{ id: "status", label: "Status", visible: true, order: 10 },
|
||||||
|
{ id: "updates", label: "Updates", visible: true, order: 11 },
|
||||||
|
{ id: "notes", label: "Notes", visible: false, order: 12 },
|
||||||
|
{ id: "last_update", label: "Last Update", visible: true, order: 13 },
|
||||||
|
{ id: "actions", label: "Actions", visible: true, order: 14 },
|
||||||
];
|
];
|
||||||
updateColumnConfig(defaultConfig);
|
updateColumnConfig(defaultConfig);
|
||||||
};
|
};
|
||||||
@@ -822,27 +979,33 @@ const Hosts = () => {
|
|||||||
{host.ip || "N/A"}
|
{host.ip || "N/A"}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
case "group":
|
case "group": {
|
||||||
|
// Extract group IDs from the new many-to-many structure
|
||||||
|
const groupIds =
|
||||||
|
host.host_group_memberships?.map(
|
||||||
|
(membership) => membership.host_groups.id,
|
||||||
|
) || [];
|
||||||
return (
|
return (
|
||||||
<InlineGroupEdit
|
<InlineMultiGroupEdit
|
||||||
key={`${host.id}-${host.host_groups?.id || "ungrouped"}-${host.host_groups?.name || "ungrouped"}`}
|
key={`${host.id}-${groupIds.join(",")}`}
|
||||||
value={host.host_groups?.id}
|
value={groupIds}
|
||||||
onSave={(newGroupId) =>
|
onSave={(newGroupIds) =>
|
||||||
updateHostGroupMutation.mutate({
|
updateHostGroupsMutation.mutate({
|
||||||
hostId: host.id,
|
hostId: host.id,
|
||||||
hostGroupId: newGroupId,
|
groupIds: newGroupIds,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
options={hostGroups || []}
|
options={hostGroups || []}
|
||||||
placeholder="Select group..."
|
placeholder="Select groups..."
|
||||||
className="w-full"
|
className="w-full"
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
}
|
||||||
case "os":
|
case "os":
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center gap-2 text-sm text-secondary-900 dark:text-white">
|
<div className="flex items-center gap-2 text-sm text-secondary-900 dark:text-white">
|
||||||
<OSIcon osType={host.os_type} className="h-4 w-4" />
|
<OSIcon osType={host.os_type} className="h-4 w-4" />
|
||||||
<span>{host.os_type}</span>
|
<span>{getOSDisplayName(host.os_type)}</span>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
case "os_version":
|
case "os_version":
|
||||||
@@ -871,6 +1034,38 @@ const Hosts = () => {
|
|||||||
falseLabel="No"
|
falseLabel="No"
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
case "ws_status": {
|
||||||
|
const wsStatus = wsStatusMap[host.api_id];
|
||||||
|
if (!wsStatus) {
|
||||||
|
return (
|
||||||
|
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-medium bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-400">
|
||||||
|
<div className="w-2 h-2 bg-gray-400 rounded-full mr-1.5"></div>
|
||||||
|
Unknown
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${
|
||||||
|
wsStatus.connected
|
||||||
|
? "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200"
|
||||||
|
: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200"
|
||||||
|
}`}
|
||||||
|
title={
|
||||||
|
wsStatus.connected
|
||||||
|
? `Agent connected via ${wsStatus.secure ? "WSS (secure)" : "WS (insecure)"}`
|
||||||
|
: "Agent not connected"
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<div
|
||||||
|
className={`w-2 h-2 rounded-full mr-1.5 ${
|
||||||
|
wsStatus.connected ? "bg-green-500 animate-pulse" : "bg-red-500"
|
||||||
|
}`}
|
||||||
|
></div>
|
||||||
|
{wsStatus.connected ? (wsStatus.secure ? "WSS" : "WS") : "Offline"}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
case "status":
|
case "status":
|
||||||
return (
|
return (
|
||||||
<div className="text-sm text-secondary-900 dark:text-white">
|
<div className="text-sm text-secondary-900 dark:text-white">
|
||||||
@@ -966,13 +1161,13 @@ const Hosts = () => {
|
|||||||
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleStaleClick = () => {
|
const handleConnectionStatusClick = () => {
|
||||||
// Filter to show stale/inactive hosts
|
// Filter to show offline hosts (not connected via WebSocket)
|
||||||
setStatusFilter("inactive");
|
setStatusFilter("all");
|
||||||
setShowFilters(true);
|
setShowFilters(true);
|
||||||
// We'll use the existing inactive URL filter logic
|
// Use a new URL filter for connection status
|
||||||
const newSearchParams = new URLSearchParams(window.location.search);
|
const newSearchParams = new URLSearchParams(window.location.search);
|
||||||
newSearchParams.set("filter", "inactive");
|
newSearchParams.set("filter", "offline");
|
||||||
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1026,13 +1221,12 @@ const Hosts = () => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() => refetch()}
|
onClick={() => refetch()}
|
||||||
disabled={isFetching}
|
disabled={isFetching}
|
||||||
className="btn-outline flex items-center gap-2"
|
className="btn-outline flex items-center justify-center p-2"
|
||||||
title="Refresh hosts data"
|
title="Refresh hosts data"
|
||||||
>
|
>
|
||||||
<RefreshCw
|
<RefreshCw
|
||||||
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
||||||
/>
|
/>
|
||||||
{isFetching ? "Refreshing..." : "Refresh"}
|
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -1102,17 +1296,46 @@ const Hosts = () => {
|
|||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
|
className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
|
||||||
onClick={handleStaleClick}
|
onClick={handleConnectionStatusClick}
|
||||||
>
|
>
|
||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
<AlertTriangle className="h-5 w-5 text-danger-600 mr-2" />
|
<Wifi className="h-5 w-5 text-primary-600 mr-2" />
|
||||||
<div>
|
<div className="flex-1">
|
||||||
<p className="text-sm text-secondary-500 dark:text-white">
|
<p className="text-sm text-secondary-500 dark:text-white mb-1">
|
||||||
Stale
|
Connection Status
|
||||||
</p>
|
|
||||||
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
|
||||||
{hosts?.filter((h) => h.isStale).length || 0}
|
|
||||||
</p>
|
</p>
|
||||||
|
{(() => {
|
||||||
|
const connectedCount =
|
||||||
|
hosts?.filter(
|
||||||
|
(h) => wsStatusMap[h.api_id]?.connected === true,
|
||||||
|
).length || 0;
|
||||||
|
const offlineCount =
|
||||||
|
hosts?.filter(
|
||||||
|
(h) => wsStatusMap[h.api_id]?.connected !== true,
|
||||||
|
).length || 0;
|
||||||
|
return (
|
||||||
|
<div className="flex gap-4">
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
<div className="w-2 h-2 bg-green-500 rounded-full"></div>
|
||||||
|
<span className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{connectedCount}
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
Connected
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
<div className="w-2 h-2 bg-red-500 rounded-full"></div>
|
||||||
|
<span className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{offlineCount}
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
Offline
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})()}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</button>
|
</button>
|
||||||
@@ -1437,6 +1660,11 @@ const Hosts = () => {
|
|||||||
<div className="flex items-center gap-2 font-normal text-xs text-secondary-500 dark:text-secondary-300 normal-case tracking-wider">
|
<div className="flex items-center gap-2 font-normal text-xs text-secondary-500 dark:text-secondary-300 normal-case tracking-wider">
|
||||||
{column.label}
|
{column.label}
|
||||||
</div>
|
</div>
|
||||||
|
) : column.id === "ws_status" ? (
|
||||||
|
<div className="flex items-center gap-2 font-normal text-xs text-secondary-500 dark:text-secondary-300 normal-case tracking-wider">
|
||||||
|
<Wifi className="h-3 w-3" />
|
||||||
|
{column.label}
|
||||||
|
</div>
|
||||||
) : column.id === "status" ? (
|
) : column.id === "status" ? (
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -1785,9 +2013,10 @@ const ColumnSettingsModal = ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
|
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50 p-4">
|
||||||
<div className="bg-white dark:bg-secondary-800 rounded-lg shadow-xl max-w-md w-full mx-4">
|
<div className="bg-white dark:bg-secondary-800 rounded-lg shadow-xl max-w-lg w-full max-h-[85vh] flex flex-col">
|
||||||
<div className="px-6 py-4 border-b border-secondary-200 dark:border-secondary-600">
|
{/* Header */}
|
||||||
|
<div className="px-6 py-4 border-b border-secondary-200 dark:border-secondary-600 flex-shrink-0">
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
|
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
|
||||||
Column Settings
|
Column Settings
|
||||||
@@ -1800,14 +2029,14 @@ const ColumnSettingsModal = ({
|
|||||||
<X className="h-5 w-5" />
|
<X className="h-5 w-5" />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
<p className="text-sm text-secondary-600 dark:text-secondary-300 mt-2">
|
||||||
|
|
||||||
<div className="px-6 py-4">
|
|
||||||
<p className="text-sm text-secondary-600 dark:text-secondary-300 mb-4">
|
|
||||||
Drag to reorder columns or toggle visibility
|
Drag to reorder columns or toggle visibility
|
||||||
</p>
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="space-y-2">
|
{/* Scrollable content */}
|
||||||
|
<div className="px-6 py-4 flex-1 overflow-y-auto">
|
||||||
|
<div className="space-y-1">
|
||||||
{columnConfig.map((column, index) => (
|
{columnConfig.map((column, index) => (
|
||||||
<button
|
<button
|
||||||
key={column.id}
|
key={column.id}
|
||||||
@@ -1824,22 +2053,22 @@ const ColumnSettingsModal = ({
|
|||||||
// Focus handling for keyboard users
|
// Focus handling for keyboard users
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={`flex items-center justify-between p-3 border rounded-lg cursor-move w-full text-left ${
|
className={`flex items-center justify-between p-2.5 border rounded-lg cursor-move w-full text-left transition-colors ${
|
||||||
draggedIndex === index
|
draggedIndex === index
|
||||||
? "opacity-50"
|
? "opacity-50"
|
||||||
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
||||||
} border-secondary-200 dark:border-secondary-600`}
|
} border-secondary-200 dark:border-secondary-600`}
|
||||||
>
|
>
|
||||||
<div className="flex items-center gap-3">
|
<div className="flex items-center gap-2.5">
|
||||||
<GripVertical className="h-4 w-4 text-secondary-400 dark:text-secondary-500" />
|
<GripVertical className="h-4 w-4 text-secondary-400 dark:text-secondary-500 flex-shrink-0" />
|
||||||
<span className="text-sm font-medium text-secondary-900 dark:text-white">
|
<span className="text-sm font-medium text-secondary-900 dark:text-white truncate">
|
||||||
{column.label}
|
{column.label}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => onToggleVisibility(column.id)}
|
onClick={() => onToggleVisibility(column.id)}
|
||||||
className={`p-1 rounded ${
|
className={`p-1 rounded transition-colors flex-shrink-0 ${
|
||||||
column.visible
|
column.visible
|
||||||
? "text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300"
|
? "text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
: "text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300"
|
: "text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300"
|
||||||
@@ -1854,8 +2083,11 @@ const ColumnSettingsModal = ({
|
|||||||
</button>
|
</button>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="flex justify-between mt-6">
|
{/* Footer */}
|
||||||
|
<div className="px-6 py-4 border-t border-secondary-200 dark:border-secondary-600 flex-shrink-0">
|
||||||
|
<div className="flex justify-between">
|
||||||
<button type="button" onClick={onReset} className="btn-outline">
|
<button type="button" onClick={onReset} className="btn-outline">
|
||||||
Reset to Default
|
Reset to Default
|
||||||
</button>
|
</button>
|
||||||
|
|||||||
@@ -1,699 +0,0 @@
|
|||||||
import {
|
|
||||||
Activity,
|
|
||||||
AlertCircle,
|
|
||||||
CheckCircle,
|
|
||||||
Clock,
|
|
||||||
Download,
|
|
||||||
Eye,
|
|
||||||
Filter,
|
|
||||||
Package,
|
|
||||||
Pause,
|
|
||||||
Play,
|
|
||||||
RefreshCw,
|
|
||||||
Search,
|
|
||||||
Server,
|
|
||||||
XCircle,
|
|
||||||
} from "lucide-react";
|
|
||||||
import { useState } from "react";
|
|
||||||
|
|
||||||
const Queue = () => {
|
|
||||||
const [activeTab, setActiveTab] = useState("server");
|
|
||||||
const [filterStatus, setFilterStatus] = useState("all");
|
|
||||||
const [searchQuery, setSearchQuery] = useState("");
|
|
||||||
|
|
||||||
// Mock data for demonstration
|
|
||||||
const serverQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
type: "Server Update Check",
|
|
||||||
description: "Check for server updates from GitHub",
|
|
||||||
status: "running",
|
|
||||||
priority: "high",
|
|
||||||
createdAt: "2024-01-15 10:30:00",
|
|
||||||
estimatedCompletion: "2024-01-15 10:35:00",
|
|
||||||
progress: 75,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
type: "Session Cleanup",
|
|
||||||
description: "Clear expired login sessions",
|
|
||||||
status: "pending",
|
|
||||||
priority: "medium",
|
|
||||||
createdAt: "2024-01-15 10:25:00",
|
|
||||||
estimatedCompletion: "2024-01-15 10:40:00",
|
|
||||||
progress: 0,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
type: "Database Optimization",
|
|
||||||
description: "Optimize database indexes and cleanup old records",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
createdAt: "2024-01-15 09:00:00",
|
|
||||||
completedAt: "2024-01-15 09:45:00",
|
|
||||||
progress: 100,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 1,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
type: "Backup Creation",
|
|
||||||
description: "Create system backup",
|
|
||||||
status: "failed",
|
|
||||||
priority: "high",
|
|
||||||
createdAt: "2024-01-15 08:00:00",
|
|
||||||
errorMessage: "Insufficient disk space",
|
|
||||||
progress: 45,
|
|
||||||
retryCount: 2,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const agentQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
hostname: "web-server-01",
|
|
||||||
ip: "192.168.1.100",
|
|
||||||
type: "Agent Update Collection",
|
|
||||||
description: "Agent v1.2.7 → v1.2.8",
|
|
||||||
status: "pending",
|
|
||||||
priority: "medium",
|
|
||||||
lastCommunication: "2024-01-15 10:00:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 11:00:00",
|
|
||||||
currentVersion: "1.2.7",
|
|
||||||
targetVersion: "1.2.8",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 5,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
hostname: "db-server-02",
|
|
||||||
ip: "192.168.1.101",
|
|
||||||
type: "Data Collection",
|
|
||||||
description: "Collect package and system information",
|
|
||||||
status: "running",
|
|
||||||
priority: "high",
|
|
||||||
lastCommunication: "2024-01-15 10:15:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 11:15:00",
|
|
||||||
currentVersion: "1.2.8",
|
|
||||||
targetVersion: "1.2.8",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
hostname: "app-server-03",
|
|
||||||
ip: "192.168.1.102",
|
|
||||||
type: "Agent Update Collection",
|
|
||||||
description: "Agent v1.2.6 → v1.2.8",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
lastCommunication: "2024-01-15 09:30:00",
|
|
||||||
completedAt: "2024-01-15 09:45:00",
|
|
||||||
currentVersion: "1.2.8",
|
|
||||||
targetVersion: "1.2.8",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 5,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
hostname: "test-server-04",
|
|
||||||
ip: "192.168.1.103",
|
|
||||||
type: "Data Collection",
|
|
||||||
description: "Collect package and system information",
|
|
||||||
status: "failed",
|
|
||||||
priority: "medium",
|
|
||||||
lastCommunication: "2024-01-15 08:00:00",
|
|
||||||
errorMessage: "Connection timeout",
|
|
||||||
retryCount: 3,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const patchQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
hostname: "web-server-01",
|
|
||||||
ip: "192.168.1.100",
|
|
||||||
packages: ["nginx", "openssl", "curl"],
|
|
||||||
type: "Security Updates",
|
|
||||||
description: "Apply critical security patches",
|
|
||||||
status: "pending",
|
|
||||||
priority: "high",
|
|
||||||
scheduledFor: "2024-01-15 19:00:00",
|
|
||||||
lastCommunication: "2024-01-15 18:00:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 19:00:00",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
hostname: "db-server-02",
|
|
||||||
ip: "192.168.1.101",
|
|
||||||
packages: ["postgresql", "python3"],
|
|
||||||
type: "Feature Updates",
|
|
||||||
description: "Update database and Python packages",
|
|
||||||
status: "running",
|
|
||||||
priority: "medium",
|
|
||||||
scheduledFor: "2024-01-15 20:00:00",
|
|
||||||
lastCommunication: "2024-01-15 19:15:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 20:15:00",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
hostname: "app-server-03",
|
|
||||||
ip: "192.168.1.102",
|
|
||||||
packages: ["nodejs", "npm"],
|
|
||||||
type: "Maintenance Updates",
|
|
||||||
description: "Update Node.js and npm packages",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
scheduledFor: "2024-01-15 18:30:00",
|
|
||||||
completedAt: "2024-01-15 18:45:00",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
hostname: "test-server-04",
|
|
||||||
ip: "192.168.1.103",
|
|
||||||
packages: ["docker", "docker-compose"],
|
|
||||||
type: "Security Updates",
|
|
||||||
description: "Update Docker components",
|
|
||||||
status: "failed",
|
|
||||||
priority: "high",
|
|
||||||
scheduledFor: "2024-01-15 17:00:00",
|
|
||||||
errorMessage: "Package conflicts detected",
|
|
||||||
retryCount: 2,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const getStatusIcon = (status) => {
|
|
||||||
switch (status) {
|
|
||||||
case "running":
|
|
||||||
return <RefreshCw className="h-4 w-4 text-blue-500 animate-spin" />;
|
|
||||||
case "completed":
|
|
||||||
return <CheckCircle className="h-4 w-4 text-green-500" />;
|
|
||||||
case "failed":
|
|
||||||
return <XCircle className="h-4 w-4 text-red-500" />;
|
|
||||||
case "pending":
|
|
||||||
return <Clock className="h-4 w-4 text-yellow-500" />;
|
|
||||||
case "paused":
|
|
||||||
return <Pause className="h-4 w-4 text-gray-500" />;
|
|
||||||
default:
|
|
||||||
return <AlertCircle className="h-4 w-4 text-gray-500" />;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getStatusColor = (status) => {
|
|
||||||
switch (status) {
|
|
||||||
case "running":
|
|
||||||
return "bg-blue-100 text-blue-800";
|
|
||||||
case "completed":
|
|
||||||
return "bg-green-100 text-green-800";
|
|
||||||
case "failed":
|
|
||||||
return "bg-red-100 text-red-800";
|
|
||||||
case "pending":
|
|
||||||
return "bg-yellow-100 text-yellow-800";
|
|
||||||
case "paused":
|
|
||||||
return "bg-gray-100 text-gray-800";
|
|
||||||
default:
|
|
||||||
return "bg-gray-100 text-gray-800";
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getPriorityColor = (priority) => {
|
|
||||||
switch (priority) {
|
|
||||||
case "high":
|
|
||||||
return "bg-red-100 text-red-800";
|
|
||||||
case "medium":
|
|
||||||
return "bg-yellow-100 text-yellow-800";
|
|
||||||
case "low":
|
|
||||||
return "bg-green-100 text-green-800";
|
|
||||||
default:
|
|
||||||
return "bg-gray-100 text-gray-800";
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const filteredData = (data) => {
|
|
||||||
let filtered = data;
|
|
||||||
|
|
||||||
if (filterStatus !== "all") {
|
|
||||||
filtered = filtered.filter((item) => item.status === filterStatus);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (searchQuery) {
|
|
||||||
filtered = filtered.filter(
|
|
||||||
(item) =>
|
|
||||||
item.hostname?.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
|
||||||
item.type?.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
|
||||||
item.description?.toLowerCase().includes(searchQuery.toLowerCase()),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return filtered;
|
|
||||||
};
|
|
||||||
|
|
||||||
const tabs = [
|
|
||||||
{
|
|
||||||
id: "server",
|
|
||||||
name: "Server Queue",
|
|
||||||
icon: Server,
|
|
||||||
data: serverQueueData,
|
|
||||||
count: serverQueueData.length,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: "agent",
|
|
||||||
name: "Agent Queue",
|
|
||||||
icon: Download,
|
|
||||||
data: agentQueueData,
|
|
||||||
count: agentQueueData.length,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: "patch",
|
|
||||||
name: "Patch Management",
|
|
||||||
icon: Package,
|
|
||||||
data: patchQueueData,
|
|
||||||
count: patchQueueData.length,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const renderServerQueueItem = (item) => (
|
|
||||||
<div
|
|
||||||
key={item.id}
|
|
||||||
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
|
|
||||||
>
|
|
||||||
<div className="flex items-start justify-between">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="flex items-center gap-3 mb-2">
|
|
||||||
{getStatusIcon(item.status)}
|
|
||||||
<h3 className="font-medium text-gray-900 dark:text-white">
|
|
||||||
{item.type}
|
|
||||||
</h3>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
|
|
||||||
>
|
|
||||||
{item.status}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
|
|
||||||
>
|
|
||||||
{item.priority}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-sm text-gray-600 dark:text-gray-400 mb-3">
|
|
||||||
{item.description}
|
|
||||||
</p>
|
|
||||||
|
|
||||||
{item.status === "running" && (
|
|
||||||
<div className="mb-3">
|
|
||||||
<div className="flex justify-between text-xs text-gray-500 mb-1">
|
|
||||||
<span>Progress</span>
|
|
||||||
<span>{item.progress}%</span>
|
|
||||||
</div>
|
|
||||||
<div className="w-full bg-gray-200 rounded-full h-2">
|
|
||||||
<div
|
|
||||||
className="bg-blue-600 h-2 rounded-full transition-all duration-300"
|
|
||||||
style={{ width: `${item.progress}%` }}
|
|
||||||
></div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Created:</span> {item.createdAt}
|
|
||||||
</div>
|
|
||||||
{item.status === "running" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">ETA:</span>{" "}
|
|
||||||
{item.estimatedCompletion}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "completed" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Completed:</span>{" "}
|
|
||||||
{item.completedAt}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<div className="col-span-2">
|
|
||||||
<span className="font-medium">Error:</span> {item.errorMessage}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{item.retryCount > 0 && (
|
|
||||||
<div className="mt-2 text-xs text-orange-600">
|
|
||||||
Retries: {item.retryCount}/{item.maxRetries}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex gap-2 ml-4">
|
|
||||||
{item.status === "running" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Pause className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
{item.status === "paused" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Play className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<RefreshCw className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Eye className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
const renderAgentQueueItem = (item) => (
|
|
||||||
<div
|
|
||||||
key={item.id}
|
|
||||||
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
|
|
||||||
>
|
|
||||||
<div className="flex items-start justify-between">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="flex items-center gap-3 mb-2">
|
|
||||||
{getStatusIcon(item.status)}
|
|
||||||
<h3 className="font-medium text-gray-900 dark:text-white">
|
|
||||||
{item.hostname}
|
|
||||||
</h3>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
|
|
||||||
>
|
|
||||||
{item.status}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
|
|
||||||
>
|
|
||||||
{item.priority}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
|
|
||||||
{item.type}
|
|
||||||
</p>
|
|
||||||
<p className="text-sm text-gray-500 mb-3">{item.description}</p>
|
|
||||||
|
|
||||||
{item.type === "Agent Update Collection" && (
|
|
||||||
<div className="mb-3 p-2 bg-gray-50 dark:bg-gray-700 rounded">
|
|
||||||
<div className="text-xs text-gray-600 dark:text-gray-400">
|
|
||||||
<span className="font-medium">Version:</span>{" "}
|
|
||||||
{item.currentVersion} → {item.targetVersion}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">IP:</span> {item.ip}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Last Comm:</span>{" "}
|
|
||||||
{item.lastCommunication}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Next Expected:</span>{" "}
|
|
||||||
{item.nextExpectedCommunication}
|
|
||||||
</div>
|
|
||||||
{item.status === "completed" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Completed:</span>{" "}
|
|
||||||
{item.completedAt}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<div className="col-span-2">
|
|
||||||
<span className="font-medium">Error:</span> {item.errorMessage}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{item.retryCount > 0 && (
|
|
||||||
<div className="mt-2 text-xs text-orange-600">
|
|
||||||
Retries: {item.retryCount}/{item.maxRetries}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex gap-2 ml-4">
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<RefreshCw className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Eye className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
const renderPatchQueueItem = (item) => (
|
|
||||||
<div
|
|
||||||
key={item.id}
|
|
||||||
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
|
|
||||||
>
|
|
||||||
<div className="flex items-start justify-between">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="flex items-center gap-3 mb-2">
|
|
||||||
{getStatusIcon(item.status)}
|
|
||||||
<h3 className="font-medium text-gray-900 dark:text-white">
|
|
||||||
{item.hostname}
|
|
||||||
</h3>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
|
|
||||||
>
|
|
||||||
{item.status}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
|
|
||||||
>
|
|
||||||
{item.priority}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
|
|
||||||
{item.type}
|
|
||||||
</p>
|
|
||||||
<p className="text-sm text-gray-500 mb-3">{item.description}</p>
|
|
||||||
|
|
||||||
<div className="mb-3">
|
|
||||||
<div className="text-xs text-gray-600 dark:text-gray-400 mb-1">
|
|
||||||
<span className="font-medium">Packages:</span>
|
|
||||||
</div>
|
|
||||||
<div className="flex flex-wrap gap-1">
|
|
||||||
{item.packages.map((pkg) => (
|
|
||||||
<span
|
|
||||||
key={pkg}
|
|
||||||
className="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded"
|
|
||||||
>
|
|
||||||
{pkg}
|
|
||||||
</span>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">IP:</span> {item.ip}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Scheduled:</span>{" "}
|
|
||||||
{item.scheduledFor}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Last Comm:</span>{" "}
|
|
||||||
{item.lastCommunication}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Next Expected:</span>{" "}
|
|
||||||
{item.nextExpectedCommunication}
|
|
||||||
</div>
|
|
||||||
{item.status === "completed" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Completed:</span>{" "}
|
|
||||||
{item.completedAt}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<div className="col-span-2">
|
|
||||||
<span className="font-medium">Error:</span> {item.errorMessage}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{item.retryCount > 0 && (
|
|
||||||
<div className="mt-2 text-xs text-orange-600">
|
|
||||||
Retries: {item.retryCount}/{item.maxRetries}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex gap-2 ml-4">
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<RefreshCw className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Eye className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
const currentTab = tabs.find((tab) => tab.id === activeTab);
|
|
||||||
const filteredItems = filteredData(currentTab?.data || []);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="min-h-screen bg-gray-50 dark:bg-gray-900">
|
|
||||||
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
|
||||||
{/* Header */}
|
|
||||||
<div className="mb-8">
|
|
||||||
<h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">
|
|
||||||
Queue Management
|
|
||||||
</h1>
|
|
||||||
<p className="text-gray-600 dark:text-gray-400">
|
|
||||||
Monitor and manage server operations, agent communications, and
|
|
||||||
patch deployments
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Tabs */}
|
|
||||||
<div className="mb-6">
|
|
||||||
<div className="border-b border-gray-200 dark:border-gray-700">
|
|
||||||
<nav className="-mb-px flex space-x-8">
|
|
||||||
{tabs.map((tab) => (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
key={tab.id}
|
|
||||||
onClick={() => setActiveTab(tab.id)}
|
|
||||||
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
|
|
||||||
activeTab === tab.id
|
|
||||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
|
||||||
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
<tab.icon className="h-4 w-4" />
|
|
||||||
{tab.name}
|
|
||||||
<span className="bg-gray-100 dark:bg-gray-700 text-gray-600 dark:text-gray-300 px-2 py-0.5 rounded-full text-xs">
|
|
||||||
{tab.count}
|
|
||||||
</span>
|
|
||||||
</button>
|
|
||||||
))}
|
|
||||||
</nav>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Filters and Search */}
|
|
||||||
<div className="mb-6 flex flex-col sm:flex-row gap-4">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="relative">
|
|
||||||
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 text-gray-400 h-4 w-4" />
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
placeholder="Search queues..."
|
|
||||||
value={searchQuery}
|
|
||||||
onChange={(e) => setSearchQuery(e.target.value)}
|
|
||||||
className="w-full pl-10 pr-4 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="flex gap-2">
|
|
||||||
<select
|
|
||||||
value={filterStatus}
|
|
||||||
onChange={(e) => setFilterStatus(e.target.value)}
|
|
||||||
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
|
||||||
>
|
|
||||||
<option value="all">All Status</option>
|
|
||||||
<option value="pending">Pending</option>
|
|
||||||
<option value="running">Running</option>
|
|
||||||
<option value="completed">Completed</option>
|
|
||||||
<option value="failed">Failed</option>
|
|
||||||
<option value="paused">Paused</option>
|
|
||||||
</select>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white hover:bg-gray-50 dark:hover:bg-gray-700 flex items-center gap-2"
|
|
||||||
>
|
|
||||||
<Filter className="h-4 w-4" />
|
|
||||||
More Filters
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Queue Items */}
|
|
||||||
<div className="space-y-4">
|
|
||||||
{filteredItems.length === 0 ? (
|
|
||||||
<div className="text-center py-12">
|
|
||||||
<Activity className="mx-auto h-12 w-12 text-gray-400" />
|
|
||||||
<h3 className="mt-2 text-sm font-medium text-gray-900 dark:text-white">
|
|
||||||
No queue items found
|
|
||||||
</h3>
|
|
||||||
<p className="mt-1 text-sm text-gray-500 dark:text-gray-400">
|
|
||||||
{searchQuery
|
|
||||||
? "Try adjusting your search criteria"
|
|
||||||
: "No items match the current filters"}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
filteredItems.map((item) => {
|
|
||||||
switch (activeTab) {
|
|
||||||
case "server":
|
|
||||||
return renderServerQueueItem(item);
|
|
||||||
case "agent":
|
|
||||||
return renderAgentQueueItem(item);
|
|
||||||
case "patch":
|
|
||||||
return renderPatchQueueItem(item);
|
|
||||||
default:
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default Queue;
|
|
||||||
@@ -120,7 +120,7 @@ const Settings = () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Helper function to get curl flags based on settings
|
// Helper function to get curl flags based on settings
|
||||||
const getCurlFlags = () => {
|
const _getCurlFlags = () => {
|
||||||
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
|
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1155,28 +1155,39 @@ const Settings = () => {
|
|||||||
Agent Uninstall Command
|
Agent Uninstall Command
|
||||||
</h3>
|
</h3>
|
||||||
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
|
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
<p className="mb-2">
|
<p className="mb-3">
|
||||||
To completely remove PatchMon from a host:
|
To completely remove PatchMon from a host:
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
|
{/* Go Agent Uninstall */}
|
||||||
|
<div className="mb-3">
|
||||||
|
<div className="space-y-2">
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
|
<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
|
||||||
curl {getCurlFlags()} {window.location.origin}
|
sudo patchmon-agent uninstall
|
||||||
/api/v1/hosts/remove | sudo bash
|
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
const command = `curl ${getCurlFlags()} ${window.location.origin}/api/v1/hosts/remove | sudo bash`;
|
navigator.clipboard.writeText(
|
||||||
navigator.clipboard.writeText(command);
|
"sudo patchmon-agent uninstall",
|
||||||
// You could add a toast notification here
|
);
|
||||||
}}
|
}}
|
||||||
className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
|
className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
|
||||||
>
|
>
|
||||||
Copy
|
Copy
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
|
<div className="text-xs text-red-600 dark:text-red-400">
|
||||||
|
Options: <code>--remove-config</code>,{" "}
|
||||||
|
<code>--remove-logs</code>,{" "}
|
||||||
|
<code>--remove-all</code>, <code>--force</code>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<p className="mt-2 text-xs">
|
<p className="mt-2 text-xs">
|
||||||
⚠️ This will remove all PatchMon files,
|
⚠️ This command will remove all PatchMon files,
|
||||||
configuration, and crontab entries
|
configuration, and crontab entries
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
389
frontend/src/pages/docker/ContainerDetail.jsx
Normal file
389
frontend/src/pages/docker/ContainerDetail.jsx
Normal file
@@ -0,0 +1,389 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
CheckCircle,
|
||||||
|
Container,
|
||||||
|
ExternalLink,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api, { formatRelativeTime } from "../../utils/api";
|
||||||
|
|
||||||
|
const ContainerDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "container", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/containers/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const container = data?.container;
|
||||||
|
const similarContainers = data?.similarContainers || [];
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !container) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Container not found
|
||||||
|
</h3>
|
||||||
|
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
The container you're looking for doesn't exist or has been
|
||||||
|
removed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const getStatusBadge = (status) => {
|
||||||
|
const statusClasses = {
|
||||||
|
running:
|
||||||
|
"bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
|
||||||
|
exited: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
|
||||||
|
paused:
|
||||||
|
"bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200",
|
||||||
|
restarting:
|
||||||
|
"bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200",
|
||||||
|
};
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-3 py-1 rounded-full text-sm font-medium ${
|
||||||
|
statusClasses[status] ||
|
||||||
|
"bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Container className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{container.name}
|
||||||
|
</h1>
|
||||||
|
{getStatusBadge(container.status)}
|
||||||
|
</div>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Container ID: {container.container_id.substring(0, 12)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
{/* Update Status Card */}
|
||||||
|
{container.docker_images?.docker_image_updates &&
|
||||||
|
container.docker_images.docker_image_updates.length > 0 ? (
|
||||||
|
<div className="card p-4 bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-yellow-600 dark:text-yellow-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-yellow-200">
|
||||||
|
Update Available
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-yellow-100 truncate">
|
||||||
|
{
|
||||||
|
container.docker_images.docker_image_updates[0]
|
||||||
|
.available_tag
|
||||||
|
}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="card p-4 bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-green-200">
|
||||||
|
Update Status
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-green-100">
|
||||||
|
Up to date
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Server className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">Host</p>
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${container.host?.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 truncate block"
|
||||||
|
>
|
||||||
|
{container.host?.friendly_name || container.host?.hostname}
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
State
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{container.state || container.status}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Last Checked
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.last_checked)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Container and Image Information - Side by Side */}
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
|
{/* Container Details */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Container Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Container ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{container.container_id}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.image_tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{container.started_at && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Started
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.started_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{container.ports && Object.keys(container.ports).length > 0 && (
|
||||||
|
<div className="sm:col-span-2">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Port Mappings
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{Object.entries(container.ports).map(([key, value]) => (
|
||||||
|
<span
|
||||||
|
key={key}
|
||||||
|
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200"
|
||||||
|
>
|
||||||
|
{key} → {value}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Image Information */}
|
||||||
|
{container.docker_images && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Image Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Repository
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${container.docker_images.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
{container.docker_images.repository}
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.docker_images.tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Source
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.docker_images.source}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{container.docker_images.size_bytes && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Size
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{(
|
||||||
|
Number(container.docker_images.size_bytes) /
|
||||||
|
1024 /
|
||||||
|
1024
|
||||||
|
).toFixed(2)}{" "}
|
||||||
|
MB
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-xs text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{container.docker_images.image_id?.substring(0, 12)}...
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.docker_images.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Similar Containers */}
|
||||||
|
{similarContainers.length > 0 && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Similar Containers (Same Image)
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<ul className="divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{similarContainers.map((similar) => (
|
||||||
|
<li
|
||||||
|
key={similar.id}
|
||||||
|
className="py-4 flex items-center justify-between"
|
||||||
|
>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Container className="h-5 w-5 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${similar.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{similar.name}
|
||||||
|
</Link>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
|
{similar.status}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default ContainerDetail;
|
||||||
354
frontend/src/pages/docker/HostDetail.jsx
Normal file
354
frontend/src/pages/docker/HostDetail.jsx
Normal file
@@ -0,0 +1,354 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
Container,
|
||||||
|
ExternalLink,
|
||||||
|
Package,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api from "../../utils/api";
|
||||||
|
|
||||||
|
const HostDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "host", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/hosts/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const host = data?.host;
|
||||||
|
const containers = data?.containers || [];
|
||||||
|
const images = data?.images || [];
|
||||||
|
const stats = data?.stats;
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !host) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Host not found
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Server className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{host.friendly_name || host.hostname}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
{host.ip}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${id}`}
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
View Full Host Details
|
||||||
|
<ExternalLink className="ml-2 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Total Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.totalContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Running
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.runningContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-red-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Stopped
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.stoppedContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Package className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Images
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.totalImages || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Host Information */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Host Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5 space-y-6">
|
||||||
|
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Friendly Name
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.friendly_name}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Hostname
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.hostname}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
IP Address
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.ip}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
OS
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.os_type} {host.os_version}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Containers */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Containers ({containers.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Container Name
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Image
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Status
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Actions
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{containers.map((container) => (
|
||||||
|
<tr key={container.id}>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${container.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{container.name}
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
|
||||||
|
{container.image_name}:{container.image_tag}
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
|
||||||
|
{container.status}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${container.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
View
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Images */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Images ({images.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Repository
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Tag
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Source
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Actions
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{images.map((image) => (
|
||||||
|
<tr key={image.id}>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${image.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{image.repository}
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
|
||||||
|
{image.tag}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
|
||||||
|
{image.source}
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${image.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
View
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default HostDetail;
|
||||||
439
frontend/src/pages/docker/ImageDetail.jsx
Normal file
439
frontend/src/pages/docker/ImageDetail.jsx
Normal file
@@ -0,0 +1,439 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
Container,
|
||||||
|
ExternalLink,
|
||||||
|
Package,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
Shield,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api, { formatRelativeTime } from "../../utils/api";
|
||||||
|
|
||||||
|
const ImageDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "image", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/images/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const image = data?.image;
|
||||||
|
const hosts = data?.hosts || [];
|
||||||
|
const containers = image?.docker_containers || [];
|
||||||
|
const updates = image?.docker_image_updates || [];
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !image) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Image not found
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Package className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{image.repository}:{image.tag}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Image ID: {image.image_id.substring(0, 12)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{containers.length}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Server className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Hosts
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{hosts.length}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Package className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">Size</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{image.size_bytes ? (
|
||||||
|
<>{(Number(image.size_bytes) / 1024 / 1024).toFixed(0)} MB</>
|
||||||
|
) : (
|
||||||
|
"N/A"
|
||||||
|
)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-warning-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Updates
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{updates.length}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Available Updates with Digest Comparison */}
|
||||||
|
{updates.length > 0 && (
|
||||||
|
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-yellow-400" />
|
||||||
|
<div className="ml-3 flex-1">
|
||||||
|
<h3 className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
|
||||||
|
Updates Available
|
||||||
|
</h3>
|
||||||
|
<div className="mt-2 space-y-3">
|
||||||
|
{updates.map((update) => {
|
||||||
|
let digestInfo = null;
|
||||||
|
try {
|
||||||
|
if (update.changelog_url) {
|
||||||
|
digestInfo = JSON.parse(update.changelog_url);
|
||||||
|
}
|
||||||
|
} catch (_e) {
|
||||||
|
// Ignore parse errors
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={update.id}
|
||||||
|
className="bg-white dark:bg-secondary-800 rounded-lg p-3 border border-yellow-200 dark:border-yellow-700"
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between mb-2">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
{update.is_security_update && (
|
||||||
|
<Shield className="h-4 w-4 text-red-500" />
|
||||||
|
)}
|
||||||
|
<span className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
|
||||||
|
New version available:{" "}
|
||||||
|
<span className="font-semibold">
|
||||||
|
{update.available_tag}
|
||||||
|
</span>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
{update.is_security_update && (
|
||||||
|
<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200">
|
||||||
|
Security
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{digestInfo &&
|
||||||
|
digestInfo.method === "digest_comparison" && (
|
||||||
|
<div className="mt-2 pt-2 border-t border-yellow-200 dark:border-yellow-700">
|
||||||
|
<p className="text-xs text-secondary-600 dark:text-secondary-400 mb-1">
|
||||||
|
Detected via digest comparison:
|
||||||
|
</p>
|
||||||
|
<div className="font-mono text-xs space-y-1">
|
||||||
|
<div className="text-red-600 dark:text-red-400">
|
||||||
|
<span className="font-bold">- Current: </span>
|
||||||
|
{digestInfo.current_digest}
|
||||||
|
</div>
|
||||||
|
<div className="text-green-600 dark:text-green-400">
|
||||||
|
<span className="font-bold">+ Available: </span>
|
||||||
|
{digestInfo.available_digest}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Image Information */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Image Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5 space-y-6">
|
||||||
|
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Repository
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{image.repository}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{image.tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Source
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{image.source}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{image.created_at
|
||||||
|
? formatRelativeTime(image.created_at)
|
||||||
|
: "Unknown"}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white">
|
||||||
|
{image.image_id}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Last Checked
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{image.last_checked
|
||||||
|
? formatRelativeTime(image.last_checked)
|
||||||
|
: "Never"}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{image.digest && (
|
||||||
|
<div className="sm:col-span-2">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Digest
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white break-all">
|
||||||
|
{image.digest}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Containers using this image */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Containers ({containers.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Container Name
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Status
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Host
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Actions
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{containers.map((container) => (
|
||||||
|
<tr key={container.id}>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${container.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{container.name}
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
|
||||||
|
{container.status}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
|
||||||
|
{container.host_id}
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${container.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
View
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Hosts using this image */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Hosts ({hosts.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Host Name
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
IP Address
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Actions
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{hosts.map((host) => (
|
||||||
|
<tr key={host.id}>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap">
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${host.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{host.friendly_name || host.hostname}
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
|
||||||
|
{host.ip}
|
||||||
|
</td>
|
||||||
|
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${host.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
View
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default ImageDetail;
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -56,11 +56,23 @@ export const dashboardAPI = {
|
|||||||
const url = `/dashboard/hosts/${hostId}${queryString ? `?${queryString}` : ""}`;
|
const url = `/dashboard/hosts/${hostId}${queryString ? `?${queryString}` : ""}`;
|
||||||
return api.get(url);
|
return api.get(url);
|
||||||
},
|
},
|
||||||
|
getHostQueue: (hostId, params = {}) => {
|
||||||
|
const queryString = new URLSearchParams(params).toString();
|
||||||
|
const url = `/dashboard/hosts/${hostId}/queue${queryString ? `?${queryString}` : ""}`;
|
||||||
|
return api.get(url);
|
||||||
|
},
|
||||||
|
getHostWsStatus: (hostId) => api.get(`/dashboard/hosts/${hostId}/ws-status`),
|
||||||
|
getWsStatusByApiId: (apiId) => api.get(`/ws/status/${apiId}`),
|
||||||
getPackageTrends: (params = {}) => {
|
getPackageTrends: (params = {}) => {
|
||||||
const queryString = new URLSearchParams(params).toString();
|
const queryString = new URLSearchParams(params).toString();
|
||||||
const url = `/dashboard/package-trends${queryString ? `?${queryString}` : ""}`;
|
const url = `/dashboard/package-trends${queryString ? `?${queryString}` : ""}`;
|
||||||
return api.get(url);
|
return api.get(url);
|
||||||
},
|
},
|
||||||
|
getPackageSpikeAnalysis: (params = {}) => {
|
||||||
|
const queryString = new URLSearchParams(params).toString();
|
||||||
|
const url = `/dashboard/package-spike-analysis${queryString ? `?${queryString}` : ""}`;
|
||||||
|
return api.get(url);
|
||||||
|
},
|
||||||
getRecentUsers: () => api.get("/dashboard/recent-users"),
|
getRecentUsers: () => api.get("/dashboard/recent-users"),
|
||||||
getRecentCollection: () => api.get("/dashboard/recent-collection"),
|
getRecentCollection: () => api.get("/dashboard/recent-collection"),
|
||||||
};
|
};
|
||||||
@@ -75,8 +87,12 @@ export const adminHostsAPI = {
|
|||||||
api.post(`/hosts/${hostId}/regenerate-credentials`),
|
api.post(`/hosts/${hostId}/regenerate-credentials`),
|
||||||
updateGroup: (hostId, hostGroupId) =>
|
updateGroup: (hostId, hostGroupId) =>
|
||||||
api.put(`/hosts/${hostId}/group`, { hostGroupId }),
|
api.put(`/hosts/${hostId}/group`, { hostGroupId }),
|
||||||
|
updateGroups: (hostId, groupIds) =>
|
||||||
|
api.put(`/hosts/${hostId}/groups`, { groupIds }),
|
||||||
bulkUpdateGroup: (hostIds, hostGroupId) =>
|
bulkUpdateGroup: (hostIds, hostGroupId) =>
|
||||||
api.put("/hosts/bulk/group", { hostIds, hostGroupId }),
|
api.put("/hosts/bulk/group", { hostIds, hostGroupId }),
|
||||||
|
bulkUpdateGroups: (hostIds, groupIds) =>
|
||||||
|
api.put("/hosts/bulk/groups", { hostIds, groupIds }),
|
||||||
toggleAutoUpdate: (hostId, autoUpdate) =>
|
toggleAutoUpdate: (hostId, autoUpdate) =>
|
||||||
api.patch(`/hosts/${hostId}/auto-update`, { auto_update: autoUpdate }),
|
api.patch(`/hosts/${hostId}/auto-update`, { auto_update: autoUpdate }),
|
||||||
updateFriendlyName: (hostId, friendlyName) =>
|
updateFriendlyName: (hostId, friendlyName) =>
|
||||||
|
|||||||
@@ -1,43 +1,104 @@
|
|||||||
import { Monitor, Server } from "lucide-react";
|
import { Monitor, Server } from "lucide-react";
|
||||||
import { DiWindows } from "react-icons/di";
|
import { DiWindows } from "react-icons/di";
|
||||||
// Import OS icons from react-icons
|
// Import OS icons from react-icons Simple Icons - using only confirmed available icons
|
||||||
import {
|
import {
|
||||||
|
SiAlmalinux,
|
||||||
SiAlpinelinux,
|
SiAlpinelinux,
|
||||||
SiArchlinux,
|
SiArchlinux,
|
||||||
SiCentos,
|
SiCentos,
|
||||||
SiDebian,
|
SiDebian,
|
||||||
|
SiDeepin,
|
||||||
|
SiElementary,
|
||||||
SiFedora,
|
SiFedora,
|
||||||
|
SiGentoo,
|
||||||
|
SiKalilinux,
|
||||||
SiLinux,
|
SiLinux,
|
||||||
|
SiLinuxmint,
|
||||||
SiMacos,
|
SiMacos,
|
||||||
|
SiManjaro,
|
||||||
|
SiOpensuse,
|
||||||
|
SiOracle,
|
||||||
|
SiParrotsecurity,
|
||||||
|
SiPopos,
|
||||||
|
SiRedhat,
|
||||||
|
SiRockylinux,
|
||||||
|
SiSlackware,
|
||||||
|
SiSolus,
|
||||||
|
SiSuse,
|
||||||
|
SiTails,
|
||||||
SiUbuntu,
|
SiUbuntu,
|
||||||
|
SiZorin,
|
||||||
} from "react-icons/si";
|
} from "react-icons/si";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* OS Icon mapping utility
|
* OS Icon mapping utility
|
||||||
* Maps operating system types to appropriate react-icons components
|
* Maps operating system types to appropriate react-icons components
|
||||||
|
* Now uses specific icons based on actual OS names from /etc/os-release
|
||||||
*/
|
*/
|
||||||
export const getOSIcon = (osType) => {
|
export const getOSIcon = (osType) => {
|
||||||
if (!osType) return Monitor;
|
if (!osType) return Monitor;
|
||||||
|
|
||||||
const os = osType.toLowerCase();
|
const os = osType.toLowerCase();
|
||||||
|
|
||||||
// Linux distributions with authentic react-icons
|
// Ubuntu and Ubuntu variants
|
||||||
if (os.includes("ubuntu")) return SiUbuntu;
|
if (os.includes("ubuntu")) {
|
||||||
|
// For Ubuntu variants, use generic Ubuntu icon as fallback
|
||||||
|
return SiUbuntu;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pop!_OS
|
||||||
|
if (os.includes("pop") || os.includes("pop!_os")) return SiPopos;
|
||||||
|
|
||||||
|
// Linux Mint
|
||||||
|
if (os.includes("mint") || os.includes("linuxmint")) return SiLinuxmint;
|
||||||
|
|
||||||
|
// Elementary OS
|
||||||
|
if (os.includes("elementary")) return SiElementary;
|
||||||
|
|
||||||
|
// Debian
|
||||||
if (os.includes("debian")) return SiDebian;
|
if (os.includes("debian")) return SiDebian;
|
||||||
if (
|
|
||||||
os.includes("centos") ||
|
// Rocky Linux
|
||||||
os.includes("rhel") ||
|
if (os.includes("rocky")) return SiRockylinux;
|
||||||
os.includes("red hat") ||
|
|
||||||
os.includes("almalinux") ||
|
// AlmaLinux
|
||||||
os.includes("rocky")
|
if (os.includes("alma") || os.includes("almalinux")) return SiAlmalinux;
|
||||||
)
|
|
||||||
return SiCentos;
|
// CentOS
|
||||||
if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
|
if (os.includes("centos")) return SiCentos;
|
||||||
return SiLinux; // Use generic Linux icon for Oracle Linux
|
|
||||||
|
// Red Hat Enterprise Linux
|
||||||
|
if (os.includes("rhel") || os.includes("red hat")) return SiRedhat;
|
||||||
|
|
||||||
|
// Fedora
|
||||||
if (os.includes("fedora")) return SiFedora;
|
if (os.includes("fedora")) return SiFedora;
|
||||||
|
|
||||||
|
// Oracle Linux
|
||||||
|
if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
|
||||||
|
return SiOracle;
|
||||||
|
|
||||||
|
// SUSE distributions
|
||||||
|
if (os.includes("opensuse")) return SiOpensuse;
|
||||||
|
if (os.includes("suse")) return SiSuse;
|
||||||
|
|
||||||
|
// Arch-based distributions
|
||||||
if (os.includes("arch")) return SiArchlinux;
|
if (os.includes("arch")) return SiArchlinux;
|
||||||
|
if (os.includes("manjaro")) return SiManjaro;
|
||||||
|
if (os.includes("endeavour") || os.includes("endeavouros"))
|
||||||
|
return SiArchlinux; // Fallback to Arch
|
||||||
|
if (os.includes("garuda")) return SiArchlinux; // Fallback to Arch
|
||||||
|
if (os.includes("blackarch")) return SiArchlinux; // Fallback to Arch
|
||||||
|
|
||||||
|
// Other distributions
|
||||||
if (os.includes("alpine")) return SiAlpinelinux;
|
if (os.includes("alpine")) return SiAlpinelinux;
|
||||||
if (os.includes("suse") || os.includes("opensuse")) return SiLinux; // SUSE uses generic Linux icon
|
if (os.includes("gentoo")) return SiGentoo;
|
||||||
|
if (os.includes("slackware")) return SiSlackware;
|
||||||
|
if (os.includes("zorin")) return SiZorin;
|
||||||
|
if (os.includes("deepin")) return SiDeepin;
|
||||||
|
if (os.includes("solus")) return SiSolus;
|
||||||
|
if (os.includes("tails")) return SiTails;
|
||||||
|
if (os.includes("parrot")) return SiParrotsecurity;
|
||||||
|
if (os.includes("kali")) return SiKalilinux;
|
||||||
|
|
||||||
// Generic Linux
|
// Generic Linux
|
||||||
if (os.includes("linux")) return SiLinux;
|
if (os.includes("linux")) return SiLinux;
|
||||||
@@ -70,27 +131,83 @@ export const getOSColor = (osType) => {
|
|||||||
/**
|
/**
|
||||||
* OS Display name utility
|
* OS Display name utility
|
||||||
* Provides clean, formatted OS names for display
|
* Provides clean, formatted OS names for display
|
||||||
|
* Updated to handle more distributions from /etc/os-release
|
||||||
*/
|
*/
|
||||||
export const getOSDisplayName = (osType) => {
|
export const getOSDisplayName = (osType) => {
|
||||||
if (!osType) return "Unknown";
|
if (!osType) return "Unknown";
|
||||||
|
|
||||||
const os = osType.toLowerCase();
|
const os = osType.toLowerCase();
|
||||||
|
|
||||||
// Linux distributions
|
// Ubuntu and variants
|
||||||
if (os.includes("ubuntu")) return "Ubuntu";
|
if (os.includes("ubuntu")) {
|
||||||
|
if (os.includes("kubuntu")) return "Kubuntu";
|
||||||
|
if (os.includes("lubuntu")) return "Lubuntu";
|
||||||
|
if (os.includes("xubuntu")) return "Xubuntu";
|
||||||
|
if (os.includes("ubuntu mate") || os.includes("ubuntumate"))
|
||||||
|
return "Ubuntu MATE";
|
||||||
|
if (os.includes("ubuntu budgie") || os.includes("ubuntubudgie"))
|
||||||
|
return "Ubuntu Budgie";
|
||||||
|
if (os.includes("ubuntu studio") || os.includes("ubuntustudio"))
|
||||||
|
return "Ubuntu Studio";
|
||||||
|
if (os.includes("ubuntu kylin") || os.includes("ubuntukylin"))
|
||||||
|
return "Ubuntu Kylin";
|
||||||
|
return "Ubuntu";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pop!_OS
|
||||||
|
if (os.includes("pop") || os.includes("pop!_os")) return "Pop!_OS";
|
||||||
|
|
||||||
|
// Linux Mint
|
||||||
|
if (os.includes("mint") || os.includes("linuxmint")) return "Linux Mint";
|
||||||
|
|
||||||
|
// Elementary OS
|
||||||
|
if (os.includes("elementary")) return "Elementary OS";
|
||||||
|
|
||||||
|
// Debian
|
||||||
if (os.includes("debian")) return "Debian";
|
if (os.includes("debian")) return "Debian";
|
||||||
if (os.includes("centos")) return "CentOS";
|
|
||||||
if (os.includes("almalinux")) return "AlmaLinux";
|
// Rocky Linux
|
||||||
if (os.includes("rocky")) return "Rocky Linux";
|
if (os.includes("rocky")) return "Rocky Linux";
|
||||||
if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
|
|
||||||
return "Oracle Linux";
|
// AlmaLinux
|
||||||
|
if (os.includes("alma") || os.includes("almalinux")) return "AlmaLinux";
|
||||||
|
|
||||||
|
// CentOS
|
||||||
|
if (os.includes("centos")) return "CentOS";
|
||||||
|
|
||||||
|
// Red Hat Enterprise Linux
|
||||||
if (os.includes("rhel") || os.includes("red hat"))
|
if (os.includes("rhel") || os.includes("red hat"))
|
||||||
return "Red Hat Enterprise Linux";
|
return "Red Hat Enterprise Linux";
|
||||||
|
|
||||||
|
// Fedora
|
||||||
if (os.includes("fedora")) return "Fedora";
|
if (os.includes("fedora")) return "Fedora";
|
||||||
if (os.includes("arch")) return "Arch Linux";
|
|
||||||
if (os.includes("suse")) return "SUSE Linux";
|
// Oracle Linux
|
||||||
|
if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
|
||||||
|
return "Oracle Linux";
|
||||||
|
|
||||||
|
// SUSE distributions
|
||||||
if (os.includes("opensuse")) return "openSUSE";
|
if (os.includes("opensuse")) return "openSUSE";
|
||||||
|
if (os.includes("suse")) return "SUSE Linux";
|
||||||
|
|
||||||
|
// Arch-based distributions
|
||||||
|
if (os.includes("arch")) return "Arch Linux";
|
||||||
|
if (os.includes("manjaro")) return "Manjaro";
|
||||||
|
if (os.includes("endeavour") || os.includes("endeavouros"))
|
||||||
|
return "EndeavourOS";
|
||||||
|
if (os.includes("garuda")) return "Garuda Linux";
|
||||||
|
if (os.includes("blackarch")) return "BlackArch Linux";
|
||||||
|
|
||||||
|
// Other distributions
|
||||||
if (os.includes("alpine")) return "Alpine Linux";
|
if (os.includes("alpine")) return "Alpine Linux";
|
||||||
|
if (os.includes("gentoo")) return "Gentoo";
|
||||||
|
if (os.includes("slackware")) return "Slackware";
|
||||||
|
if (os.includes("zorin")) return "Zorin OS";
|
||||||
|
if (os.includes("deepin")) return "Deepin";
|
||||||
|
if (os.includes("solus")) return "Solus";
|
||||||
|
if (os.includes("tails")) return "Tails";
|
||||||
|
if (os.includes("parrot")) return "Parrot Security";
|
||||||
|
if (os.includes("kali")) return "Kali Linux";
|
||||||
|
|
||||||
// Generic Linux
|
// Generic Linux
|
||||||
if (os.includes("linux")) return "Linux";
|
if (os.includes("linux")) return "Linux";
|
||||||
|
|||||||
@@ -37,6 +37,11 @@ export default defineConfig({
|
|||||||
}
|
}
|
||||||
: undefined,
|
: undefined,
|
||||||
},
|
},
|
||||||
|
"/admin": {
|
||||||
|
target: `http://${process.env.BACKEND_HOST || "localhost"}:${process.env.BACKEND_PORT || "3001"}`,
|
||||||
|
changeOrigin: true,
|
||||||
|
secure: false,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
build: {
|
build: {
|
||||||
|
|||||||
952
package-lock.json
generated
952
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "patchmon",
|
"name": "patchmon",
|
||||||
"version": "1.2.7",
|
"version": "1.2.9",
|
||||||
"description": "Linux Patch Monitoring System",
|
"description": "Linux Patch Monitoring System",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
|
|||||||
374
setup.sh
374
setup.sh
@@ -34,7 +34,7 @@ BLUE='\033[0;34m'
|
|||||||
NC='\033[0m' # No Color
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
# Global variables
|
# Global variables
|
||||||
SCRIPT_VERSION="self-hosting-install.sh v1.2.7-selfhost-2025-01-20-1"
|
SCRIPT_VERSION="self-hosting-install.sh v1.2.9-selfhost-2025-10-11-1"
|
||||||
DEFAULT_GITHUB_REPO="https://github.com/PatchMon/PatchMon.git"
|
DEFAULT_GITHUB_REPO="https://github.com/PatchMon/PatchMon.git"
|
||||||
FQDN=""
|
FQDN=""
|
||||||
CUSTOM_FQDN=""
|
CUSTOM_FQDN=""
|
||||||
@@ -60,6 +60,9 @@ SERVICE_USE_LETSENCRYPT="true" # Will be set based on user input
|
|||||||
SERVER_PROTOCOL_SEL="https"
|
SERVER_PROTOCOL_SEL="https"
|
||||||
SERVER_PORT_SEL="" # Will be set to BACKEND_PORT in init_instance_vars
|
SERVER_PORT_SEL="" # Will be set to BACKEND_PORT in init_instance_vars
|
||||||
SETUP_NGINX="true"
|
SETUP_NGINX="true"
|
||||||
|
UPDATE_MODE="false"
|
||||||
|
SELECTED_INSTANCE=""
|
||||||
|
SELECTED_SERVICE_NAME=""
|
||||||
|
|
||||||
# Functions
|
# Functions
|
||||||
print_status() {
|
print_status() {
|
||||||
@@ -642,31 +645,61 @@ EOF
|
|||||||
|
|
||||||
# Setup database for instance
|
# Setup database for instance
|
||||||
setup_database() {
|
setup_database() {
|
||||||
print_info "Creating database: $DB_NAME"
|
print_info "Setting up database: $DB_NAME"
|
||||||
|
|
||||||
# Check if sudo is available for user switching
|
# Check if sudo is available for user switching
|
||||||
if command -v sudo >/dev/null 2>&1; then
|
if command -v sudo >/dev/null 2>&1; then
|
||||||
# Drop and recreate database and user for clean state
|
# Check if user exists
|
||||||
sudo -u postgres psql -c "DROP DATABASE IF EXISTS $DB_NAME;" || true
|
user_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'" || echo "0")
|
||||||
sudo -u postgres psql -c "DROP USER IF EXISTS $DB_USER;" || true
|
|
||||||
|
|
||||||
# Create database and user
|
if [ "$user_exists" = "1" ]; then
|
||||||
|
print_info "Database user $DB_USER already exists, skipping creation"
|
||||||
|
else
|
||||||
|
print_info "Creating database user $DB_USER"
|
||||||
sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
|
sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if database exists
|
||||||
|
db_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" || echo "0")
|
||||||
|
|
||||||
|
if [ "$db_exists" = "1" ]; then
|
||||||
|
print_info "Database $DB_NAME already exists, skipping creation"
|
||||||
|
else
|
||||||
|
print_info "Creating database $DB_NAME"
|
||||||
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
|
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Always grant privileges (in case they were revoked)
|
||||||
sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;"
|
sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;"
|
||||||
else
|
else
|
||||||
# Alternative method for systems without sudo (run as postgres user directly)
|
# Alternative method for systems without sudo (run as postgres user directly)
|
||||||
print_warning "sudo not available, using alternative method for PostgreSQL setup"
|
print_warning "sudo not available, using alternative method for PostgreSQL setup"
|
||||||
|
|
||||||
# Switch to postgres user using su
|
# Check if user exists
|
||||||
su - postgres -c "psql -c \"DROP DATABASE IF EXISTS $DB_NAME;\"" || true
|
user_exists=$(su - postgres -c "psql -tAc \"SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'\"" || echo "0")
|
||||||
su - postgres -c "psql -c \"DROP USER IF EXISTS $DB_USER;\"" || true
|
|
||||||
|
if [ "$user_exists" = "1" ]; then
|
||||||
|
print_info "Database user $DB_USER already exists, skipping creation"
|
||||||
|
else
|
||||||
|
print_info "Creating database user $DB_USER"
|
||||||
su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
|
su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if database exists
|
||||||
|
db_exists=$(su - postgres -c "psql -tAc \"SELECT 1 FROM pg_database WHERE datname='$DB_NAME'\"" || echo "0")
|
||||||
|
|
||||||
|
if [ "$db_exists" = "1" ]; then
|
||||||
|
print_info "Database $DB_NAME already exists, skipping creation"
|
||||||
|
else
|
||||||
|
print_info "Creating database $DB_NAME"
|
||||||
su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
|
su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Always grant privileges (in case they were revoked)
|
||||||
su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;\""
|
su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;\""
|
||||||
fi
|
fi
|
||||||
|
|
||||||
print_status "Database $DB_NAME created with user $DB_USER"
|
print_status "Database setup complete for $DB_NAME"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Clone application repository
|
# Clone application repository
|
||||||
@@ -834,7 +867,7 @@ EOF
|
|||||||
cat > frontend/.env << EOF
|
cat > frontend/.env << EOF
|
||||||
VITE_API_URL=$SERVER_PROTOCOL_SEL://$FQDN/api/v1
|
VITE_API_URL=$SERVER_PROTOCOL_SEL://$FQDN/api/v1
|
||||||
VITE_APP_NAME=PatchMon
|
VITE_APP_NAME=PatchMon
|
||||||
VITE_APP_VERSION=1.2.7
|
VITE_APP_VERSION=1.2.9
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
print_status "Environment files created"
|
print_status "Environment files created"
|
||||||
@@ -1206,7 +1239,7 @@ create_agent_version() {
|
|||||||
|
|
||||||
# Priority 2: Use fallback version if not found
|
# Priority 2: Use fallback version if not found
|
||||||
if [ "$current_version" = "N/A" ] || [ -z "$current_version" ]; then
|
if [ "$current_version" = "N/A" ] || [ -z "$current_version" ]; then
|
||||||
current_version="1.2.7"
|
current_version="1.2.9"
|
||||||
print_warning "Could not determine version, using fallback: $current_version"
|
print_warning "Could not determine version, using fallback: $current_version"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -1550,11 +1583,295 @@ deploy_instance() {
|
|||||||
:
|
:
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Detect existing PatchMon installations
|
||||||
|
detect_installations() {
|
||||||
|
local installations=()
|
||||||
|
|
||||||
|
# Find all directories in /opt that contain PatchMon installations
|
||||||
|
if [ -d "/opt" ]; then
|
||||||
|
for dir in /opt/*/; do
|
||||||
|
local dirname=$(basename "$dir")
|
||||||
|
# Skip backup directories
|
||||||
|
if [[ "$dirname" =~ \.backup\. ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
# Check if it's a PatchMon installation
|
||||||
|
if [ -f "$dir/backend/package.json" ] && grep -q "patchmon" "$dir/backend/package.json" 2>/dev/null; then
|
||||||
|
installations+=("$dirname")
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "${installations[@]}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Select installation to update
|
||||||
|
select_installation_to_update() {
|
||||||
|
local installations=($(detect_installations))
|
||||||
|
|
||||||
|
if [ ${#installations[@]} -eq 0 ]; then
|
||||||
|
print_error "No existing PatchMon installations found in /opt"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_info "Found ${#installations[@]} existing installation(s):"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
local i=1
|
||||||
|
declare -A install_map
|
||||||
|
for install in "${installations[@]}"; do
|
||||||
|
# Get current version if possible
|
||||||
|
local version="unknown"
|
||||||
|
if [ -f "/opt/$install/backend/package.json" ]; then
|
||||||
|
version=$(grep '"version"' "/opt/$install/backend/package.json" | head -1 | sed 's/.*"version": "\([^"]*\)".*/\1/')
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Get service status - try multiple naming conventions
|
||||||
|
# Convention 1: Just the install name (e.g., patchmon.internal)
|
||||||
|
local service_name="$install"
|
||||||
|
# Convention 2: patchmon. prefix (e.g., patchmon.patchmon.internal)
|
||||||
|
local alt_service_name1="patchmon.$install"
|
||||||
|
# Convention 3: patchmon- prefix with underscores (e.g., patchmon-patchmon_internal)
|
||||||
|
local alt_service_name2="patchmon-$(echo "$install" | tr '.' '_')"
|
||||||
|
local status="unknown"
|
||||||
|
|
||||||
|
# Try convention 1 first (most common)
|
||||||
|
if systemctl is-active --quiet "$service_name" 2>/dev/null; then
|
||||||
|
status="running"
|
||||||
|
elif systemctl is-enabled --quiet "$service_name" 2>/dev/null; then
|
||||||
|
status="stopped"
|
||||||
|
# Try convention 2
|
||||||
|
elif systemctl is-active --quiet "$alt_service_name1" 2>/dev/null; then
|
||||||
|
status="running"
|
||||||
|
service_name="$alt_service_name1"
|
||||||
|
elif systemctl is-enabled --quiet "$alt_service_name1" 2>/dev/null; then
|
||||||
|
status="stopped"
|
||||||
|
service_name="$alt_service_name1"
|
||||||
|
# Try convention 3
|
||||||
|
elif systemctl is-active --quiet "$alt_service_name2" 2>/dev/null; then
|
||||||
|
status="running"
|
||||||
|
service_name="$alt_service_name2"
|
||||||
|
elif systemctl is-enabled --quiet "$alt_service_name2" 2>/dev/null; then
|
||||||
|
status="stopped"
|
||||||
|
service_name="$alt_service_name2"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "%2d. %-30s (v%-10s - %s)\n" "$i" "$install" "$version" "$status"
|
||||||
|
install_map[$i]="$install"
|
||||||
|
# Store the service name for later use
|
||||||
|
declare -g "service_map_$i=$service_name"
|
||||||
|
i=$((i + 1))
|
||||||
|
done
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
while true; do
|
||||||
|
read_input "Select installation number to update" SELECTION "1"
|
||||||
|
|
||||||
|
if [[ "$SELECTION" =~ ^[0-9]+$ ]] && [ -n "${install_map[$SELECTION]}" ]; then
|
||||||
|
SELECTED_INSTANCE="${install_map[$SELECTION]}"
|
||||||
|
# Get the stored service name
|
||||||
|
local varname="service_map_$SELECTION"
|
||||||
|
SELECTED_SERVICE_NAME="${!varname}"
|
||||||
|
print_status "Selected: $SELECTED_INSTANCE"
|
||||||
|
print_info "Service: $SELECTED_SERVICE_NAME"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
print_error "Invalid selection. Please enter a number from 1 to ${#installations[@]}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
# Update existing installation
|
||||||
|
update_installation() {
|
||||||
|
local instance_dir="/opt/$SELECTED_INSTANCE"
|
||||||
|
local service_name="$SELECTED_SERVICE_NAME"
|
||||||
|
|
||||||
|
print_info "Updating PatchMon installation: $SELECTED_INSTANCE"
|
||||||
|
print_info "Installation directory: $instance_dir"
|
||||||
|
print_info "Service name: $service_name"
|
||||||
|
|
||||||
|
# Verify it's a git repository
|
||||||
|
if [ ! -d "$instance_dir/.git" ]; then
|
||||||
|
print_error "Installation directory is not a git repository"
|
||||||
|
print_error "Cannot perform git-based update"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Add git safe.directory to avoid ownership issues when running as root
|
||||||
|
print_info "Configuring git safe.directory..."
|
||||||
|
git config --global --add safe.directory "$instance_dir" 2>/dev/null || true
|
||||||
|
|
||||||
|
# Load existing .env to get database credentials
|
||||||
|
if [ -f "$instance_dir/backend/.env" ]; then
|
||||||
|
source "$instance_dir/backend/.env"
|
||||||
|
print_status "Loaded existing configuration"
|
||||||
|
|
||||||
|
# Parse DATABASE_URL to extract credentials
|
||||||
|
# Format: postgresql://user:password@host:port/database
|
||||||
|
if [ -n "$DATABASE_URL" ]; then
|
||||||
|
# Extract components using regex
|
||||||
|
DB_USER=$(echo "$DATABASE_URL" | sed -n 's|postgresql://\([^:]*\):.*|\1|p')
|
||||||
|
DB_PASS=$(echo "$DATABASE_URL" | sed -n 's|postgresql://[^:]*:\([^@]*\)@.*|\1|p')
|
||||||
|
DB_HOST=$(echo "$DATABASE_URL" | sed -n 's|.*@\([^:]*\):.*|\1|p')
|
||||||
|
DB_PORT=$(echo "$DATABASE_URL" | sed -n 's|.*:\([0-9]*\)/.*|\1|p')
|
||||||
|
DB_NAME=$(echo "$DATABASE_URL" | sed -n 's|.*/\([^?]*\).*|\1|p')
|
||||||
|
|
||||||
|
print_info "Database: $DB_NAME (user: $DB_USER)"
|
||||||
|
else
|
||||||
|
print_error "DATABASE_URL not found in .env file"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
print_error "Cannot find .env file at $instance_dir/backend/.env"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Select branch/version to update to
|
||||||
|
select_branch
|
||||||
|
|
||||||
|
print_info "Updating to: $DEPLOYMENT_BRANCH"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
read_yes_no "Proceed with update? This will pull new code and restart services" CONFIRM_UPDATE "y"
|
||||||
|
|
||||||
|
if [ "$CONFIRM_UPDATE" != "y" ]; then
|
||||||
|
print_warning "Update cancelled by user"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Stop the service
|
||||||
|
print_info "Stopping service: $service_name"
|
||||||
|
systemctl stop "$service_name" || true
|
||||||
|
|
||||||
|
# Create backup directory
|
||||||
|
local timestamp=$(date +%Y%m%d_%H%M%S)
|
||||||
|
local backup_dir="$instance_dir.backup.$timestamp"
|
||||||
|
local db_backup_file="$backup_dir/database_backup_$timestamp.sql"
|
||||||
|
|
||||||
|
print_info "Creating backup directory: $backup_dir"
|
||||||
|
mkdir -p "$backup_dir"
|
||||||
|
|
||||||
|
# Backup database
|
||||||
|
print_info "Backing up database: $DB_NAME"
|
||||||
|
if PGPASSWORD="$DB_PASS" pg_dump -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -F c -f "$db_backup_file" 2>/dev/null; then
|
||||||
|
print_status "Database backup created: $db_backup_file"
|
||||||
|
else
|
||||||
|
print_warning "Database backup failed, but continuing with code backup"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Backup code
|
||||||
|
print_info "Backing up code files..."
|
||||||
|
cp -r "$instance_dir" "$backup_dir/code"
|
||||||
|
print_status "Code backup created"
|
||||||
|
|
||||||
|
# Update code
|
||||||
|
print_info "Pulling latest code from branch: $DEPLOYMENT_BRANCH"
|
||||||
|
cd "$instance_dir"
|
||||||
|
|
||||||
|
# Clean up any untracked files that might conflict with incoming changes
|
||||||
|
print_info "Cleaning up untracked files to prevent merge conflicts..."
|
||||||
|
git clean -fd
|
||||||
|
|
||||||
|
# Reset any local changes to ensure clean state
|
||||||
|
print_info "Resetting local changes to ensure clean state..."
|
||||||
|
git reset --hard HEAD
|
||||||
|
|
||||||
|
# Fetch latest changes
|
||||||
|
git fetch origin
|
||||||
|
|
||||||
|
# Checkout the selected branch/tag
|
||||||
|
git checkout "$DEPLOYMENT_BRANCH"
|
||||||
|
git pull origin "$DEPLOYMENT_BRANCH" || git pull # For tags, just pull
|
||||||
|
|
||||||
|
print_status "Code updated successfully"
|
||||||
|
|
||||||
|
# Update dependencies
|
||||||
|
print_info "Updating backend dependencies..."
|
||||||
|
cd "$instance_dir/backend"
|
||||||
|
npm install --production --ignore-scripts
|
||||||
|
|
||||||
|
print_info "Updating frontend dependencies..."
|
||||||
|
cd "$instance_dir/frontend"
|
||||||
|
npm install --ignore-scripts
|
||||||
|
|
||||||
|
# Build frontend
|
||||||
|
print_info "Building frontend..."
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
# Run database migrations and generate Prisma client
|
||||||
|
print_info "Running database migrations..."
|
||||||
|
cd "$instance_dir/backend"
|
||||||
|
npx prisma generate
|
||||||
|
npx prisma migrate deploy
|
||||||
|
|
||||||
|
# Start the service
|
||||||
|
print_info "Starting service: $service_name"
|
||||||
|
systemctl start "$service_name"
|
||||||
|
|
||||||
|
# Wait a moment and check status
|
||||||
|
sleep 3
|
||||||
|
|
||||||
|
if systemctl is-active --quiet "$service_name"; then
|
||||||
|
print_success "✅ Update completed successfully!"
|
||||||
|
print_status "Service $service_name is running"
|
||||||
|
|
||||||
|
# Get new version
|
||||||
|
local new_version=$(grep '"version"' "$instance_dir/backend/package.json" | head -1 | sed 's/.*"version": "\([^"]*\)".*/\1/')
|
||||||
|
print_info "Updated to version: $new_version"
|
||||||
|
echo ""
|
||||||
|
print_info "Backup Information:"
|
||||||
|
print_info " Code backup: $backup_dir/code"
|
||||||
|
print_info " Database backup: $db_backup_file"
|
||||||
|
echo ""
|
||||||
|
print_info "To restore database if needed:"
|
||||||
|
print_info " PGPASSWORD=\"$DB_PASS\" pg_restore -h \"$DB_HOST\" -U \"$DB_USER\" -d \"$DB_NAME\" -c \"$db_backup_file\""
|
||||||
|
echo ""
|
||||||
|
else
|
||||||
|
print_error "Service failed to start after update"
|
||||||
|
echo ""
|
||||||
|
print_warning "ROLLBACK INSTRUCTIONS:"
|
||||||
|
print_info "1. Restore code:"
|
||||||
|
print_info " sudo rm -rf $instance_dir"
|
||||||
|
print_info " sudo mv $backup_dir/code $instance_dir"
|
||||||
|
echo ""
|
||||||
|
print_info "2. Restore database:"
|
||||||
|
print_info " PGPASSWORD=\"$DB_PASS\" pg_restore -h \"$DB_HOST\" -U \"$DB_USER\" -d \"$DB_NAME\" -c \"$db_backup_file\""
|
||||||
|
echo ""
|
||||||
|
print_info "3. Restart service:"
|
||||||
|
print_info " sudo systemctl start $service_name"
|
||||||
|
echo ""
|
||||||
|
print_info "Check logs: journalctl -u $service_name -f"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
# Main script execution
|
# Main script execution
|
||||||
main() {
|
main() {
|
||||||
# Log script entry
|
# Parse command-line arguments
|
||||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Interactive installation started" >> "$DEBUG_LOG"
|
if [ "$1" = "--update" ]; then
|
||||||
|
UPDATE_MODE="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Log script entry
|
||||||
|
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Script started - Update mode: $UPDATE_MODE" >> "$DEBUG_LOG"
|
||||||
|
|
||||||
|
# Handle update mode
|
||||||
|
if [ "$UPDATE_MODE" = "true" ]; then
|
||||||
|
print_banner
|
||||||
|
print_info "🔄 PatchMon Update Mode"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Select installation to update
|
||||||
|
select_installation_to_update
|
||||||
|
|
||||||
|
# Perform update
|
||||||
|
update_installation
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Normal installation mode
|
||||||
# Run interactive setup
|
# Run interactive setup
|
||||||
interactive_setup
|
interactive_setup
|
||||||
|
|
||||||
@@ -1588,5 +1905,30 @@ main() {
|
|||||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] deploy_instance function completed" >> "$DEBUG_LOG"
|
echo "[$(date '+%Y-%m-%d %H:%M:%S')] deploy_instance function completed" >> "$DEBUG_LOG"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Run main function (no arguments needed for interactive mode)
|
# Show usage/help
|
||||||
main
|
show_usage() {
|
||||||
|
echo "PatchMon Self-Hosting Installation & Update Script"
|
||||||
|
echo "Version: $SCRIPT_VERSION"
|
||||||
|
echo ""
|
||||||
|
echo "Usage:"
|
||||||
|
echo " $0 # Interactive installation (default)"
|
||||||
|
echo " $0 --update # Update existing installation"
|
||||||
|
echo " $0 --help # Show this help message"
|
||||||
|
echo ""
|
||||||
|
echo "Examples:"
|
||||||
|
echo " # New installation:"
|
||||||
|
echo " sudo bash $0"
|
||||||
|
echo ""
|
||||||
|
echo " # Update existing installation:"
|
||||||
|
echo " sudo bash $0 --update"
|
||||||
|
echo ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check for help flag
|
||||||
|
if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
|
||||||
|
show_usage
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run main function
|
||||||
|
main "$@"
|
||||||
|
|||||||
Reference in New Issue
Block a user