Mirror of https://github.com/9technologygroup/patchmon.net.git (synced 2025-10-24 16:43:41 +00:00)

Compare commits: renovate/p… → 00abbc8c62 (63 commits)
Commits in this compare (63):
00abbc8c62, 1350fd4e47, 6b9a42fb0b, 3ee6f9aaa0, c9aef78912, 8a5d61a7c1, fd2df0729e, df502c676f, d7f7b24f8f,
54cea6b20b, 1ef2308d56, af9b0d5d76, 7b8c29860c, fcd1b52e0e, d78fb63c2d, d3dc068c8e, 46e19fbfc2, 5be8e01aa3,
80a701cc33, 293733dc0b, c4d0d8bee8, 30c89de134, c7ab40e4a2, 4b35fc9ab9, 191a1afada, 175f10b8b7, 080bcbe22e,
3175ed79a5, fba6d0ede5, 54a5012012, 5004e062b4, 44d52a5536, 52c8ba6b03, 9db563dec3, c328123bd3, 46eb797ac3,
c43afeb127, 5b77a1328d, 9a40d5e6ee, fdd0cfd619, de236f9ae2, 4d5040e0e9, 28c5310b99, a2e9743da6, 3863d641fa,
cc8f77a946, 36455e2bfd, af65d38cad, 29266b6d77, f96e468482, 9f8c88badf, 7985a225d7, 8c538bd99c, 623bf5e2c8,
ed8cc81b89, 5c4353a688, 6ebcdd57d5, a3d0dfd665, d99ded6d65, 1ea96b6172, 1e5ee66825, 88130797e4, 0ad1a96871
.dockerignore (new file, 34 lines)

```
# Environment files
**/.env
**/.env.*
**/env.example

# Node modules
**/node_modules

# Logs
**/logs
**/*.log

# Git
**/.git
**/.gitignore

# IDE files
**/.vscode
**/.idea
**/*.swp
**/*.swo

# OS files
**/.DS_Store
**/Thumbs.db

# Build artifacts
**/dist
**/build
**/coverage

# Temporary files
**/tmp
**/temp
```
.gitignore (vendored, +1 line)

```diff
@@ -139,6 +139,7 @@ playwright-report/
 test-results.xml
 test_*.sh
 test-*.sh
+*.code-workspace

 # Package manager lock files (uncomment if you want to ignore them)
 # package-lock.json
```
README changes:

````diff
@@ -43,7 +43,7 @@ PatchMon provides centralized patch management across diverse server environment
 
 ### API & Integrations
 - REST API under `/api/v1` with JWT auth
-- **Proxmox LXC Auto-Enrollment** - Automatically discover and enroll LXC containers from Proxmox hosts ([Documentation](PROXMOX_AUTO_ENROLLMENT.md))
+- Proxmox LXC Auto-Enrollment - Automatically discover and enroll LXC containers from Proxmox hosts
 
 ### Security
 - Rate limiting for general, auth, and agent endpoints
@@ -85,11 +85,16 @@ apt-get upgrade -y
 apt install curl -y
 ```
 
-#### Script
+#### Install Script
 ```bash
 curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh
 ```
 
+#### Update Script (--update flag)
+```bash
+curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh --update
+```
+
 #### Minimum specs for building : #####
 CPU : 2 vCPU
 RAM : 2GB
````
Other new files in this compare:

- agents/patchmon-agent-legacy1-2-8.sh (new file, 1598 lines; diff suppressed because it is too large)
- agents/patchmon-agent-linux-386 (new executable, binary not shown)
- agents/patchmon-agent-linux-amd64 (new executable, binary not shown)
- agents/patchmon-agent-linux-arm (new executable, binary not shown)
- agents/patchmon-agent-linux-arm64 (new executable, binary not shown)
- One additional file: diff suppressed because it is too large (filename not shown)
- agents/patchmon-docker-agent.sh (new executable file, 496 lines), shown below
```bash
#!/bin/bash

# PatchMon Docker Agent Script v1.3.0
# This script collects Docker container and image information and sends it to PatchMon

# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.3.0"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-docker-agent.log"

# Curl flags placeholder (replaced by server based on SSL settings)
CURL_FLAGS=""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Logging function
log() {
    if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
        echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE" 2>/dev/null
    fi
}

# Error handling
error() {
    echo -e "${RED}ERROR: $1${NC}" >&2
    log "ERROR: $1"
    exit 1
}

# Info logging
info() {
    echo -e "${BLUE}ℹ️ $1${NC}" >&2
    log "INFO: $1"
}

# Success logging
success() {
    echo -e "${GREEN}✅ $1${NC}" >&2
    log "SUCCESS: $1"
}

# Warning logging
warning() {
    echo -e "${YELLOW}⚠️ $1${NC}" >&2
    log "WARNING: $1"
}

# Check if Docker is installed and running
check_docker() {
    if ! command -v docker &> /dev/null; then
        error "Docker is not installed on this system"
    fi

    if ! docker info &> /dev/null; then
        error "Docker daemon is not running or you don't have permission to access it. Try running with sudo."
    fi
}

# Load credentials
load_credentials() {
    if [[ ! -f "$CREDENTIALS_FILE" ]]; then
        error "Credentials file not found at $CREDENTIALS_FILE. Please configure the main PatchMon agent first."
    fi

    source "$CREDENTIALS_FILE"

    if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
        error "API credentials not found in $CREDENTIALS_FILE"
    fi

    # Use PATCHMON_URL from credentials if available, otherwise use default
    if [[ -n "$PATCHMON_URL" ]]; then
        PATCHMON_SERVER="$PATCHMON_URL"
    fi
}

# Load configuration
load_config() {
    if [[ -f "$CONFIG_FILE" ]]; then
        source "$CONFIG_FILE"
        if [[ -n "$SERVER_URL" ]]; then
            PATCHMON_SERVER="$SERVER_URL"
        fi
    fi
}

# Collect Docker containers
collect_containers() {
    info "Collecting Docker container information..."

    local containers_json="["
    local first=true

    # Get all containers (running and stopped)
    while IFS='|' read -r container_id name image status state created started ports; do
        if [[ -z "$container_id" ]]; then
            continue
        fi

        # Parse image name and tag
        local image_name="${image%%:*}"
        local image_tag="${image##*:}"
        if [[ "$image_tag" == "$image_name" ]]; then
            image_tag="latest"
        fi

        # Determine image source based on registry
        local image_source="docker-hub"
        if [[ "$image_name" == ghcr.io/* ]]; then
            image_source="github"
        elif [[ "$image_name" == registry.gitlab.com/* ]]; then
            image_source="gitlab"
        elif [[ "$image_name" == *"/"*"/"* ]]; then
            image_source="private"
        fi

        # Get repository name (without registry prefix for common registries)
        local image_repository="$image_name"
        image_repository="${image_repository#ghcr.io/}"
        image_repository="${image_repository#registry.gitlab.com/}"

        # Get image ID
        local full_image_id=$(docker inspect --format='{{.Image}}' "$container_id" 2>/dev/null || echo "unknown")
        full_image_id="${full_image_id#sha256:}"

        # Normalize status (extract just the status keyword)
        local normalized_status="unknown"
        if [[ "$status" =~ ^Up ]]; then
            normalized_status="running"
        elif [[ "$status" =~ ^Exited ]]; then
            normalized_status="exited"
        elif [[ "$status" =~ ^Created ]]; then
            normalized_status="created"
        elif [[ "$status" =~ ^Restarting ]]; then
            normalized_status="restarting"
        elif [[ "$status" =~ ^Paused ]]; then
            normalized_status="paused"
        elif [[ "$status" =~ ^Dead ]]; then
            normalized_status="dead"
        fi

        # Parse ports
        local ports_json="null"
        if [[ -n "$ports" && "$ports" != "null" ]]; then
            # Convert Docker port format to JSON
            ports_json=$(echo "$ports" | jq -R -s -c 'split(",") | map(select(length > 0)) | map(split("->") | {(.[0]): .[1]}) | add // {}')
        fi

        # Convert dates to ISO 8601 format
        # If date conversion fails, use null instead of invalid date string
        local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
        local started_iso="null"
        if [[ -n "$started" && "$started" != "null" ]]; then
            started_iso=$(date -d "$started" -Iseconds 2>/dev/null || echo "null")
        fi

        # Add comma for JSON array
        if [[ "$first" == false ]]; then
            containers_json+=","
        fi
        first=false

        # Build JSON object for this container
        containers_json+="{\"container_id\":\"$container_id\","
        containers_json+="\"name\":\"$name\","
        containers_json+="\"image_name\":\"$image_name\","
        containers_json+="\"image_tag\":\"$image_tag\","
        containers_json+="\"image_repository\":\"$image_repository\","
        containers_json+="\"image_source\":\"$image_source\","
        containers_json+="\"image_id\":\"$full_image_id\","
        containers_json+="\"status\":\"$normalized_status\","
        containers_json+="\"state\":\"$state\","
        containers_json+="\"ports\":$ports_json"

        # Only add created_at if we have a valid date
        if [[ "$created_iso" != "null" ]]; then
            containers_json+=",\"created_at\":\"$created_iso\""
        fi

        # Only add started_at if we have a valid date
        if [[ "$started_iso" != "null" ]]; then
            containers_json+=",\"started_at\":\"$started_iso\""
        fi

        containers_json+="}"

    done < <(docker ps -a --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.State}}|{{.CreatedAt}}|{{.RunningFor}}|{{.Ports}}' 2>/dev/null)

    containers_json+="]"

    echo "$containers_json"
}

# Collect Docker images
collect_images() {
    info "Collecting Docker image information..."

    local images_json="["
    local first=true

    while IFS='|' read -r repository tag image_id created size digest; do
        if [[ -z "$repository" || "$repository" == "<none>" ]]; then
            continue
        fi

        # Clean up tag
        if [[ -z "$tag" || "$tag" == "<none>" ]]; then
            tag="latest"
        fi

        # Clean image ID
        image_id="${image_id#sha256:}"

        # Determine source
        local source="docker-hub"
        if [[ "$repository" == ghcr.io/* ]]; then
            source="github"
        elif [[ "$repository" == registry.gitlab.com/* ]]; then
            source="gitlab"
        elif [[ "$repository" == *"/"*"/"* ]]; then
            source="private"
        fi

        # Convert size to bytes (approximate)
        local size_bytes=0
        if [[ "$size" =~ ([0-9.]+)([KMGT]?B) ]]; then
            local num="${BASH_REMATCH[1]}"
            local unit="${BASH_REMATCH[2]}"
            case "$unit" in
                KB) size_bytes=$(echo "$num * 1024" | bc | cut -d. -f1) ;;
                MB) size_bytes=$(echo "$num * 1024 * 1024" | bc | cut -d. -f1) ;;
                GB) size_bytes=$(echo "$num * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
                TB) size_bytes=$(echo "$num * 1024 * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
                B) size_bytes=$(echo "$num" | cut -d. -f1) ;;
            esac
        fi

        # Convert created date to ISO 8601
        # If date conversion fails, use null instead of invalid date string
        local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")

        # Add comma for JSON array
        if [[ "$first" == false ]]; then
            images_json+=","
        fi
        first=false

        # Build JSON object for this image
        images_json+="{\"repository\":\"$repository\","
        images_json+="\"tag\":\"$tag\","
        images_json+="\"image_id\":\"$image_id\","
        images_json+="\"source\":\"$source\","
        images_json+="\"size_bytes\":$size_bytes"

        # Only add created_at if we have a valid date
        if [[ "$created_iso" != "null" ]]; then
            images_json+=",\"created_at\":\"$created_iso\""
        fi

        # Only add digest if present
        if [[ -n "$digest" && "$digest" != "<none>" ]]; then
            images_json+=",\"digest\":\"$digest\""
        fi

        images_json+="}"

    done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.CreatedAt}}|{{.Size}}|{{.Digest}}' --no-trunc 2>/dev/null)

    images_json+="]"

    echo "$images_json"
}

# Check for image updates
check_image_updates() {
    info "Checking for image updates..."

    local updates_json="["
    local first=true
    local update_count=0

    # Get all images
    while IFS='|' read -r repository tag image_id digest; do
        if [[ -z "$repository" || "$repository" == "<none>" || "$tag" == "<none>" ]]; then
            continue
        fi

        # Skip checking 'latest' tag as it's always considered current by name
        # We'll still check digest though
        local full_image="${repository}:${tag}"

        # Try to get remote digest from registry
        # Use docker manifest inspect to avoid pulling the image
        local remote_digest=$(docker manifest inspect "$full_image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty' 2>/dev/null)

        if [[ -z "$remote_digest" ]]; then
            # If manifest inspect fails, try buildx imagetools inspect (works for more registries)
            remote_digest=$(docker buildx imagetools inspect "$full_image" 2>/dev/null | grep -oP 'Digest:\s*\K\S+' | head -1)
        fi

        # Clean up digests for comparison
        local local_digest="${digest#sha256:}"
        remote_digest="${remote_digest#sha256:}"

        # If we got a remote digest and it's different from local, there's an update
        if [[ -n "$remote_digest" && -n "$local_digest" && "$remote_digest" != "$local_digest" ]]; then
            if [[ "$first" == false ]]; then
                updates_json+=","
            fi
            first=false

            # Build update JSON object
            updates_json+="{\"repository\":\"$repository\","
            updates_json+="\"current_tag\":\"$tag\","
            updates_json+="\"available_tag\":\"$tag\","
            updates_json+="\"current_digest\":\"$local_digest\","
            updates_json+="\"available_digest\":\"$remote_digest\","
            updates_json+="\"image_id\":\"${image_id#sha256:}\""
            updates_json+="}"

            ((update_count++))
        fi

    done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.Digest}}' --no-trunc 2>/dev/null)

    updates_json+="]"

    info "Found $update_count image update(s) available"

    echo "$updates_json"
}

# Send Docker data to server
send_docker_data() {
    load_credentials

    info "Collecting Docker data..."

    local containers=$(collect_containers)
    local images=$(collect_images)
    local updates=$(check_image_updates)

    # Count collected items
    local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
    local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
    local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")

    info "Found $container_count containers, $image_count images, and $update_count update(s) available"

    # Build payload
    local payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":$images,\"updates\":$updates}"

    # Send to server
    info "Sending Docker data to PatchMon server..."

    local response=$(curl $CURL_FLAGS -s -w "\n%{http_code}" -X POST \
        -H "Content-Type: application/json" \
        -d "$payload" \
        "${PATCHMON_SERVER}/api/${API_VERSION}/docker/collect" 2>&1)

    local http_code=$(echo "$response" | tail -n1)
    local response_body=$(echo "$response" | head -n-1)

    if [[ "$http_code" == "200" ]]; then
        success "Docker data sent successfully!"
        log "Docker data sent: $container_count containers, $image_count images"
        return 0
    else
        error "Failed to send Docker data. HTTP Status: $http_code\nResponse: $response_body"
    fi
}

# Test Docker data collection without sending
test_collection() {
    check_docker

    info "Testing Docker data collection (dry run)..."
    echo ""

    local containers=$(collect_containers)
    local images=$(collect_images)
    local updates=$(check_image_updates)

    local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
    local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
    local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")

    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo -e "${GREEN}Docker Data Collection Results${NC}"
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo -e "Containers found: ${GREEN}$container_count${NC}"
    echo -e "Images found: ${GREEN}$image_count${NC}"
    echo -e "Updates available: ${YELLOW}$update_count${NC}"
    echo ""

    if command -v jq &> /dev/null; then
        echo "━━━ Containers ━━━"
        echo "$containers" | jq -r '.[] | "\(.name) (\(.status)) - \(.image_name):\(.image_tag)"' | head -10
        if [[ $container_count -gt 10 ]]; then
            echo "... and $((container_count - 10)) more"
        fi
        echo ""
        echo "━━━ Images ━━━"
        echo "$images" | jq -r '.[] | "\(.repository):\(.tag) (\(.size_bytes / 1024 / 1024 | floor)MB)"' | head -10
        if [[ $image_count -gt 10 ]]; then
            echo "... and $((image_count - 10)) more"
        fi

        if [[ $update_count -gt 0 ]]; then
            echo ""
            echo "━━━ Available Updates ━━━"
            echo "$updates" | jq -r '.[] | "\(.repository):\(.current_tag) → \(.available_tag)"'
        fi
    fi

    echo ""
    success "Test collection completed successfully!"
}

# Show help
show_help() {
    cat << EOF
PatchMon Docker Agent v${AGENT_VERSION}

This agent collects Docker container and image information and sends it to PatchMon.

USAGE:
    $0 <command>

COMMANDS:
    collect    Collect and send Docker data to PatchMon server
    test       Test Docker data collection without sending (dry run)
    help       Show this help message

REQUIREMENTS:
    - Docker must be installed and running
    - Main PatchMon agent must be configured first
    - Credentials file must exist at $CREDENTIALS_FILE

EXAMPLES:
    # Test collection (dry run)
    sudo $0 test

    # Collect and send Docker data
    sudo $0 collect

SCHEDULING:
    To run this agent automatically, add a cron job:

    # Run every 5 minutes
    */5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect

    # Run every hour
    0 * * * * /usr/local/bin/patchmon-docker-agent.sh collect

FILES:
    Config: $CONFIG_FILE
    Credentials: $CREDENTIALS_FILE
    Log: $LOG_FILE

EOF
}

# Main function
main() {
    case "$1" in
        "collect")
            check_docker
            load_config
            send_docker_data
            ;;
        "test")
            check_docker
            load_config
            test_collection
            ;;
        "help"|"--help"|"-h"|"")
            show_help
            ;;
        *)
            error "Unknown command: $1\n\nRun '$0 help' for usage information."
            ;;
    esac
}

# Run main function
main "$@"
```
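For orientation, here is a minimal sketch of the request this agent ends up making. The endpoint path and the apiId/apiKey/containers/images/updates field names come from the script above; the server URL, credentials, and the single container entry are invented placeholders.

```bash
#!/bin/bash
# Hypothetical values for illustration only - real runs read these from /etc/patchmon/credentials
PATCHMON_SERVER="http://localhost:3001"
API_ID="example-api-id"
API_KEY="example-api-key"

# One hand-written container entry in the same shape collect_containers() builds
containers='[{"container_id":"abc123","name":"web","image_name":"nginx","image_tag":"1.27","image_repository":"nginx","image_source":"docker-hub","image_id":"deadbeef","status":"running","state":"running","ports":null}]'

payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":[],\"updates\":[]}"

curl -s -X POST \
    -H "Content-Type: application/json" \
    -d "$payload" \
    "${PATCHMON_SERVER}/api/v1/docker/collect"
```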
Agent install script changes (filename not shown):

```diff
@@ -97,13 +97,22 @@ verify_datetime
 # Clean up old files (keep only last 3 of each type)
 cleanup_old_files() {
     # Clean up old credential backups
-    ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+    ls -t /etc/patchmon/credentials.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+
+    # Clean up old config backups
+    ls -t /etc/patchmon/config.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
 
     # Clean up old agent backups
-    ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+    ls -t /usr/local/bin/patchmon-agent.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
 
     # Clean up old log files
-    ls -t /var/log/patchmon-agent.log.old.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+    ls -t /etc/patchmon/logs/patchmon-agent.log.old.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+
+    # Clean up old shell script backups (if any exist)
+    ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+
+    # Clean up old credentials backups (if any exist)
+    ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
 }
 
 # Run cleanup at start
```
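The `ls -t ... | tail -n +4 | xargs -r rm -f` pipeline used throughout this function keeps the three newest backups and removes the rest. A standalone sketch with throwaway files (hypothetical /tmp path) shows the effect:

```bash
#!/bin/bash
# Create five dummy backups with distinct timestamps
mkdir -p /tmp/retention-demo
for i in 1 2 3 4 5; do
    touch /tmp/retention-demo/credentials.yml.backup.$i
    sleep 1   # ensure distinct mtimes so ls -t has a stable order
done

# ls -t lists newest first; tail -n +4 selects everything after the third entry;
# xargs -r rm -f deletes those (and does nothing if the list is empty)
ls -t /tmp/retention-demo/credentials.yml.backup.* | tail -n +4 | xargs -r rm -f

ls /tmp/retention-demo   # only the three newest backups remain
```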
```diff
@@ -127,6 +136,12 @@ if [[ -z "$PATCHMON_URL" ]] || [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
     error "Missing required parameters. This script should be called via the PatchMon web interface."
 fi
 
+# Parse architecture parameter (default to amd64)
+ARCHITECTURE="${ARCHITECTURE:-amd64}"
+if [[ "$ARCHITECTURE" != "amd64" && "$ARCHITECTURE" != "386" && "$ARCHITECTURE" != "arm64" ]]; then
+    error "Invalid architecture '$ARCHITECTURE'. Must be one of: amd64, 386, arm64"
+fi
+
 # Check if --force flag is set (for bypassing broken packages)
 FORCE_INSTALL="${FORCE_INSTALL:-false}"
 if [[ "$*" == *"--force"* ]] || [[ "$FORCE_INSTALL" == "true" ]]; then
```
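Normally the web interface supplies these values, but for completeness, a hedged example of driving the installer by hand. The variable names come from the script; the URL, credentials, and the patchmon-install.sh filename (borrowed from the LXC enrollment snippet further down) are placeholders.

```bash
# All values below are illustrative
PATCHMON_URL="https://patchmon.example.com" \
API_ID="example-api-id" \
API_KEY="example-api-key" \
ARCHITECTURE="arm64" \
FORCE_INSTALL="true" \
bash patchmon-install.sh --force
```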
```diff
@@ -142,6 +157,7 @@ info "🚀 Starting PatchMon Agent Installation..."
 info "📋 Server: $PATCHMON_URL"
 info "🔑 API ID: ${API_ID:0:16}..."
 info "🆔 Machine ID: ${MACHINE_ID:0:16}..."
+info "🏗️ Architecture: $ARCHITECTURE"
 
 # Display diagnostic information
 echo ""
@@ -150,6 +166,7 @@ echo " • URL: $PATCHMON_URL"
 echo " • CURL FLAGS: $CURL_FLAGS"
 echo " • API ID: ${API_ID:0:16}..."
 echo " • API Key: ${API_KEY:0:16}..."
+echo " • Architecture: $ARCHITECTURE"
 echo ""
 
 # Install required dependencies
```
```diff
@@ -294,67 +311,117 @@ else
     mkdir -p /etc/patchmon
 fi
 
-# Step 2: Create credentials file
-info "🔐 Creating API credentials file..."
+# Step 2: Create configuration files
+info "🔐 Creating configuration files..."
+
+# Check if config file already exists
+if [[ -f "/etc/patchmon/config.yml" ]]; then
+    warning "⚠️ Config file already exists at /etc/patchmon/config.yml"
+    warning "⚠️ Moving existing file out of the way for fresh installation"
+
+    # Clean up old config backups (keep only last 3)
+    ls -t /etc/patchmon/config.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+
+    # Move existing file out of the way
+    mv /etc/patchmon/config.yml /etc/patchmon/config.yml.backup.$(date +%Y%m%d_%H%M%S)
+    info "📋 Moved existing config to: /etc/patchmon/config.yml.backup.$(date +%Y%m%d_%H%M%S)"
+fi
 
 # Check if credentials file already exists
-if [[ -f "/etc/patchmon/credentials" ]]; then
-    warning "⚠️ Credentials file already exists at /etc/patchmon/credentials"
+if [[ -f "/etc/patchmon/credentials.yml" ]]; then
+    warning "⚠️ Credentials file already exists at /etc/patchmon/credentials.yml"
     warning "⚠️ Moving existing file out of the way for fresh installation"
 
     # Clean up old credential backups (keep only last 3)
-    ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+    ls -t /etc/patchmon/credentials.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
 
     # Move existing file out of the way
-    mv /etc/patchmon/credentials /etc/patchmon/credentials.backup.$(date +%Y%m%d_%H%M%S)
-    info "📋 Moved existing credentials to: /etc/patchmon/credentials.backup.$(date +%Y%m%d_%H%M%S)"
+    mv /etc/patchmon/credentials.yml /etc/patchmon/credentials.yml.backup.$(date +%Y%m%d_%H%M%S)
+    info "📋 Moved existing credentials to: /etc/patchmon/credentials.yml.backup.$(date +%Y%m%d_%H%M%S)"
 fi
 
-cat > /etc/patchmon/credentials << EOF
+# Clean up old credentials file if it exists (from previous installations)
+if [[ -f "/etc/patchmon/credentials" ]]; then
+    warning "⚠️ Found old credentials file, removing it..."
+    rm -f /etc/patchmon/credentials
+    info "📋 Removed old credentials file"
+fi
+
+# Create main config file
+cat > /etc/patchmon/config.yml << EOF
+# PatchMon Agent Configuration
+# Generated on $(date)
+patchmon_server: "$PATCHMON_URL"
+api_version: "v1"
+credentials_file: "/etc/patchmon/credentials.yml"
+log_file: "/etc/patchmon/logs/patchmon-agent.log"
+log_level: "info"
+EOF
+
+# Create credentials file
+cat > /etc/patchmon/credentials.yml << EOF
 # PatchMon API Credentials
 # Generated on $(date)
-PATCHMON_URL="$PATCHMON_URL"
-API_ID="$API_ID"
-API_KEY="$API_KEY"
+api_id: "$API_ID"
+api_key: "$API_KEY"
 EOF
-chmod 600 /etc/patchmon/credentials
 
-# Step 3: Download the agent script using API credentials
-info "📥 Downloading PatchMon agent script..."
+chmod 600 /etc/patchmon/config.yml
+chmod 600 /etc/patchmon/credentials.yml
 
-# Check if agent script already exists
-if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
-    warning "⚠️ Agent script already exists at /usr/local/bin/patchmon-agent.sh"
+# Step 3: Download the PatchMon agent binary using API credentials
+info "📥 Downloading PatchMon agent binary..."
+
+# Determine the binary filename based on architecture
+BINARY_NAME="patchmon-agent-linux-${ARCHITECTURE}"
+
+# Check if agent binary already exists
+if [[ -f "/usr/local/bin/patchmon-agent" ]]; then
+    warning "⚠️ Agent binary already exists at /usr/local/bin/patchmon-agent"
     warning "⚠️ Moving existing file out of the way for fresh installation"
 
     # Clean up old agent backups (keep only last 3)
-    ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
+    ls -t /usr/local/bin/patchmon-agent.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
 
     # Move existing file out of the way
-    mv /usr/local/bin/patchmon-agent.sh /usr/local/bin/patchmon-agent.sh.backup.$(date +%Y%m%d_%H%M%S)
-    info "📋 Moved existing agent to: /usr/local/bin/patchmon-agent.sh.backup.$(date +%Y%m%d_%H%M%S)"
+    mv /usr/local/bin/patchmon-agent /usr/local/bin/patchmon-agent.backup.$(date +%Y%m%d_%H%M%S)
+    info "📋 Moved existing agent to: /usr/local/bin/patchmon-agent.backup.$(date +%Y%m%d_%H%M%S)"
 fi
 
+# Clean up old shell script if it exists (from previous installations)
+if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
+    warning "⚠️ Found old shell script agent, removing it..."
+    rm -f /usr/local/bin/patchmon-agent.sh
+    info "📋 Removed old shell script agent"
+fi
+
+# Download the binary
 curl $CURL_FLAGS \
     -H "X-API-ID: $API_ID" \
     -H "X-API-KEY: $API_KEY" \
-    "$PATCHMON_URL/api/v1/hosts/agent/download" \
-    -o /usr/local/bin/patchmon-agent.sh
+    "$PATCHMON_URL/api/v1/hosts/agent/download?arch=$ARCHITECTURE&force=binary" \
+    -o /usr/local/bin/patchmon-agent
 
-chmod +x /usr/local/bin/patchmon-agent.sh
+chmod +x /usr/local/bin/patchmon-agent
 
-# Get the agent version from the downloaded script
-AGENT_VERSION=$(grep '^AGENT_VERSION=' /usr/local/bin/patchmon-agent.sh | cut -d'"' -f2 2>/dev/null || echo "Unknown")
+# Get the agent version from the binary
+AGENT_VERSION=$(/usr/local/bin/patchmon-agent version 2>/dev/null || echo "Unknown")
 info "📋 Agent version: $AGENT_VERSION"
 
+# Handle existing log files and create log directory
+info "📁 Setting up log directory..."
+
+# Create log directory if it doesn't exist
+mkdir -p /etc/patchmon/logs
+
 # Handle existing log files
-if [[ -f "/var/log/patchmon-agent.log" ]]; then
-    warning "⚠️ Existing log file found at /var/log/patchmon-agent.log"
+if [[ -f "/etc/patchmon/logs/patchmon-agent.log" ]]; then
+    warning "⚠️ Existing log file found at /etc/patchmon/logs/patchmon-agent.log"
     warning "⚠️ Rotating log file for fresh start"
 
     # Rotate the log file
-    mv /var/log/patchmon-agent.log /var/log/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)
-    info "📋 Log file rotated to: /var/log/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)"
+    mv /etc/patchmon/logs/patchmon-agent.log /etc/patchmon/logs/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)
+    info "📋 Log file rotated to: /etc/patchmon/logs/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)"
 fi
 
 # Step 4: Test the configuration
```
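A quick post-install sanity check, sketched from the paths and the `version` subcommand used above; the expected 600 mode on the credential file matches the chmod in the installer.

```bash
# Confirm the binary is present and executable, and report its version
ls -l /usr/local/bin/patchmon-agent
/usr/local/bin/patchmon-agent version

# Inspect the generated configuration and check credential file permissions (should print 600)
cat /etc/patchmon/config.yml
stat -c '%a %n' /etc/patchmon/credentials.yml
```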
```diff
@@ -386,19 +453,76 @@ if [[ "$http_code" == "200" ]]; then
 fi
 
 info "🧪 Testing API credentials and connectivity..."
-if /usr/local/bin/patchmon-agent.sh test; then
+if /usr/local/bin/patchmon-agent ping; then
     success "✅ TEST: API credentials are valid and server is reachable"
 else
     error "❌ Failed to validate API credentials or reach server"
 fi
 
-# Step 5: Send initial data and setup automated updates
+# Step 5: Send initial data and setup systemd service
 info "📊 Sending initial package data to server..."
-if /usr/local/bin/patchmon-agent.sh update; then
+if /usr/local/bin/patchmon-agent report; then
     success "✅ UPDATE: Initial package data sent successfully"
-    info "✅ Automated updates configured by agent"
 else
-    warning "⚠️ Failed to send initial data. You can retry later with: /usr/local/bin/patchmon-agent.sh update"
+    warning "⚠️ Failed to send initial data. You can retry later with: /usr/local/bin/patchmon-agent report"
+fi
+
+# Step 6: Setup systemd service for WebSocket connection
+info "🔧 Setting up systemd service..."
+
+# Stop and disable existing service if it exists
+if systemctl is-active --quiet patchmon-agent.service 2>/dev/null; then
+    warning "⚠️ Stopping existing PatchMon agent service..."
+    systemctl stop patchmon-agent.service
+fi
+
+if systemctl is-enabled --quiet patchmon-agent.service 2>/dev/null; then
+    warning "⚠️ Disabling existing PatchMon agent service..."
+    systemctl disable patchmon-agent.service
+fi
+
+# Create systemd service file
+cat > /etc/systemd/system/patchmon-agent.service << EOF
+[Unit]
+Description=PatchMon Agent Service
+After=network.target
+Wants=network.target
+
+[Service]
+Type=simple
+User=root
+ExecStart=/usr/local/bin/patchmon-agent serve
+Restart=always
+RestartSec=10
+WorkingDirectory=/etc/patchmon
+
+# Logging
+StandardOutput=journal
+StandardError=journal
+SyslogIdentifier=patchmon-agent
+
+[Install]
+WantedBy=multi-user.target
+EOF
+
+# Clean up old crontab entries if they exist (from previous installations)
+if crontab -l 2>/dev/null | grep -q "patchmon-agent"; then
+    warning "⚠️ Found old crontab entries, removing them..."
+    crontab -l 2>/dev/null | grep -v "patchmon-agent" | crontab -
+    info "📋 Removed old crontab entries"
+fi
+
+# Reload systemd and enable/start the service
+systemctl daemon-reload
+systemctl enable patchmon-agent.service
+systemctl start patchmon-agent.service
+
+# Check if service started successfully
+if systemctl is-active --quiet patchmon-agent.service; then
+    success "✅ PatchMon Agent service started successfully"
+    info "🔗 WebSocket connection established"
+else
+    warning "⚠️ Service may have failed to start. Check status with: systemctl status patchmon-agent"
 fi
 
 # Installation complete
```
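To confirm the new unit behaves as configured (Restart=always, RestartSec=10, journald logging), standard systemd tooling is enough; a small sketch:

```bash
# Is the agent running, and what restart policy did the unit pick up?
systemctl is-active patchmon-agent.service
systemctl show patchmon-agent.service -p Restart -p RestartUSec

# Tail the agent's journald output (SyslogIdentifier=patchmon-agent)
journalctl -u patchmon-agent.service -n 20 --no-pager
```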
```diff
@@ -406,14 +530,16 @@ success "🎉 PatchMon Agent installation completed successfully!"
 echo ""
 echo -e "${GREEN}📋 Installation Summary:${NC}"
 echo " • Configuration directory: /etc/patchmon"
-echo " • Agent installed: /usr/local/bin/patchmon-agent.sh"
+echo " • Agent binary installed: /usr/local/bin/patchmon-agent"
+echo " • Architecture: $ARCHITECTURE"
 echo " • Dependencies installed: jq, curl, bc"
-echo " • Automated updates configured via crontab"
+echo " • Systemd service configured and running"
 echo " • API credentials configured and tested"
-echo " • Update schedule managed by agent"
+echo " • WebSocket connection established"
+echo " • Logs directory: /etc/patchmon/logs"
 
 # Check for moved files and show them
-MOVED_FILES=$(ls /etc/patchmon/credentials.backup.* /usr/local/bin/patchmon-agent.sh.backup.* /var/log/patchmon-agent.log.old.* 2>/dev/null || true)
+MOVED_FILES=$(ls /etc/patchmon/credentials.yml.backup.* /etc/patchmon/config.yml.backup.* /usr/local/bin/patchmon-agent.backup.* /etc/patchmon/logs/patchmon-agent.log.old.* /usr/local/bin/patchmon-agent.sh.backup.* /etc/patchmon/credentials.backup.* 2>/dev/null || true)
 if [[ -n "$MOVED_FILES" ]]; then
     echo ""
     echo -e "${YELLOW}📋 Files Moved for Fresh Installation:${NC}"
```
```diff
@@ -426,8 +552,11 @@ fi
 
 echo ""
 echo -e "${BLUE}🔧 Management Commands:${NC}"
-echo " • Test connection: /usr/local/bin/patchmon-agent.sh test"
-echo " • Manual update: /usr/local/bin/patchmon-agent.sh update"
-echo " • Check status: /usr/local/bin/patchmon-agent.sh diagnostics"
+echo " • Test connection: /usr/local/bin/patchmon-agent ping"
+echo " • Manual report: /usr/local/bin/patchmon-agent report"
+echo " • Check status: /usr/local/bin/patchmon-agent diagnostics"
+echo " • Service status: systemctl status patchmon-agent"
+echo " • Service logs: journalctl -u patchmon-agent -f"
+echo " • Restart service: systemctl restart patchmon-agent"
 echo ""
 success "✅ Your system is now being monitored by PatchMon!"
```
Proxmox LXC auto-enrollment script changes:

```diff
@@ -153,6 +153,32 @@ while IFS= read -r line; do
     ip_address=$(timeout 5 pct exec "$vmid" -- hostname -I 2>/dev/null </dev/null | awk '{print $1}' || echo "unknown")
     os_info=$(timeout 5 pct exec "$vmid" -- cat /etc/os-release 2>/dev/null </dev/null | grep "^PRETTY_NAME=" | cut -d'"' -f2 || echo "unknown")
 
+    # Detect container architecture
+    debug "  Detecting container architecture..."
+    arch_raw=$(timeout 5 pct exec "$vmid" -- uname -m 2>/dev/null </dev/null || echo "unknown")
+
+    # Map architecture to supported values
+    case "$arch_raw" in
+        "x86_64")
+            architecture="amd64"
+            ;;
+        "i386"|"i686")
+            architecture="386"
+            ;;
+        "aarch64"|"arm64")
+            architecture="arm64"
+            ;;
+        "armv7l"|"armv6l"|"arm")
+            architecture="arm"
+            ;;
+        *)
+            warn "  ⚠ Unknown architecture '$arch_raw', defaulting to amd64"
+            architecture="amd64"
+            ;;
+    esac
+
+    debug "  Detected architecture: $arch_raw -> $architecture"
+
     # Get machine ID from container
     machine_id=$(timeout 5 pct exec "$vmid" -- bash -c "cat /etc/machine-id 2>/dev/null || cat /var/lib/dbus/machine-id 2>/dev/null || echo 'proxmox-lxc-$vmid-'$(cat /proc/sys/kernel/random/uuid)" </dev/null 2>/dev/null || echo "proxmox-lxc-$vmid-unknown")
 
@@ -161,6 +187,7 @@ while IFS= read -r line; do
     info "  Hostname: $hostname"
     info "  IP Address: $ip_address"
     info "  OS: $os_info"
+    info "  Architecture: $architecture ($arch_raw)"
     info "  Machine ID: ${machine_id:0:16}..."
 
     if [[ "$DRY_RUN" == "true" ]]; then
@@ -244,12 +271,13 @@ while IFS= read -r line; do
     # Install PatchMon agent in container
     info "  Installing PatchMon agent..."
 
-    # Build install URL with force flag if enabled
-    install_url="$PATCHMON_URL/api/v1/hosts/install"
+    # Build install URL with force flag and architecture if enabled
+    install_url="$PATCHMON_URL/api/v1/hosts/install?arch=$architecture"
     if [[ "$FORCE_INSTALL" == "true" ]]; then
-        install_url="$install_url?force=true"
+        install_url="$install_url&force=true"
         info "  Using force mode - will bypass broken packages"
     fi
+    info "  Using architecture: $architecture"
 
     # Reset exit code for this container
     install_exit_code=0
@@ -400,7 +428,7 @@ if [[ ${#dpkg_error_containers[@]} -gt 0 ]]; then
         -H \"X-API-ID: $api_id\" \
         -H \"X-API-KEY: $api_key\" \
         -o patchmon-install.sh \
-        '$PATCHMON_URL/api/v1/hosts/install' && \
+        '$PATCHMON_URL/api/v1/hosts/install?arch=$architecture' && \
         bash patchmon-install.sh && \
         rm -f patchmon-install.sh
     " 2>&1 </dev/null) || install_exit_code=$?
```
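With the query-string fix above, the enrollment URL for, say, an arm64 container with force mode enabled is built as follows (the hostname is illustrative):

```bash
PATCHMON_URL="https://patchmon.example.com"   # placeholder
architecture="arm64"
FORCE_INSTALL="true"

install_url="$PATCHMON_URL/api/v1/hosts/install?arch=$architecture"
if [[ "$FORCE_INSTALL" == "true" ]]; then
    install_url="$install_url&force=true"
fi
echo "$install_url"
# https://patchmon.example.com/api/v1/hosts/install?arch=arm64&force=true
```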
Backend environment example changes:

```diff
@@ -3,6 +3,13 @@ DATABASE_URL="postgresql://patchmon_user:p@tchm0n_p@55@localhost:5432/patchmon_d
 PM_DB_CONN_MAX_ATTEMPTS=30
 PM_DB_CONN_WAIT_INTERVAL=2
 
+# Redis Configuration
+REDIS_HOST=localhost
+REDIS_PORT=6379
+REDIS_USER=your-redis-username-here
+REDIS_PASSWORD=your-redis-password-here
+REDIS_DB=0
+
 # Server Configuration
 PORT=3001
 NODE_ENV=development
```
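A hedged connectivity check against these Redis settings; it assumes redis-cli 6+ (needed for --user/ACL authentication) and simply expects PONG back.

```bash
# Values mirror the env example entries; replace the placeholders with real credentials
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_USER=your-redis-username-here
REDIS_PASSWORD=your-redis-password-here
REDIS_DB=0

redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" \
    --user "$REDIS_USER" -a "$REDIS_PASSWORD" -n "$REDIS_DB" ping
```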
Backend package.json changes (patchmon-backend):

```diff
@@ -1,6 +1,6 @@
 {
   "name": "patchmon-backend",
-  "version": "1.2.7",
+  "version": "1.3.0",
   "description": "Backend API for Linux Patch Monitoring System",
   "license": "AGPL-3.0",
   "main": "src/server.js",
@@ -14,20 +14,27 @@
     "db:studio": "prisma studio"
   },
   "dependencies": {
+    "@bull-board/api": "^6.13.1",
+    "@bull-board/express": "^6.13.1",
     "@prisma/client": "^6.1.0",
+    "axios": "^1.7.9",
     "bcryptjs": "^2.4.3",
+    "bullmq": "^5.61.0",
+    "cookie-parser": "^1.4.7",
     "cors": "^2.8.5",
     "dotenv": "^16.4.7",
     "express": "^4.21.2",
     "express-rate-limit": "^7.5.0",
     "express-validator": "^7.2.0",
     "helmet": "^8.0.0",
+    "ioredis": "^5.8.1",
     "jsonwebtoken": "^9.0.2",
     "moment": "^2.30.1",
     "qrcode": "^1.5.4",
     "speakeasy": "^2.0.0",
     "uuid": "^11.0.3",
-    "winston": "^3.17.0"
+    "winston": "^3.17.0",
+    "ws": "^8.18.0"
   },
   "devDependencies": {
     "@types/bcryptjs": "^2.4.6",
```
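A sketch of confirming that the new queue and WebSocket dependencies resolve after pulling this change; run it from the backend package directory (path assumed).

```bash
npm install
npm ls bullmq ioredis ws @bull-board/api @bull-board/express
```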
New database migration (filename not shown, 64 lines):

```sql
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp

DO $$
DECLARE
    table_exists boolean := false;
    migration_exists boolean := false;
BEGIN
    -- Check if user_sessions table exists
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) INTO table_exists;

    -- Check if the migration record already exists
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
    ) INTO migration_exists;

    -- If table exists but no migration record, create one
    IF table_exists AND NOT migration_exists THEN
        RAISE NOTICE 'Table exists but no migration record found - creating migration record for 1.2.7 upgrade';

        -- Insert a successful migration record for the existing table
        INSERT INTO _prisma_migrations (
            id,
            checksum,
            finished_at,
            migration_name,
            logs,
            rolled_back_at,
            started_at,
            applied_steps_count
        ) VALUES (
            gen_random_uuid()::text,
            '', -- Empty checksum since we're reconciling
            NOW(),
            '20251005000000_add_user_sessions',
            'Reconciled from 1.2.7 - table already exists',
            NULL,
            NOW(),
            1
        );

        RAISE NOTICE 'Migration record created for existing table';
    ELSIF table_exists AND migration_exists THEN
        RAISE NOTICE 'Table exists and migration record exists - no action needed';
    ELSE
        RAISE NOTICE 'Table does not exist - migration will proceed normally';
    END IF;

    -- Additional check: If we have any old migration names, update them
    IF EXISTS (SELECT 1 FROM _prisma_migrations WHERE migration_name = 'add_user_sessions') THEN
        RAISE NOTICE 'Found old migration name - updating to new format';
        UPDATE _prisma_migrations
        SET migration_name = '20251005000000_add_user_sessions'
        WHERE migration_name = 'add_user_sessions';
        RAISE NOTICE 'Old migration name updated';
    END IF;

END $$;
```
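To see what the reconciliation actually recorded, query the same _prisma_migrations table the migration manipulates; DATABASE_URL as in the environment example above (sketch only).

```bash
psql "$DATABASE_URL" -c \
  "SELECT migration_name, started_at, finished_at, applied_steps_count
   FROM _prisma_migrations
   WHERE migration_name IN ('add_user_sessions', '20251005000000_add_user_sessions');"
```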
New database migration (filename not shown, 96 lines):

```sql
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp

DO $$
DECLARE
    old_migration_exists boolean := false;
    table_exists boolean := false;
    failed_migration_exists boolean := false;
BEGIN
    -- Check if the old migration name exists
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = 'add_user_sessions'
    ) INTO old_migration_exists;

    -- Check if user_sessions table exists
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) INTO table_exists;

    -- Check if there's a failed migration attempt
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
        AND finished_at IS NULL
    ) INTO failed_migration_exists;

    -- Scenario 1: Old migration exists, table exists, no failed migration
    -- This means 1.2.7 was installed and we need to update the migration name
    IF old_migration_exists AND table_exists AND NOT failed_migration_exists THEN
        RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';

        -- Update the old migration name to the new timestamped version
        UPDATE _prisma_migrations
        SET migration_name = '20251005000000_add_user_sessions'
        WHERE migration_name = 'add_user_sessions';

        RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
    END IF;

    -- Scenario 2: Failed migration exists (upgrade attempt gone wrong)
    IF failed_migration_exists THEN
        RAISE NOTICE 'Found failed migration attempt - cleaning up';

        -- If table exists, it means the migration partially succeeded
        IF table_exists THEN
            RAISE NOTICE 'Table exists - marking migration as applied';

            -- Delete the failed migration record
            DELETE FROM _prisma_migrations
            WHERE migration_name = '20251005000000_add_user_sessions'
            AND finished_at IS NULL;

            -- Insert a successful migration record
            INSERT INTO _prisma_migrations (
                id,
                checksum,
                finished_at,
                migration_name,
                logs,
                rolled_back_at,
                started_at,
                applied_steps_count
            ) VALUES (
                gen_random_uuid()::text,
                '', -- Empty checksum since we're reconciling
                NOW(),
                '20251005000000_add_user_sessions',
                NULL,
                NULL,
                NOW(),
                1
            );

            RAISE NOTICE 'Migration marked as successfully applied';
        ELSE
            RAISE NOTICE 'Table does not exist - removing failed migration to allow retry';

            -- Just delete the failed migration to allow it to retry
            DELETE FROM _prisma_migrations
            WHERE migration_name = '20251005000000_add_user_sessions'
            AND finished_at IS NULL;

            RAISE NOTICE 'Failed migration removed - will retry on next migration run';
        END IF;
    END IF;

    -- Scenario 3: Everything is clean (fresh install or already reconciled)
    IF NOT old_migration_exists AND NOT failed_migration_exists THEN
        RAISE NOTICE 'No migration reconciliation needed';
    END IF;

END $$;
```
@@ -0,0 +1,106 @@ (new migration file)
-- CreateTable (with existence check for 1.2.7 compatibility)
DO $$
BEGIN
  -- Check if table already exists (from 1.2.7 installation)
  IF NOT EXISTS (
    SELECT 1 FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_name = 'user_sessions'
  ) THEN
    -- Table doesn't exist, create it
    CREATE TABLE "user_sessions" (
      "id" TEXT NOT NULL,
      "user_id" TEXT NOT NULL,
      "refresh_token" TEXT NOT NULL,
      "access_token_hash" TEXT,
      "ip_address" TEXT,
      "user_agent" TEXT,
      "last_activity" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
      "expires_at" TIMESTAMP(3) NOT NULL,
      "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
      "is_revoked" BOOLEAN NOT NULL DEFAULT false,

      CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
    );

    RAISE NOTICE 'Created user_sessions table';
  ELSE
    RAISE NOTICE 'user_sessions table already exists, skipping creation';
  END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM pg_indexes
    WHERE tablename = 'user_sessions'
    AND indexname = 'user_sessions_refresh_token_key'
  ) THEN
    CREATE UNIQUE INDEX "user_sessions_refresh_token_key" ON "user_sessions"("refresh_token");
    RAISE NOTICE 'Created user_sessions_refresh_token_key index';
  ELSE
    RAISE NOTICE 'user_sessions_refresh_token_key index already exists, skipping';
  END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM pg_indexes
    WHERE tablename = 'user_sessions'
    AND indexname = 'user_sessions_user_id_idx'
  ) THEN
    CREATE INDEX "user_sessions_user_id_idx" ON "user_sessions"("user_id");
    RAISE NOTICE 'Created user_sessions_user_id_idx index';
  ELSE
    RAISE NOTICE 'user_sessions_user_id_idx index already exists, skipping';
  END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM pg_indexes
    WHERE tablename = 'user_sessions'
    AND indexname = 'user_sessions_refresh_token_idx'
  ) THEN
    CREATE INDEX "user_sessions_refresh_token_idx" ON "user_sessions"("refresh_token");
    RAISE NOTICE 'Created user_sessions_refresh_token_idx index';
  ELSE
    RAISE NOTICE 'user_sessions_refresh_token_idx index already exists, skipping';
  END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM pg_indexes
    WHERE tablename = 'user_sessions'
    AND indexname = 'user_sessions_expires_at_idx'
  ) THEN
    CREATE INDEX "user_sessions_expires_at_idx" ON "user_sessions"("expires_at");
    RAISE NOTICE 'Created user_sessions_expires_at_idx index';
  ELSE
    RAISE NOTICE 'user_sessions_expires_at_idx index already exists, skipping';
  END IF;
END $$;

-- AddForeignKey (with existence check)
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM information_schema.table_constraints
    WHERE table_name = 'user_sessions'
    AND constraint_name = 'user_sessions_user_id_fkey'
  ) THEN
    ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
    RAISE NOTICE 'Created user_sessions_user_id_fkey foreign key';
  ELSE
    RAISE NOTICE 'user_sessions_user_id_fkey foreign key already exists, skipping';
  END IF;
END $$;
@@ -0,0 +1,94 @@ (new migration file)
-- CreateTable
CREATE TABLE "docker_images" (
    "id" TEXT NOT NULL,
    "repository" TEXT NOT NULL,
    "tag" TEXT NOT NULL DEFAULT 'latest',
    "image_id" TEXT NOT NULL,
    "digest" TEXT,
    "size_bytes" BIGINT,
    "source" TEXT NOT NULL DEFAULT 'docker-hub',
    "created_at" TIMESTAMP(3) NOT NULL,
    "last_pulled" TIMESTAMP(3),
    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "docker_images_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "docker_containers" (
    "id" TEXT NOT NULL,
    "host_id" TEXT NOT NULL,
    "container_id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "image_id" TEXT,
    "image_name" TEXT NOT NULL,
    "image_tag" TEXT NOT NULL DEFAULT 'latest',
    "status" TEXT NOT NULL,
    "state" TEXT,
    "ports" JSONB,
    "created_at" TIMESTAMP(3) NOT NULL,
    "started_at" TIMESTAMP(3),
    "updated_at" TIMESTAMP(3) NOT NULL,
    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "docker_containers_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "docker_image_updates" (
    "id" TEXT NOT NULL,
    "image_id" TEXT NOT NULL,
    "current_tag" TEXT NOT NULL,
    "available_tag" TEXT NOT NULL,
    "is_security_update" BOOLEAN NOT NULL DEFAULT false,
    "severity" TEXT,
    "changelog_url" TEXT,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "docker_image_updates_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "docker_images_repository_idx" ON "docker_images"("repository");

-- CreateIndex
CREATE INDEX "docker_images_source_idx" ON "docker_images"("source");

-- CreateIndex
CREATE INDEX "docker_images_repository_tag_idx" ON "docker_images"("repository", "tag");

-- CreateIndex
CREATE UNIQUE INDEX "docker_images_repository_tag_image_id_key" ON "docker_images"("repository", "tag", "image_id");

-- CreateIndex
CREATE INDEX "docker_containers_host_id_idx" ON "docker_containers"("host_id");

-- CreateIndex
CREATE INDEX "docker_containers_image_id_idx" ON "docker_containers"("image_id");

-- CreateIndex
CREATE INDEX "docker_containers_status_idx" ON "docker_containers"("status");

-- CreateIndex
CREATE INDEX "docker_containers_name_idx" ON "docker_containers"("name");

-- CreateIndex
CREATE UNIQUE INDEX "docker_containers_host_id_container_id_key" ON "docker_containers"("host_id", "container_id");

-- CreateIndex
CREATE INDEX "docker_image_updates_image_id_idx" ON "docker_image_updates"("image_id");

-- CreateIndex
CREATE INDEX "docker_image_updates_is_security_update_idx" ON "docker_image_updates"("is_security_update");

-- CreateIndex
CREATE UNIQUE INDEX "docker_image_updates_image_id_available_tag_key" ON "docker_image_updates"("image_id", "available_tag");

-- AddForeignKey
ALTER TABLE "docker_containers" ADD CONSTRAINT "docker_containers_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "docker_image_updates" ADD CONSTRAINT "docker_image_updates_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,40 @@ (new migration file)
-- CreateTable
CREATE TABLE "job_history" (
    "id" TEXT NOT NULL,
    "job_id" TEXT NOT NULL,
    "queue_name" TEXT NOT NULL,
    "job_name" TEXT NOT NULL,
    "host_id" TEXT,
    "api_id" TEXT,
    "status" TEXT NOT NULL,
    "attempt_number" INTEGER NOT NULL DEFAULT 1,
    "error_message" TEXT,
    "output" JSONB,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,
    "completed_at" TIMESTAMP(3),

    CONSTRAINT "job_history_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "job_history_job_id_idx" ON "job_history"("job_id");

-- CreateIndex
CREATE INDEX "job_history_queue_name_idx" ON "job_history"("queue_name");

-- CreateIndex
CREATE INDEX "job_history_host_id_idx" ON "job_history"("host_id");

-- CreateIndex
CREATE INDEX "job_history_api_id_idx" ON "job_history"("api_id");

-- CreateIndex
CREATE INDEX "job_history_status_idx" ON "job_history"("status");

-- CreateIndex
CREATE INDEX "job_history_created_at_idx" ON "job_history"("created_at");

-- AddForeignKey
ALTER TABLE "job_history" ADD CONSTRAINT "job_history_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE SET NULL ON UPDATE CASCADE;
@@ -0,0 +1,43 @@ (new migration file)
-- CreateTable
CREATE TABLE "host_group_memberships" (
    "id" TEXT NOT NULL,
    "host_id" TEXT NOT NULL,
    "host_group_id" TEXT NOT NULL,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "host_group_memberships_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "host_group_memberships_host_id_host_group_id_key" ON "host_group_memberships"("host_id", "host_group_id");

-- CreateIndex
CREATE INDEX "host_group_memberships_host_id_idx" ON "host_group_memberships"("host_id");

-- CreateIndex
CREATE INDEX "host_group_memberships_host_group_id_idx" ON "host_group_memberships"("host_group_id");

-- Migrate existing data from hosts.host_group_id to host_group_memberships
INSERT INTO "host_group_memberships" ("id", "host_id", "host_group_id", "created_at")
SELECT
    gen_random_uuid()::text as "id",
    "id" as "host_id",
    "host_group_id" as "host_group_id",
    CURRENT_TIMESTAMP as "created_at"
FROM "hosts"
WHERE "host_group_id" IS NOT NULL;

-- AddForeignKey
ALTER TABLE "host_group_memberships" ADD CONSTRAINT "host_group_memberships_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "host_group_memberships" ADD CONSTRAINT "host_group_memberships_host_group_id_fkey" FOREIGN KEY ("host_group_id") REFERENCES "host_groups"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- DropForeignKey
ALTER TABLE "hosts" DROP CONSTRAINT IF EXISTS "hosts_host_group_id_fkey";

-- DropIndex
DROP INDEX IF EXISTS "hosts_host_group_id_idx";

-- AlterTable
ALTER TABLE "hosts" DROP COLUMN "host_group_id";
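With `hosts.host_group_id` gone, group lookups go through the new join table. A minimal sketch of the equivalent read using the Prisma models added later in this diff — the relation names come from that schema, the function name and require path are illustrative:

```js
const { getPrismaClient } = require("../config/prisma");

const prisma = getPrismaClient();

// Fetch a host together with every group it now belongs to (many-to-many).
async function getHostWithGroups(hostId) {
  return prisma.hosts.findUnique({
    where: { id: hostId },
    include: {
      host_group_memberships: {
        include: { host_groups: true },
      },
    },
  });
}
```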
@@ -1,31 +0,0 @@ (old user_sessions migration file removed; replaced by the guarded version above)
-- CreateTable
CREATE TABLE "user_sessions" (
    "id" TEXT NOT NULL,
    "user_id" TEXT NOT NULL,
    "refresh_token" TEXT NOT NULL,
    "access_token_hash" TEXT,
    "ip_address" TEXT,
    "user_agent" TEXT,
    "last_activity" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "expires_at" TIMESTAMP(3) NOT NULL,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "is_revoked" BOOLEAN NOT NULL DEFAULT false,

    CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "user_sessions_refresh_token_key" ON "user_sessions"("refresh_token");

-- CreateIndex
CREATE INDEX "user_sessions_user_id_idx" ON "user_sessions"("user_id");

-- CreateIndex
CREATE INDEX "user_sessions_refresh_token_idx" ON "user_sessions"("refresh_token");

-- CreateIndex
CREATE INDEX "user_sessions_expires_at_idx" ON "user_sessions"("expires_at");

-- AddForeignKey
ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -27,10 +27,23 @@ model host_groups {
   color                  String?  @default("#3B82F6")
   created_at             DateTime @default(now())
   updated_at             DateTime
-  hosts                  hosts[]
+  host_group_memberships host_group_memberships[]
   auto_enrollment_tokens auto_enrollment_tokens[]
 }
 
+model host_group_memberships {
+  id            String      @id
+  host_id       String
+  host_group_id String
+  created_at    DateTime    @default(now())
+  hosts         hosts       @relation(fields: [host_id], references: [id], onDelete: Cascade)
+  host_groups   host_groups @relation(fields: [host_group_id], references: [id], onDelete: Cascade)
+
+  @@unique([host_id, host_group_id])
+  @@index([host_id])
+  @@index([host_group_id])
+}
+
 model host_packages {
   id      String @id
   host_id String
@@ -67,40 +80,40 @@ model host_repositories {
 }
 
 model hosts {
   id                     String   @id
   machine_id             String   @unique
   friendly_name          String
   ip                     String?
   os_type                String
   os_version             String
   architecture           String?
   last_update            DateTime @default(now())
   status                 String   @default("active")
   created_at             DateTime @default(now())
   updated_at             DateTime
   api_id                 String   @unique
   api_key                String   @unique
-  host_group_id          String?
   agent_version          String?
   auto_update            Boolean  @default(true)
   cpu_cores              Int?
   cpu_model              String?
   disk_details           Json?
   dns_servers            Json?
   gateway_ip             String?
   hostname               String?
   kernel_version         String?
   load_average           Json?
   network_interfaces     Json?
   ram_installed          Int?
   selinux_status         String?
   swap_size              Int?
   system_uptime          String?
   notes                  String?
   host_packages          host_packages[]
   host_repositories      host_repositories[]
+  host_group_memberships host_group_memberships[]
-  host_groups            host_groups? @relation(fields: [host_group_id], references: [id])
   update_history         update_history[]
+  job_history            job_history[]
 
   @@index([machine_id])
   @@index([friendly_name])
@@ -166,7 +179,7 @@ model settings {
   updated_at        DateTime
   update_interval   Int      @default(60)
   auto_update       Boolean  @default(false)
-  github_repo_url   String   @default("git@github.com:9technologygroup/patchmon.net.git")
+  github_repo_url   String   @default("https://github.com/PatchMon/PatchMon.git")
   ssh_key_path      String?
   repository_type   String   @default("public")
   last_update_check DateTime?
@@ -262,3 +275,89 @@ model auto_enrollment_tokens {
   @@index([token_key])
   @@index([is_active])
 }
+
+model docker_containers {
+  id            String         @id
+  host_id       String
+  container_id  String
+  name          String
+  image_id      String?
+  image_name    String
+  image_tag     String         @default("latest")
+  status        String
+  state         String?
+  ports         Json?
+  created_at    DateTime
+  started_at    DateTime?
+  updated_at    DateTime
+  last_checked  DateTime       @default(now())
+  docker_images docker_images? @relation(fields: [image_id], references: [id], onDelete: SetNull)
+
+  @@unique([host_id, container_id])
+  @@index([host_id])
+  @@index([image_id])
+  @@index([status])
+  @@index([name])
+}
+
+model docker_images {
+  id                   String                 @id
+  repository           String
+  tag                  String                 @default("latest")
+  image_id             String
+  digest               String?
+  size_bytes           BigInt?
+  source               String                 @default("docker-hub")
+  created_at           DateTime
+  last_pulled          DateTime?
+  last_checked         DateTime               @default(now())
+  updated_at           DateTime
+  docker_containers    docker_containers[]
+  docker_image_updates docker_image_updates[]
+
+  @@unique([repository, tag, image_id])
+  @@index([repository])
+  @@index([source])
+  @@index([repository, tag])
+}
+
+model docker_image_updates {
+  id                 String        @id
+  image_id           String
+  current_tag        String
+  available_tag      String
+  is_security_update Boolean       @default(false)
+  severity           String?
+  changelog_url      String?
+  created_at         DateTime      @default(now())
+  updated_at         DateTime
+  docker_images      docker_images @relation(fields: [image_id], references: [id], onDelete: Cascade)
+
+  @@unique([image_id, available_tag])
+  @@index([image_id])
+  @@index([is_security_update])
+}
+
+model job_history {
+  id             String    @id
+  job_id         String
+  queue_name     String
+  job_name       String
+  host_id        String?
+  api_id         String?
+  status         String
+  attempt_number Int       @default(1)
+  error_message  String?
+  output         Json?
+  created_at     DateTime  @default(now())
+  updated_at     DateTime
+  completed_at   DateTime?
+  hosts          hosts?    @relation(fields: [host_id], references: [id], onDelete: SetNull)
+
+  @@index([job_id])
+  @@index([queue_name])
+  @@index([host_id])
+  @@index([api_id])
+  @@index([status])
+  @@index([created_at])
+}
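A short sketch of how the new Docker models can be queried once the schema above is migrated, for example listing images with a pending security update together with the containers still running them. Field and relation names come from the schema; the query itself and the `"running"` status value (whatever the agent reports) are illustrative:

```js
const { getPrismaClient } = require("../config/prisma");

const prisma = getPrismaClient();

// Images with at least one security-flagged update, plus their running containers.
async function imagesNeedingSecurityUpdates() {
  return prisma.docker_images.findMany({
    where: {
      docker_image_updates: { some: { is_security_update: true } },
    },
    include: {
      docker_image_updates: { where: { is_security_update: true } },
      docker_containers: { where: { status: "running" } },
    },
  });
}
```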
@@ -1,6 +1,6 @@
 /**
- * Database configuration for multiple instances
- * Optimizes connection pooling to prevent "too many connections" errors
+ * Centralized Prisma Client Singleton
+ * Prevents multiple Prisma clients from creating connection leaks
  */
 
 const { PrismaClient } = require("@prisma/client");
@@ -26,22 +26,43 @@ function getOptimizedDatabaseUrl() {
   return url.toString();
 }
 
-// Create optimized Prisma client
-function createPrismaClient() {
-  const optimizedUrl = getOptimizedDatabaseUrl();
-
-  return new PrismaClient({
-    datasources: {
-      db: {
-        url: optimizedUrl,
-      },
-    },
-    log:
-      process.env.PRISMA_LOG_QUERIES === "true"
-        ? ["query", "info", "warn", "error"]
-        : ["warn", "error"],
-    errorFormat: "pretty",
-  });
+// Singleton Prisma client instance
+let prismaInstance = null;
+
+function getPrismaClient() {
+  if (!prismaInstance) {
+    const optimizedUrl = getOptimizedDatabaseUrl();
+
+    prismaInstance = new PrismaClient({
+      datasources: {
+        db: {
+          url: optimizedUrl,
+        },
+      },
+      log:
+        process.env.PRISMA_LOG_QUERIES === "true"
+          ? ["query", "info", "warn", "error"]
+          : ["warn", "error"],
+      errorFormat: "pretty",
+    });
+
+    // Handle graceful shutdown
+    process.on("beforeExit", async () => {
+      await prismaInstance.$disconnect();
+    });
+
+    process.on("SIGINT", async () => {
+      await prismaInstance.$disconnect();
+      process.exit(0);
+    });
+
+    process.on("SIGTERM", async () => {
+      await prismaInstance.$disconnect();
+      process.exit(0);
+    });
+  }
+
+  return prismaInstance;
 }
 
 // Connection health check
@@ -50,7 +71,7 @@ async function checkDatabaseConnection(prisma) {
     await prisma.$queryRaw`SELECT 1`;
     return true;
   } catch (error) {
-    console.error("Database connection failed:", error.message);
+    console.error("Database connection check failed:", error.message);
     return false;
   }
 }
@@ -121,9 +142,8 @@ async function disconnectPrisma(prisma, maxRetries = 3) {
 }
 
 module.exports = {
-  createPrismaClient,
+  getPrismaClient,
   checkDatabaseConnection,
   waitForDatabase,
   disconnectPrisma,
-  getOptimizedDatabaseUrl,
 };
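Every module that previously called `new PrismaClient()` now shares this single instance, which is exactly what the route and middleware hunks below switch to. A minimal usage sketch (the require path matches those hunks; the query itself is illustrative):

```js
// Anywhere in the backend (e.g. a route or service module).
const { getPrismaClient } = require("../config/prisma");

const prisma = getPrismaClient(); // same pooled client across the whole process

async function countActiveHosts() {
  // "active" is the schema default for hosts.status.
  return prisma.hosts.count({ where: { status: "active" } });
}
```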
@@ -1,12 +1,12 @@
 const jwt = require("jsonwebtoken");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const {
   validate_session,
   update_session_activity,
   is_tfa_bypassed,
 } = require("../utils/session_manager");
 
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Middleware to verify JWT token with session validation
 const authenticateToken = async (req, res, next) => {

@@ -1,5 +1,5 @@
-const { PrismaClient } = require("@prisma/client");
-const prisma = new PrismaClient();
+const { getPrismaClient } = require("../config/prisma");
+const prisma = getPrismaClient();
 
 // Permission middleware factory
 const requirePermission = (permission) => {

backend/src/routes/agentVersionRoutes.js — new file (419 lines)
@@ -0,0 +1,419 @@
const express = require("express");
const router = express.Router();
const agentVersionService = require("../services/agentVersionService");
const { authenticateToken } = require("../middleware/auth");
const { requirePermission } = require("../middleware/permissions");

// Test GitHub API connectivity
router.get(
  "/test-github",
  authenticateToken,
  requirePermission("can_manage_settings"),
  async (_req, res) => {
    try {
      const axios = require("axios");
      const response = await axios.get(
        "https://api.github.com/repos/PatchMon/PatchMon-agent/releases",
        {
          timeout: 10000,
          headers: {
            "User-Agent": "PatchMon-Server/1.0",
            Accept: "application/vnd.github.v3+json",
          },
        },
      );

      res.json({
        success: true,
        status: response.status,
        releasesFound: response.data.length,
        latestRelease: response.data[0]?.tag_name || "No releases",
        rateLimitRemaining: response.headers["x-ratelimit-remaining"],
        rateLimitLimit: response.headers["x-ratelimit-limit"],
      });
    } catch (error) {
      console.error("❌ GitHub API test failed:", error.message);
      res.status(500).json({
        success: false,
        error: error.message,
        status: error.response?.status,
        statusText: error.response?.statusText,
        rateLimitRemaining: error.response?.headers["x-ratelimit-remaining"],
        rateLimitLimit: error.response?.headers["x-ratelimit-limit"],
      });
    }
  },
);

// Get current version information
router.get("/version", authenticateToken, async (_req, res) => {
  try {
    const versionInfo = await agentVersionService.getVersionInfo();
    console.log(
      "📊 Version info response:",
      JSON.stringify(versionInfo, null, 2),
    );
    res.json(versionInfo);
  } catch (error) {
    console.error("❌ Failed to get version info:", error.message);
    res.status(500).json({
      error: "Failed to get version information",
      details: error.message,
      status: "error",
    });
  }
});

// Refresh current version by executing agent binary
router.post(
  "/version/refresh",
  authenticateToken,
  requirePermission("can_manage_settings"),
  async (_req, res) => {
    try {
      console.log("🔄 Refreshing current agent version...");
      const currentVersion = await agentVersionService.refreshCurrentVersion();
      console.log("📊 Refreshed current version:", currentVersion);
      res.json({
        success: true,
        currentVersion: currentVersion,
        message: currentVersion
          ? `Current version refreshed: ${currentVersion}`
          : "No agent binary found",
      });
    } catch (error) {
      console.error("❌ Failed to refresh current version:", error.message);
      res.status(500).json({
        success: false,
        error: "Failed to refresh current version",
        details: error.message,
      });
    }
  },
);

// Download latest update
router.post(
  "/version/download",
  authenticateToken,
  requirePermission("can_manage_settings"),
  async (_req, res) => {
    try {
      console.log("🔄 Downloading latest agent update...");
      const downloadResult = await agentVersionService.downloadLatestUpdate();
      console.log(
        "📊 Download result:",
        JSON.stringify(downloadResult, null, 2),
      );
      res.json(downloadResult);
    } catch (error) {
      console.error("❌ Failed to download latest update:", error.message);
      res.status(500).json({
        success: false,
        error: "Failed to download latest update",
        details: error.message,
      });
    }
  },
);

// Check for updates
router.post(
  "/version/check",
  authenticateToken,
  requirePermission("can_manage_settings"),
  async (_req, res) => {
    try {
      console.log("🔄 Manual update check triggered");
      const updateInfo = await agentVersionService.checkForUpdates();
      console.log(
        "📊 Update check result:",
        JSON.stringify(updateInfo, null, 2),
      );
      res.json(updateInfo);
    } catch (error) {
      console.error("❌ Failed to check for updates:", error.message);
      res.status(500).json({ error: "Failed to check for updates" });
    }
  },
);

// Get available versions
router.get("/versions", authenticateToken, async (_req, res) => {
  try {
    const versions = await agentVersionService.getAvailableVersions();
    console.log(
      "📦 Available versions response:",
      JSON.stringify(versions, null, 2),
    );
    res.json({ versions });
  } catch (error) {
    console.error("❌ Failed to get available versions:", error.message);
    res.status(500).json({ error: "Failed to get available versions" });
  }
});

// Get binary information
router.get(
  "/binary/:version/:architecture",
  authenticateToken,
  async (req, res) => {
    try {
      const { version, architecture } = req.params;
      const binaryInfo = await agentVersionService.getBinaryInfo(
        version,
        architecture,
      );
      res.json(binaryInfo);
    } catch (error) {
      console.error("❌ Failed to get binary info:", error.message);
      res.status(404).json({ error: error.message });
    }
  },
);

// Download agent binary
router.get(
  "/download/:version/:architecture",
  authenticateToken,
  async (req, res) => {
    try {
      const { version, architecture } = req.params;

      // Validate architecture
      if (!agentVersionService.supportedArchitectures.includes(architecture)) {
        return res.status(400).json({ error: "Unsupported architecture" });
      }

      await agentVersionService.serveBinary(version, architecture, res);
    } catch (error) {
      console.error("❌ Failed to serve binary:", error.message);
      res.status(500).json({ error: "Failed to serve binary" });
    }
  },
);

// Get latest binary for architecture (for agents to query)
router.get("/latest/:architecture", async (req, res) => {
  try {
    const { architecture } = req.params;

    // Validate architecture
    if (!agentVersionService.supportedArchitectures.includes(architecture)) {
      return res.status(400).json({ error: "Unsupported architecture" });
    }

    const versionInfo = await agentVersionService.getVersionInfo();

    if (!versionInfo.latestVersion) {
      return res.status(404).json({ error: "No latest version available" });
    }

    const binaryInfo = await agentVersionService.getBinaryInfo(
      versionInfo.latestVersion,
      architecture,
    );

    res.json({
      version: binaryInfo.version,
      architecture: binaryInfo.architecture,
      size: binaryInfo.size,
      hash: binaryInfo.hash,
      downloadUrl: `/api/v1/agent/download/${binaryInfo.version}/${binaryInfo.architecture}`,
    });
  } catch (error) {
    console.error("❌ Failed to get latest binary info:", error.message);
    res.status(500).json({ error: "Failed to get latest binary information" });
  }
});

// Push update notification to specific agent
router.post(
  "/notify-update/:apiId",
  authenticateToken,
  requirePermission("admin"),
  async (req, res) => {
    try {
      const { apiId } = req.params;
      const { version, force = false } = req.body;

      const versionInfo = await agentVersionService.getVersionInfo();
      const targetVersion = version || versionInfo.latestVersion;

      if (!targetVersion) {
        return res
          .status(400)
          .json({ error: "No version specified or available" });
      }

      // Import WebSocket service
      const { pushUpdateNotification } = require("../services/agentWs");

      // Push update notification via WebSocket
      pushUpdateNotification(apiId, {
        version: targetVersion,
        force,
        downloadUrl: `/api/v1/agent/latest/${req.body.architecture || "linux-amd64"}`,
        message: `Update available: ${targetVersion}`,
      });

      res.json({
        success: true,
        message: `Update notification sent to agent ${apiId}`,
        version: targetVersion,
      });
    } catch (error) {
      console.error("❌ Failed to notify agent update:", error.message);
      res.status(500).json({ error: "Failed to notify agent update" });
    }
  },
);

// Push update notification to all agents
router.post(
  "/notify-update-all",
  authenticateToken,
  requirePermission("admin"),
  async (req, res) => {
    try {
      const { version, force = false } = req.body;

      const versionInfo = await agentVersionService.getVersionInfo();
      const targetVersion = version || versionInfo.latestVersion;

      if (!targetVersion) {
        return res
          .status(400)
          .json({ error: "No version specified or available" });
      }

      // Import WebSocket service
      const { pushUpdateNotificationToAll } = require("../services/agentWs");

      // Push update notification to all connected agents
      const result = await pushUpdateNotificationToAll({
        version: targetVersion,
        force,
        message: `Update available: ${targetVersion}`,
      });

      res.json({
        success: true,
        message: `Update notification sent to ${result.notifiedCount} agents`,
        version: targetVersion,
        notifiedCount: result.notifiedCount,
        failedCount: result.failedCount,
      });
    } catch (error) {
      console.error("❌ Failed to notify all agents update:", error.message);
      res.status(500).json({ error: "Failed to notify all agents update" });
    }
  },
);

// Check if specific agent needs update and push notification
router.post(
  "/check-update/:apiId",
  authenticateToken,
  requirePermission("can_manage_settings"),
  async (req, res) => {
    try {
      const { apiId } = req.params;
      const { version, force = false } = req.body;

      if (!version) {
        return res.status(400).json({
          success: false,
          error: "Agent version is required",
        });
      }

      console.log(
        `🔍 Checking update for agent ${apiId} (version: ${version})`,
      );
      const result = await agentVersionService.checkAndPushAgentUpdate(
        apiId,
        version,
        force,
      );
      console.log(
        "📊 Agent update check result:",
        JSON.stringify(result, null, 2),
      );

      res.json({
        success: true,
        ...result,
      });
    } catch (error) {
      console.error("❌ Failed to check agent update:", error.message);
      res.status(500).json({
        success: false,
        error: "Failed to check agent update",
        details: error.message,
      });
    }
  },
);

// Push updates to all connected agents
router.post(
  "/push-updates-all",
  authenticateToken,
  requirePermission("can_manage_settings"),
  async (req, res) => {
    try {
      const { force = false } = req.body;

      console.log(`🔄 Pushing updates to all agents (force: ${force})`);
      const result = await agentVersionService.checkAndPushUpdatesToAll(force);
      console.log("📊 Bulk update result:", JSON.stringify(result, null, 2));

      res.json(result);
    } catch (error) {
      console.error("❌ Failed to push updates to all agents:", error.message);
      res.status(500).json({
        success: false,
        error: "Failed to push updates to all agents",
        details: error.message,
      });
    }
  },
);

// Agent reports its version (for automatic update checking)
router.post("/report-version", authenticateToken, async (req, res) => {
  try {
    const { apiId, version } = req.body;

    if (!apiId || !version) {
      return res.status(400).json({
        success: false,
        error: "API ID and version are required",
      });
    }

    console.log(`📊 Agent ${apiId} reported version: ${version}`);

    // Check if agent needs update and push notification if needed
    const updateResult = await agentVersionService.checkAndPushAgentUpdate(
      apiId,
      version,
    );

    res.json({
      success: true,
      message: "Version reported successfully",
      updateCheck: updateResult,
    });
  } catch (error) {
    console.error("❌ Failed to process agent version report:", error.message);
    res.status(500).json({
      success: false,
      error: "Failed to process version report",
      details: error.message,
    });
  }
});

module.exports = router;
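The `/latest/:architecture` endpoint is what an agent can poll to decide whether to self-update; as registered above, it is one of the few routes without `authenticateToken`. A minimal client-side sketch using Node 18+'s global `fetch` — the server base URL is a placeholder, while the `/api/v1/agent` prefix is taken from the download URLs the route itself returns:

```js
// Hypothetical agent-side check against the routes defined above.
async function checkLatestAgentBinary(serverUrl, architecture = "linux-amd64") {
  const res = await fetch(`${serverUrl}/api/v1/agent/latest/${architecture}`);
  if (!res.ok) {
    throw new Error(`Update check failed: ${res.status}`);
  }
  const { version, hash, downloadUrl } = await res.json();
  // Caller compares `version` against the local binary and fetches `downloadUrl` if newer.
  return { version, hash, downloadUrl };
}
```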
@@ -1,7 +1,7 @@
 const express = require("express");
 const bcrypt = require("bcryptjs");
 const jwt = require("jsonwebtoken");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { body, validationResult } = require("express-validator");
 const { authenticateToken, _requireAdmin } = require("../middleware/auth");
 const {

@@ -20,7 +20,7 @@ const {
 } = require("../utils/session_manager");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 /**
  * Parse user agent string to extract browser and OS info
@@ -1,5 +1,5 @@
 const express = require("express");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const crypto = require("node:crypto");
 const bcrypt = require("bcryptjs");
 const { body, validationResult } = require("express-validator");

@@ -8,7 +8,7 @@ const { requireManageSettings } = require("../middleware/permissions");
 const { v4: uuidv4 } = require("uuid");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Generate auto-enrollment token credentials
 const generate_auto_enrollment_token = () => {
@@ -570,22 +570,25 @@
       os_version: "unknown",
       api_id: api_id,
       api_key: api_key,
-      host_group_id: req.auto_enrollment_token.default_host_group_id,
       status: "pending",
       notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
       updated_at: new Date(),
     },
-    include: {
-      host_groups: {
-        select: {
-          id: true,
-          name: true,
-          color: true,
-        },
-      },
-    },
   });
 
+  // Create host group membership if default host group is specified
+  let hostGroupMembership = null;
+  if (req.auto_enrollment_token.default_host_group_id) {
+    hostGroupMembership = await prisma.host_group_memberships.create({
+      data: {
+        id: uuidv4(),
+        host_id: host.id,
+        host_group_id: req.auto_enrollment_token.default_host_group_id,
+        created_at: new Date(),
+      },
+    });
+  }
+
   // Update token usage stats
   await prisma.auto_enrollment_tokens.update({
     where: { id: req.auto_enrollment_token.id },

@@ -600,6 +603,19 @@
     `Auto-enrolled host: ${friendly_name} (${host.id}) via token: ${req.auto_enrollment_token.token_name}`,
   );
 
+  // Get host group details for response if membership was created
+  let hostGroup = null;
+  if (hostGroupMembership) {
+    hostGroup = await prisma.host_groups.findUnique({
+      where: { id: req.auto_enrollment_token.default_host_group_id },
+      select: {
+        id: true,
+        name: true,
+        color: true,
+      },
+    });
+  }
+
   res.status(201).json({
     message: "Host enrolled successfully",
     host: {

@@ -607,7 +623,7 @@
       friendly_name: host.friendly_name,
       api_id: api_id,
       api_key: api_key,
-      host_group: host.host_groups,
+      host_group: hostGroup,
       status: host.status,
     },
   });
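Because the host row and its group membership are now two separate writes, a failure between them could leave an enrolled host without its default group. One way to make the pair atomic is an interactive Prisma transaction — this is a hedged sketch of an alternative, not what the change above does; the function name and arguments are placeholders:

```js
const { v4: uuidv4 } = require("uuid");
const { getPrismaClient } = require("../config/prisma");

const prisma = getPrismaClient();

// Sketch: create the host and its optional default-group membership in one transaction.
async function enrollHostAtomically(hostData, defaultHostGroupId) {
  return prisma.$transaction(async (tx) => {
    const host = await tx.hosts.create({ data: hostData });
    if (defaultHostGroupId) {
      await tx.host_group_memberships.create({
        data: {
          id: uuidv4(),
          host_id: host.id,
          host_group_id: defaultHostGroupId,
          created_at: new Date(),
        },
      });
    }
    return host;
  });
}
```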
@@ -698,13 +714,24 @@ router.post(
|
|||||||
os_version: "unknown",
|
os_version: "unknown",
|
||||||
api_id: api_id,
|
api_id: api_id,
|
||||||
api_key: api_key,
|
api_key: api_key,
|
||||||
host_group_id: req.auto_enrollment_token.default_host_group_id,
|
|
||||||
status: "pending",
|
status: "pending",
|
||||||
notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
|
notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
|
||||||
updated_at: new Date(),
|
updated_at: new Date(),
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Create host group membership if default host group is specified
|
||||||
|
if (req.auto_enrollment_token.default_host_group_id) {
|
||||||
|
await prisma.host_group_memberships.create({
|
||||||
|
data: {
|
||||||
|
id: uuidv4(),
|
||||||
|
host_id: host.id,
|
||||||
|
host_group_id: req.auto_enrollment_token.default_host_group_id,
|
||||||
|
created_at: new Date(),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
results.success.push({
|
results.success.push({
|
||||||
id: host.id,
|
id: host.id,
|
||||||
friendly_name: host.friendly_name,
|
friendly_name: host.friendly_name,
|
||||||
|
|||||||
416
backend/src/routes/automationRoutes.js
Normal file
416
backend/src/routes/automationRoutes.js
Normal file
@@ -0,0 +1,416 @@
|
|||||||
|
const express = require("express");
|
||||||
|
const { queueManager, QUEUE_NAMES } = require("../services/automation");
|
||||||
|
const { getConnectedApiIds } = require("../services/agentWs");
|
||||||
|
const { authenticateToken } = require("../middleware/auth");
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
// Get all queue statistics
|
||||||
|
router.get("/stats", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await queueManager.getAllQueueStats();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: stats,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching queue stats:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch queue statistics",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get specific queue statistics
|
||||||
|
router.get("/stats/:queueName", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { queueName } = req.params;
|
||||||
|
|
||||||
|
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
|
||||||
|
return res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: "Invalid queue name",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const stats = await queueManager.getQueueStats(queueName);
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: stats,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching queue stats:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch queue statistics",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get recent jobs for a queue
|
||||||
|
router.get("/jobs/:queueName", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { queueName } = req.params;
|
||||||
|
const { limit = 10 } = req.query;
|
||||||
|
|
||||||
|
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
|
||||||
|
return res.status(400).json({
|
||||||
|
success: false,
|
||||||
|
error: "Invalid queue name",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobs = await queueManager.getRecentJobs(
|
||||||
|
queueName,
|
||||||
|
parseInt(limit, 10),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Format jobs for frontend
|
||||||
|
const formattedJobs = jobs.map((job) => ({
|
||||||
|
id: job.id,
|
||||||
|
name: job.name,
|
||||||
|
status: job.finishedOn
|
||||||
|
? job.failedReason
|
||||||
|
? "failed"
|
||||||
|
: "completed"
|
||||||
|
: "active",
|
||||||
|
progress: job.progress,
|
||||||
|
data: job.data,
|
||||||
|
returnvalue: job.returnvalue,
|
||||||
|
failedReason: job.failedReason,
|
||||||
|
processedOn: job.processedOn,
|
||||||
|
finishedOn: job.finishedOn,
|
||||||
|
createdAt: new Date(job.timestamp),
|
||||||
|
attemptsMade: job.attemptsMade,
|
||||||
|
delay: job.delay,
|
||||||
|
}));
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: formattedJobs,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching recent jobs:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to fetch recent jobs",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Trigger manual GitHub update check
|
||||||
|
router.post("/trigger/github-update", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerGitHubUpdateCheck();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "GitHub update check triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering GitHub update check:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger GitHub update check",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Trigger manual session cleanup
|
||||||
|
router.post(
|
||||||
|
"/trigger/session-cleanup",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerSessionCleanup();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "Session cleanup triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering session cleanup:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger session cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Trigger Agent Collection: enqueue report_now for connected agents only
|
||||||
|
router.post(
|
||||||
|
"/trigger/agent-collection",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
|
||||||
|
const apiIds = getConnectedApiIds();
|
||||||
|
if (!apiIds || apiIds.length === 0) {
|
||||||
|
return res.json({ success: true, data: { enqueued: 0 } });
|
||||||
|
}
|
||||||
|
const jobs = apiIds.map((apiId) => ({
|
||||||
|
name: "report_now",
|
||||||
|
data: { api_id: apiId, type: "report_now" },
|
||||||
|
opts: { attempts: 3, backoff: { type: "fixed", delay: 2000 } },
|
||||||
|
}));
|
||||||
|
await queue.addBulk(jobs);
|
||||||
|
res.json({ success: true, data: { enqueued: jobs.length } });
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering agent collection:", error);
|
||||||
|
res
|
||||||
|
.status(500)
|
||||||
|
.json({ success: false, error: "Failed to trigger agent collection" });
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Trigger manual orphaned repo cleanup
|
||||||
|
router.post(
|
||||||
|
"/trigger/orphaned-repo-cleanup",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerOrphanedRepoCleanup();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "Orphaned repository cleanup triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering orphaned repository cleanup:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger orphaned repository cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Trigger manual orphaned package cleanup
|
||||||
|
router.post(
|
||||||
|
"/trigger/orphaned-package-cleanup",
|
||||||
|
authenticateToken,
|
||||||
|
async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const job = await queueManager.triggerOrphanedPackageCleanup();
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: {
|
||||||
|
jobId: job.id,
|
||||||
|
message: "Orphaned package cleanup triggered successfully",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error triggering orphaned package cleanup:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to trigger orphaned package cleanup",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get queue health status
|
||||||
|
router.get("/health", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const stats = await queueManager.getAllQueueStats();
|
||||||
|
const totalJobs = Object.values(stats).reduce((sum, queueStats) => {
|
||||||
|
return sum + queueStats.waiting + queueStats.active + queueStats.failed;
|
||||||
|
}, 0);
|
||||||
|
|
||||||
|
const health = {
|
||||||
|
status: "healthy",
|
||||||
|
totalJobs,
|
||||||
|
queues: Object.keys(stats).length,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check for unhealthy conditions
|
||||||
|
if (totalJobs > 1000) {
|
||||||
|
health.status = "warning";
|
||||||
|
health.message = "High number of queued jobs";
|
||||||
|
}
|
||||||
|
|
||||||
|
const failedJobs = Object.values(stats).reduce((sum, queueStats) => {
|
||||||
|
return sum + queueStats.failed;
|
||||||
|
}, 0);
|
||||||
|
|
||||||
|
if (failedJobs > 10) {
|
||||||
|
health.status = "error";
|
||||||
|
health.message = "High number of failed jobs";
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
success: true,
|
||||||
|
data: health,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error checking queue health:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
success: false,
|
||||||
|
error: "Failed to check queue health",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get automation overview (for dashboard cards)
router.get("/overview", authenticateToken, async (_req, res) => {
	try {
		const stats = await queueManager.getAllQueueStats();
		const { getSettings } = require("../services/settingsService");
		const settings = await getSettings();

		// Get recent jobs for each queue to show last run times
		const recentJobs = await Promise.all([
			queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
			queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
		]);

		// Calculate overview metrics
		const overview = {
			scheduledTasks:
				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
				stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed,

			runningTasks:
				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
				stats[QUEUE_NAMES.SESSION_CLEANUP].active +
				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active,

			failedTasks:
				stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
				stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
				stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
				stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed,

			totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
				return (
					sum +
					queueStats.completed +
					queueStats.failed +
					queueStats.active +
					queueStats.waiting +
					queueStats.delayed
				);
			}, 0),

			// Automation details with last run times
			automations: [
				{
					name: "GitHub Update Check",
					queue: QUEUE_NAMES.GITHUB_UPDATE_CHECK,
					description: "Checks for new PatchMon releases",
					schedule: "Daily at midnight",
					lastRun: recentJobs[0][0]?.finishedOn
						? new Date(recentJobs[0][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[0][0]?.finishedOn || 0,
					status: recentJobs[0][0]?.failedReason
						? "Failed"
						: recentJobs[0][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
				},
				{
					name: "Session Cleanup",
					queue: QUEUE_NAMES.SESSION_CLEANUP,
					description: "Cleans up expired user sessions",
					schedule: "Every hour",
					lastRun: recentJobs[1][0]?.finishedOn
						? new Date(recentJobs[1][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[1][0]?.finishedOn || 0,
					status: recentJobs[1][0]?.failedReason
						? "Failed"
						: recentJobs[1][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.SESSION_CLEANUP],
				},
				{
					name: "Orphaned Repo Cleanup",
					queue: QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
					description: "Removes repositories with no associated hosts",
					schedule: "Daily at 2 AM",
					lastRun: recentJobs[2][0]?.finishedOn
						? new Date(recentJobs[2][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[2][0]?.finishedOn || 0,
					status: recentJobs[2][0]?.failedReason
						? "Failed"
						: recentJobs[2][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
				},
				{
					name: "Orphaned Package Cleanup",
					queue: QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
					description: "Removes packages with no associated hosts",
					schedule: "Daily at 3 AM",
					lastRun: recentJobs[3][0]?.finishedOn
						? new Date(recentJobs[3][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[3][0]?.finishedOn || 0,
					status: recentJobs[3][0]?.failedReason
						? "Failed"
						: recentJobs[3][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
				},
				{
					name: "Collect Host Statistics",
					queue: QUEUE_NAMES.AGENT_COMMANDS,
					description: "Collects package statistics from connected agents only",
					schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
					lastRun: recentJobs[4][0]?.finishedOn
						? new Date(recentJobs[4][0].finishedOn).toLocaleString()
						: "Never",
					lastRunTimestamp: recentJobs[4][0]?.finishedOn || 0,
					status: recentJobs[4][0]?.failedReason
						? "Failed"
						: recentJobs[4][0]
							? "Success"
							: "Never run",
					stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
				},
			].sort((a, b) => {
				// Sort by last run timestamp (most recent first)
				// If both have never run (timestamp 0), maintain original order
				if (a.lastRunTimestamp === 0 && b.lastRunTimestamp === 0) return 0;
				if (a.lastRunTimestamp === 0) return 1; // Never run goes to bottom
				if (b.lastRunTimestamp === 0) return -1; // Never run goes to bottom
				return b.lastRunTimestamp - a.lastRunTimestamp; // Most recent first
			}),
		};

		res.json({
			success: true,
			data: overview,
		});
	} catch (error) {
		console.error("Error fetching automation overview:", error);
		res.status(500).json({
			success: false,
			error: "Failed to fetch automation overview",
		});
	}
});

module.exports = router;
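
A minimal consumption sketch of the overview payload above, for reference only. The `/api/v1/automation` mount path and base URL are assumptions (the router mount point is not shown in this diff); the response envelope (`success`, `data`, pre-sorted `data.automations`) comes from the route itself.

// Sketch only: assumed mount path /api/v1/automation; adjust to the real prefix.
async function fetchAutomationOverview(baseUrl, token) {
	const res = await fetch(`${baseUrl}/api/v1/automation/overview`, {
		headers: { Authorization: `Bearer ${token}` },
	});
	if (!res.ok) throw new Error(`Overview request failed: ${res.status}`);
	const { data } = await res.json();
	// The server already sorts automations by most recent run, so they can be rendered as-is.
	for (const automation of data.automations) {
		console.log(`${automation.name}: ${automation.status} (last run ${automation.lastRun})`);
	}
	return data;
}
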
@@ -1,11 +1,11 @@
 const express = require("express");
 const { body, validationResult } = require("express-validator");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { authenticateToken } = require("../middleware/auth");
 const { v4: uuidv4 } = require("uuid");

 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();

 // Helper function to get user permissions based on role
 async function getUserPermissions(userRole) {
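
The hunks in this changeset swap per-file `new PrismaClient()` instances for a shared client obtained from `../config/prisma`. That module's contents are not part of this diff; the following is only a hypothetical sketch of what such a singleton typically looks like, with all internals assumed.

// Hypothetical sketch of backend/src/config/prisma.js (not shown in this diff).
const { PrismaClient } = require("@prisma/client");

let prismaInstance = null;

function getPrismaClient() {
	// Reuse one client per process so each route file does not open its own connection pool.
	if (!prismaInstance) {
		prismaInstance = new PrismaClient();
	}
	return prismaInstance;
}

module.exports = { getPrismaClient };
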
@@ -1,5 +1,5 @@
 const express = require("express");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const moment = require("moment");
 const { authenticateToken } = require("../middleware/auth");
 const {
@@ -8,9 +8,10 @@ const {
 	requireViewPackages,
 	requireViewUsers,
 } = require("../middleware/permissions");
+const { queueManager } = require("../services/automation");

 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();

 // Get dashboard statistics
 router.get(
@@ -60,9 +61,15 @@ router.get(
 				},
 			}),

-			// Total outdated packages across all hosts
-			prisma.host_packages.count({
-				where: { needs_update: true },
+			// Total unique packages that need updates
+			prisma.packages.count({
+				where: {
+					host_packages: {
+						some: {
+							needs_update: true,
+						},
+					},
+				},
 			}),

 			// Errored hosts (not updated within threshold based on update interval)
@@ -75,11 +82,15 @@
 				},
 			}),

-			// Security updates count
-			prisma.host_packages.count({
+			// Security updates count (unique packages)
+			prisma.packages.count({
 				where: {
-					needs_update: true,
-					is_security_update: true,
+					host_packages: {
+						some: {
+							needs_update: true,
+							is_security_update: true,
+						},
+					},
 				},
 			}),

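
The two hunks above change the dashboard counts from counting `host_packages` rows (one row per host/package pair) to counting distinct `packages` that have at least one matching `host_packages` row, so a package that is outdated on ten hosts now counts once. A short sketch of the difference, using only the relations shown in the diff:

// Row-level count: one hit per host/package pair (old behaviour).
const outdatedRows = await prisma.host_packages.count({
	where: { needs_update: true },
});

// Distinct-package count: each package counted once, however many hosts need it (new behaviour).
const outdatedPackages = await prisma.packages.count({
	where: { host_packages: { some: { needs_update: true } } },
});

// outdatedRows >= outdatedPackages always holds; the dashboard cards now show the second number.
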
@@ -200,11 +211,16 @@ router.get("/hosts", authenticateToken, requireViewHosts, async (_req, res) => {
 				agent_version: true,
 				auto_update: true,
 				notes: true,
-				host_groups: {
-					select: {
-						id: true,
-						name: true,
-						color: true,
+				api_id: true,
+				host_group_memberships: {
+					include: {
+						host_groups: {
+							select: {
+								id: true,
+								name: true,
+								color: true,
+							},
+						},
 					},
 				},
 				_count: {
@@ -354,11 +370,15 @@ router.get(
 			prisma.hosts.findUnique({
 				where: { id: hostId },
 				include: {
-					host_groups: {
-						select: {
-							id: true,
-							name: true,
-							color: true,
+					host_group_memberships: {
+						include: {
+							host_groups: {
+								select: {
+									id: true,
+									name: true,
+									color: true,
+								},
+							},
 						},
 					},
 					host_packages: {
@@ -413,6 +433,51 @@ router.get(
 	},
 );

+// Get agent queue status for a specific host
+router.get(
+	"/hosts/:hostId/queue",
+	authenticateToken,
+	requireViewHosts,
+	async (req, res) => {
+		try {
+			const { hostId } = req.params;
+			const { limit = 20 } = req.query;
+
+			// Get the host to find its API ID
+			const host = await prisma.hosts.findUnique({
+				where: { id: hostId },
+				select: { api_id: true, friendly_name: true },
+			});
+
+			if (!host) {
+				return res.status(404).json({ error: "Host not found" });
+			}
+
+			// Get queue jobs for this host
+			const queueData = await queueManager.getHostJobs(
+				host.api_id,
+				parseInt(limit, 10),
+			);
+
+			res.json({
+				success: true,
+				data: {
+					hostId,
+					apiId: host.api_id,
+					friendlyName: host.friendly_name,
+					...queueData,
+				},
+			});
+		} catch (error) {
+			console.error("Error fetching host queue status:", error);
+			res.status(500).json({
+				success: false,
+				error: "Failed to fetch host queue status",
+			});
+		}
+	},
+);
+
 // Get recent users ordered by last_login desc
 router.get(
 	"/recent-users",
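
A possible way to call the new per-host queue endpoint added above. The `/api/v1/dashboard` prefix is an assumption (the router's mount point is not part of this diff); the 404 behaviour and the `data` fields match the route code.

// Sketch only: assumed mount path /api/v1/dashboard.
async function fetchHostQueue(baseUrl, token, hostId, limit = 20) {
	const res = await fetch(
		`${baseUrl}/api/v1/dashboard/hosts/${hostId}/queue?limit=${limit}`,
		{ headers: { Authorization: `Bearer ${token}` } },
	);
	if (res.status === 404) throw new Error("Host not found");
	const { data } = await res.json();
	// data carries hostId, apiId, friendlyName plus whatever queueManager.getHostJobs returns.
	return data;
}
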
@@ -511,22 +576,34 @@
 					packages_count: true,
 					security_count: true,
 					total_packages: true,
+					host_id: true,
+					status: true,
 				},
 				orderBy: {
 					timestamp: "asc",
 				},
 			});

-			// Process data to show actual values (no averaging)
+			// Enhanced data validation and processing
 			const processedData = trendsData
-				.filter((record) => record.total_packages !== null) // Only include records with valid data
+				.filter((record) => {
+					// Enhanced validation
+					return (
+						record.total_packages !== null &&
+						record.total_packages >= 0 &&
+						record.packages_count >= 0 &&
+						record.security_count >= 0 &&
+						record.security_count <= record.packages_count && // Security can't exceed outdated
+						record.status === "success"
+					); // Only include successful reports
+				})
 				.map((record) => {
 					const date = new Date(record.timestamp);
 					let timeKey;

 					if (daysInt <= 1) {
-						// For hourly view, use exact timestamp
-						timeKey = date.toISOString().substring(0, 16); // YYYY-MM-DDTHH:MM
+						// For hourly view, group by hour only (not minutes)
+						timeKey = date.toISOString().substring(0, 13); // YYYY-MM-DDTHH
 					} else {
 						// For daily view, group by day
 						timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
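
The hunk above tightens record validation and buckets report timestamps into a `timeKey`: the first 13 characters of the ISO string for hourly views (`YYYY-MM-DDTHH`) or the date part for daily views. A small illustration of the bucketing:

// Hour bucket: keep "YYYY-MM-DDTHH" so multiple reports inside the same hour share one key.
const ts = new Date("2025-10-17T18:42:31Z");
console.log(ts.toISOString().substring(0, 13)); // "2025-10-17T18"

// Day bucket: keep "YYYY-MM-DD" for multi-day ranges.
console.log(ts.toISOString().split("T")[0]); // "2025-10-17"
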
@@ -537,64 +614,342 @@ router.get(
|
|||||||
total_packages: record.total_packages,
|
total_packages: record.total_packages,
|
||||||
packages_count: record.packages_count || 0,
|
packages_count: record.packages_count || 0,
|
||||||
security_count: record.security_count || 0,
|
security_count: record.security_count || 0,
|
||||||
|
host_id: record.host_id,
|
||||||
|
timestamp: record.timestamp,
|
||||||
};
|
};
|
||||||
})
|
});
|
||||||
.sort((a, b) => a.timeKey.localeCompare(b.timeKey)); // Sort by time
|
|
||||||
|
|
||||||
// Get hosts list for dropdown (always fetch for dropdown functionality)
|
// Determine if we need aggregation based on host filter
|
||||||
|
const needsAggregation =
|
||||||
|
!hostId || hostId === "all" || hostId === "undefined";
|
||||||
|
|
||||||
|
let aggregatedArray;
|
||||||
|
|
||||||
|
if (needsAggregation) {
|
||||||
|
// For "All Hosts" mode, we need to calculate the actual total packages differently
|
||||||
|
// Instead of aggregating historical data (which is per-host), we'll use the current total
|
||||||
|
// and show that as a flat line, since total packages don't change much over time
|
||||||
|
|
||||||
|
// Get the current total packages count (unique packages across all hosts)
|
||||||
|
const currentTotalPackages = await prisma.packages.count({
|
||||||
|
where: {
|
||||||
|
host_packages: {
|
||||||
|
some: {}, // At least one host has this package
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Aggregate data by timeKey when looking at "All Hosts" or no specific host
|
||||||
|
const aggregatedData = processedData.reduce((acc, item) => {
|
||||||
|
if (!acc[item.timeKey]) {
|
||||||
|
acc[item.timeKey] = {
|
||||||
|
timeKey: item.timeKey,
|
||||||
|
total_packages: currentTotalPackages, // Use current total packages
|
||||||
|
packages_count: 0,
|
||||||
|
security_count: 0,
|
||||||
|
record_count: 0,
|
||||||
|
host_ids: new Set(),
|
||||||
|
min_timestamp: item.timestamp,
|
||||||
|
max_timestamp: item.timestamp,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// For outdated and security packages: SUM (these represent counts across hosts)
|
||||||
|
acc[item.timeKey].packages_count += item.packages_count;
|
||||||
|
acc[item.timeKey].security_count += item.security_count;
|
||||||
|
|
||||||
|
acc[item.timeKey].record_count += 1;
|
||||||
|
acc[item.timeKey].host_ids.add(item.host_id);
|
||||||
|
|
||||||
|
// Track timestamp range
|
||||||
|
if (item.timestamp < acc[item.timeKey].min_timestamp) {
|
||||||
|
acc[item.timeKey].min_timestamp = item.timestamp;
|
||||||
|
}
|
||||||
|
if (item.timestamp > acc[item.timeKey].max_timestamp) {
|
||||||
|
acc[item.timeKey].max_timestamp = item.timestamp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
// Convert to array and add metadata
|
||||||
|
aggregatedArray = Object.values(aggregatedData)
|
||||||
|
.map((item) => ({
|
||||||
|
...item,
|
||||||
|
host_count: item.host_ids.size,
|
||||||
|
host_ids: Array.from(item.host_ids),
|
||||||
|
}))
|
||||||
|
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
|
||||||
|
} else {
|
||||||
|
// For specific host, show individual data points without aggregation
|
||||||
|
// But still group by timeKey to handle multiple reports from same host in same time period
|
||||||
|
const hostAggregatedData = processedData.reduce((acc, item) => {
|
||||||
|
if (!acc[item.timeKey]) {
|
||||||
|
acc[item.timeKey] = {
|
||||||
|
timeKey: item.timeKey,
|
||||||
|
total_packages: 0,
|
||||||
|
packages_count: 0,
|
||||||
|
security_count: 0,
|
||||||
|
record_count: 0,
|
||||||
|
host_ids: new Set([item.host_id]),
|
||||||
|
min_timestamp: item.timestamp,
|
||||||
|
max_timestamp: item.timestamp,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// For same host, take the latest values (not sum)
|
||||||
|
// This handles cases where a host reports multiple times in the same time period
|
||||||
|
if (item.timestamp > acc[item.timeKey].max_timestamp) {
|
||||||
|
acc[item.timeKey].total_packages = item.total_packages;
|
||||||
|
acc[item.timeKey].packages_count = item.packages_count;
|
||||||
|
acc[item.timeKey].security_count = item.security_count;
|
||||||
|
acc[item.timeKey].max_timestamp = item.timestamp;
|
||||||
|
}
|
||||||
|
|
||||||
|
acc[item.timeKey].record_count += 1;
|
||||||
|
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
// Convert to array
|
||||||
|
aggregatedArray = Object.values(hostAggregatedData)
|
||||||
|
.map((item) => ({
|
||||||
|
...item,
|
||||||
|
host_count: item.host_ids.size,
|
||||||
|
host_ids: Array.from(item.host_ids),
|
||||||
|
}))
|
||||||
|
.sort((a, b) => a.timeKey.localeCompare(b.timeKey));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle sparse data by filling missing time periods
|
||||||
|
const fillMissingPeriods = (data, daysInt) => {
|
||||||
|
const filledData = [];
|
||||||
|
const startDate = new Date();
|
||||||
|
startDate.setDate(startDate.getDate() - daysInt);
|
||||||
|
|
||||||
|
const dataMap = new Map(data.map((item) => [item.timeKey, item]));
|
||||||
|
|
||||||
|
const endDate = new Date();
|
||||||
|
const currentDate = new Date(startDate);
|
||||||
|
|
||||||
|
// Find the last known values for interpolation
|
||||||
|
let lastKnownValues = null;
|
||||||
|
if (data.length > 0) {
|
||||||
|
lastKnownValues = {
|
||||||
|
total_packages: data[0].total_packages,
|
||||||
|
packages_count: data[0].packages_count,
|
||||||
|
security_count: data[0].security_count,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
while (currentDate <= endDate) {
|
||||||
|
let timeKey;
|
||||||
|
if (daysInt <= 1) {
|
||||||
|
timeKey = currentDate.toISOString().substring(0, 13); // Hourly
|
||||||
|
currentDate.setHours(currentDate.getHours() + 1);
|
||||||
|
} else {
|
||||||
|
timeKey = currentDate.toISOString().split("T")[0]; // Daily
|
||||||
|
currentDate.setDate(currentDate.getDate() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dataMap.has(timeKey)) {
|
||||||
|
const item = dataMap.get(timeKey);
|
||||||
|
filledData.push(item);
|
||||||
|
// Update last known values
|
||||||
|
lastKnownValues = {
|
||||||
|
total_packages: item.total_packages,
|
||||||
|
packages_count: item.packages_count,
|
||||||
|
security_count: item.security_count,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// For missing periods, use the last known values (interpolation)
|
||||||
|
// This creates a continuous line instead of gaps
|
||||||
|
filledData.push({
|
||||||
|
timeKey,
|
||||||
|
total_packages: lastKnownValues?.total_packages || 0,
|
||||||
|
packages_count: lastKnownValues?.packages_count || 0,
|
||||||
|
security_count: lastKnownValues?.security_count || 0,
|
||||||
|
record_count: 0,
|
||||||
|
host_count: 0,
|
||||||
|
host_ids: [],
|
||||||
|
min_timestamp: null,
|
||||||
|
max_timestamp: null,
|
||||||
|
isInterpolated: true, // Mark as interpolated for debugging
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filledData;
|
||||||
|
};
|
||||||
|
|
||||||
|
const finalProcessedData = fillMissingPeriods(aggregatedArray, daysInt);
|
||||||
|
|
||||||
|
// Get hosts list for dropdown
|
||||||
const hostsList = await prisma.hosts.findMany({
|
const hostsList = await prisma.hosts.findMany({
|
||||||
select: {
|
select: {
|
||||||
id: true,
|
id: true,
|
||||||
friendly_name: true,
|
friendly_name: true,
|
||||||
hostname: true,
|
hostname: true,
|
||||||
|
last_update: true,
|
||||||
|
status: true,
|
||||||
},
|
},
|
||||||
orderBy: {
|
orderBy: {
|
||||||
friendly_name: "asc",
|
friendly_name: "asc",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Get current package state for offline fallback
|
||||||
|
let currentPackageState = null;
|
||||||
|
if (hostId && hostId !== "all" && hostId !== "undefined") {
|
||||||
|
// Get current package counts for specific host
|
||||||
|
const currentState = await prisma.host_packages.aggregate({
|
||||||
|
where: {
|
||||||
|
host_id: hostId,
|
||||||
|
},
|
||||||
|
_count: {
|
||||||
|
id: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get counts for boolean fields separately
|
||||||
|
const outdatedCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
host_id: hostId,
|
||||||
|
needs_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const securityCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
host_id: hostId,
|
||||||
|
is_security_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
currentPackageState = {
|
||||||
|
total_packages: currentState._count.id,
|
||||||
|
packages_count: outdatedCount,
|
||||||
|
security_count: securityCount,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// Get current package counts for all hosts
|
||||||
|
// Total packages = count of unique packages installed on at least one host
|
||||||
|
const totalPackagesCount = await prisma.packages.count({
|
||||||
|
where: {
|
||||||
|
host_packages: {
|
||||||
|
some: {}, // At least one host has this package
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get counts for boolean fields separately
|
||||||
|
const outdatedCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
needs_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const securityCount = await prisma.host_packages.count({
|
||||||
|
where: {
|
||||||
|
is_security_update: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
currentPackageState = {
|
||||||
|
total_packages: totalPackagesCount,
|
||||||
|
packages_count: outdatedCount,
|
||||||
|
security_count: securityCount,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
// Format data for chart
|
// Format data for chart
|
||||||
const chartData = {
|
const chartData = {
|
||||||
labels: [],
|
labels: [],
|
||||||
datasets: [
|
datasets: [
|
||||||
{
|
{
|
||||||
label: "Total Packages",
|
label: needsAggregation
|
||||||
|
? "Total Packages (All Hosts)"
|
||||||
|
: "Total Packages",
|
||||||
data: [],
|
data: [],
|
||||||
borderColor: "#3B82F6", // Blue
|
borderColor: "#3B82F6", // Blue
|
||||||
backgroundColor: "rgba(59, 130, 246, 0.1)",
|
backgroundColor: "rgba(59, 130, 246, 0.1)",
|
||||||
tension: 0.4,
|
tension: 0.4,
|
||||||
hidden: true, // Hidden by default
|
hidden: true, // Hidden by default
|
||||||
|
spanGaps: true, // Connect lines across missing data
|
||||||
|
pointRadius: 3,
|
||||||
|
pointHoverRadius: 5,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: "Outdated Packages",
|
label: needsAggregation
|
||||||
|
? "Total Outdated Packages"
|
||||||
|
: "Outdated Packages",
|
||||||
data: [],
|
data: [],
|
||||||
borderColor: "#F59E0B", // Orange
|
borderColor: "#F59E0B", // Orange
|
||||||
backgroundColor: "rgba(245, 158, 11, 0.1)",
|
backgroundColor: "rgba(245, 158, 11, 0.1)",
|
||||||
tension: 0.4,
|
tension: 0.4,
|
||||||
|
spanGaps: true, // Connect lines across missing data
|
||||||
|
pointRadius: 3,
|
||||||
|
pointHoverRadius: 5,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: "Security Packages",
|
label: needsAggregation
|
||||||
|
? "Total Security Packages"
|
||||||
|
: "Security Packages",
|
||||||
data: [],
|
data: [],
|
||||||
borderColor: "#EF4444", // Red
|
borderColor: "#EF4444", // Red
|
||||||
backgroundColor: "rgba(239, 68, 68, 0.1)",
|
backgroundColor: "rgba(239, 68, 68, 0.1)",
|
||||||
tension: 0.4,
|
tension: 0.4,
|
||||||
|
spanGaps: true, // Connect lines across missing data
|
||||||
|
pointRadius: 3,
|
||||||
|
pointHoverRadius: 5,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
||||||
// Process aggregated data
|
// Process aggregated data
|
||||||
processedData.forEach((item) => {
|
finalProcessedData.forEach((item) => {
|
||||||
chartData.labels.push(item.timeKey);
|
chartData.labels.push(item.timeKey);
|
||||||
chartData.datasets[0].data.push(item.total_packages);
|
chartData.datasets[0].data.push(item.total_packages);
|
||||||
chartData.datasets[1].data.push(item.packages_count);
|
chartData.datasets[1].data.push(item.packages_count);
|
||||||
chartData.datasets[2].data.push(item.security_count);
|
chartData.datasets[2].data.push(item.security_count);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
			// Calculate data quality metrics
			const dataQuality = {
				totalRecords: trendsData.length,
				validRecords: processedData.length,
				aggregatedPoints: aggregatedArray.length,
				filledPoints: finalProcessedData.length,
				recordsWithNullTotal: trendsData.filter(
					(r) => r.total_packages === null,
				).length,
				recordsWithInvalidData: trendsData.length - processedData.length,
				successfulReports: trendsData.filter((r) => r.status === "success")
					.length,
				failedReports: trendsData.filter((r) => r.status === "error").length,
			};
|
||||||
res.json({
|
res.json({
|
||||||
chartData,
|
chartData,
|
||||||
hosts: hostsList,
|
hosts: hostsList,
|
||||||
period: daysInt,
|
period: daysInt,
|
||||||
hostId: hostId || "all",
|
hostId: hostId || "all",
|
||||||
|
currentPackageState,
|
||||||
|
dataQuality,
|
||||||
|
aggregationInfo: {
|
||||||
|
hasData: aggregatedArray.length > 0,
|
||||||
|
hasGaps: finalProcessedData.some((item) => item.record_count === 0),
|
||||||
|
lastDataPoint:
|
||||||
|
aggregatedArray.length > 0
|
||||||
|
? aggregatedArray[aggregatedArray.length - 1]
|
||||||
|
: null,
|
||||||
|
aggregationMode: needsAggregation
|
||||||
|
? "sum_across_hosts"
|
||||||
|
: "individual_host_data",
|
||||||
|
explanation: needsAggregation
|
||||||
|
? "Data is summed across all hosts for each time period"
|
||||||
|
: "Data shows individual host values without cross-host aggregation",
|
||||||
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error fetching package trends:", error);
|
console.error("Error fetching package trends:", error);
|
||||||
@@ -603,4 +958,348 @@ router.get(
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Diagnostic endpoint to investigate package spikes
|
||||||
|
router.get(
|
||||||
|
"/package-spike-analysis",
|
||||||
|
authenticateToken,
|
||||||
|
requireViewHosts,
|
||||||
|
async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { date, time, hours = 2 } = req.query;
|
||||||
|
|
||||||
|
if (!date || !time) {
|
||||||
|
return res.status(400).json({
|
||||||
|
error:
|
||||||
|
"Date and time parameters are required. Format: date=2025-10-17&time=18:00",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the specific date and time
|
||||||
|
const targetDateTime = new Date(`${date}T${time}:00`);
|
||||||
|
const startTime = new Date(targetDateTime);
|
||||||
|
startTime.setHours(startTime.getHours() - parseInt(hours, 10));
|
||||||
|
const endTime = new Date(targetDateTime);
|
||||||
|
endTime.setHours(endTime.getHours() + parseInt(hours, 10));
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`Analyzing package spike around ${targetDateTime.toISOString()}`,
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
`Time range: ${startTime.toISOString()} to ${endTime.toISOString()}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get all update history records in the time window
|
||||||
|
const spikeData = await prisma.update_history.findMany({
|
||||||
|
where: {
|
||||||
|
timestamp: {
|
||||||
|
gte: startTime,
|
||||||
|
lte: endTime,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
host_id: true,
|
||||||
|
timestamp: true,
|
||||||
|
packages_count: true,
|
||||||
|
security_count: true,
|
||||||
|
total_packages: true,
|
||||||
|
status: true,
|
||||||
|
error_message: true,
|
||||||
|
execution_time: true,
|
||||||
|
payload_size_kb: true,
|
||||||
|
hosts: {
|
||||||
|
select: {
|
||||||
|
friendly_name: true,
|
||||||
|
hostname: true,
|
||||||
|
os_type: true,
|
||||||
|
os_version: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: {
|
||||||
|
timestamp: "asc",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Analyze the data
|
||||||
|
const analysis = {
|
||||||
|
timeWindow: {
|
||||||
|
start: startTime.toISOString(),
|
||||||
|
end: endTime.toISOString(),
|
||||||
|
target: targetDateTime.toISOString(),
|
||||||
|
},
|
||||||
|
totalRecords: spikeData.length,
|
||||||
|
successfulReports: spikeData.filter((r) => r.status === "success")
|
||||||
|
.length,
|
||||||
|
failedReports: spikeData.filter((r) => r.status === "error").length,
|
||||||
|
uniqueHosts: [...new Set(spikeData.map((r) => r.host_id))].length,
|
||||||
|
hosts: {},
|
||||||
|
timeline: [],
|
||||||
|
summary: {
|
||||||
|
maxPackagesCount: 0,
|
||||||
|
maxSecurityCount: 0,
|
||||||
|
maxTotalPackages: 0,
|
||||||
|
avgPackagesCount: 0,
|
||||||
|
avgSecurityCount: 0,
|
||||||
|
avgTotalPackages: 0,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Group by host and analyze each host's behavior
|
||||||
|
spikeData.forEach((record) => {
|
||||||
|
const hostId = record.host_id;
|
||||||
|
if (!analysis.hosts[hostId]) {
|
||||||
|
analysis.hosts[hostId] = {
|
||||||
|
hostInfo: record.hosts,
|
||||||
|
records: [],
|
||||||
|
summary: {
|
||||||
|
totalReports: 0,
|
||||||
|
successfulReports: 0,
|
||||||
|
failedReports: 0,
|
||||||
|
maxPackagesCount: 0,
|
||||||
|
maxSecurityCount: 0,
|
||||||
|
maxTotalPackages: 0,
|
||||||
|
avgPackagesCount: 0,
|
||||||
|
avgSecurityCount: 0,
|
||||||
|
avgTotalPackages: 0,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
analysis.hosts[hostId].records.push({
|
||||||
|
timestamp: record.timestamp,
|
||||||
|
packages_count: record.packages_count,
|
||||||
|
security_count: record.security_count,
|
||||||
|
total_packages: record.total_packages,
|
||||||
|
status: record.status,
|
||||||
|
error_message: record.error_message,
|
||||||
|
execution_time: record.execution_time,
|
||||||
|
payload_size_kb: record.payload_size_kb,
|
||||||
|
});
|
||||||
|
|
||||||
|
analysis.hosts[hostId].summary.totalReports++;
|
||||||
|
if (record.status === "success") {
|
||||||
|
analysis.hosts[hostId].summary.successfulReports++;
|
||||||
|
analysis.hosts[hostId].summary.maxPackagesCount = Math.max(
|
||||||
|
analysis.hosts[hostId].summary.maxPackagesCount,
|
||||||
|
record.packages_count,
|
||||||
|
);
|
||||||
|
analysis.hosts[hostId].summary.maxSecurityCount = Math.max(
|
||||||
|
analysis.hosts[hostId].summary.maxSecurityCount,
|
||||||
|
record.security_count,
|
||||||
|
);
|
||||||
|
analysis.hosts[hostId].summary.maxTotalPackages = Math.max(
|
||||||
|
analysis.hosts[hostId].summary.maxTotalPackages,
|
||||||
|
record.total_packages || 0,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
analysis.hosts[hostId].summary.failedReports++;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Calculate averages for each host
|
||||||
|
Object.keys(analysis.hosts).forEach((hostId) => {
|
||||||
|
const host = analysis.hosts[hostId];
|
||||||
|
const successfulRecords = host.records.filter(
|
||||||
|
(r) => r.status === "success",
|
||||||
|
);
|
||||||
|
|
||||||
|
if (successfulRecords.length > 0) {
|
||||||
|
host.summary.avgPackagesCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.packages_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
host.summary.avgSecurityCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.security_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
host.summary.avgTotalPackages = Math.round(
|
||||||
|
successfulRecords.reduce(
|
||||||
|
(sum, r) => sum + (r.total_packages || 0),
|
||||||
|
0,
|
||||||
|
) / successfulRecords.length,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create timeline with hourly/daily aggregation
|
||||||
|
const timelineMap = new Map();
|
||||||
|
spikeData.forEach((record) => {
|
||||||
|
const timeKey = record.timestamp.toISOString().substring(0, 13); // Hourly
|
||||||
|
if (!timelineMap.has(timeKey)) {
|
||||||
|
timelineMap.set(timeKey, {
|
||||||
|
timestamp: timeKey,
|
||||||
|
totalReports: 0,
|
||||||
|
successfulReports: 0,
|
||||||
|
failedReports: 0,
|
||||||
|
totalPackagesCount: 0,
|
||||||
|
totalSecurityCount: 0,
|
||||||
|
totalTotalPackages: 0,
|
||||||
|
uniqueHosts: new Set(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const timelineEntry = timelineMap.get(timeKey);
|
||||||
|
timelineEntry.totalReports++;
|
||||||
|
timelineEntry.uniqueHosts.add(record.host_id);
|
||||||
|
|
||||||
|
if (record.status === "success") {
|
||||||
|
timelineEntry.successfulReports++;
|
||||||
|
timelineEntry.totalPackagesCount += record.packages_count;
|
||||||
|
timelineEntry.totalSecurityCount += record.security_count;
|
||||||
|
timelineEntry.totalTotalPackages += record.total_packages || 0;
|
||||||
|
} else {
|
||||||
|
timelineEntry.failedReports++;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Convert timeline map to array
|
||||||
|
analysis.timeline = Array.from(timelineMap.values())
|
||||||
|
.map((entry) => ({
|
||||||
|
...entry,
|
||||||
|
uniqueHosts: entry.uniqueHosts.size,
|
||||||
|
}))
|
||||||
|
.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
|
||||||
|
|
||||||
|
// Calculate overall summary
|
||||||
|
const successfulRecords = spikeData.filter((r) => r.status === "success");
|
||||||
|
if (successfulRecords.length > 0) {
|
||||||
|
analysis.summary.maxPackagesCount = Math.max(
|
||||||
|
...successfulRecords.map((r) => r.packages_count),
|
||||||
|
);
|
||||||
|
analysis.summary.maxSecurityCount = Math.max(
|
||||||
|
...successfulRecords.map((r) => r.security_count),
|
||||||
|
);
|
||||||
|
analysis.summary.maxTotalPackages = Math.max(
|
||||||
|
...successfulRecords.map((r) => r.total_packages || 0),
|
||||||
|
);
|
||||||
|
analysis.summary.avgPackagesCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.packages_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
analysis.summary.avgSecurityCount = Math.round(
|
||||||
|
successfulRecords.reduce((sum, r) => sum + r.security_count, 0) /
|
||||||
|
successfulRecords.length,
|
||||||
|
);
|
||||||
|
analysis.summary.avgTotalPackages = Math.round(
|
||||||
|
successfulRecords.reduce(
|
||||||
|
(sum, r) => sum + (r.total_packages || 0),
|
||||||
|
0,
|
||||||
|
) / successfulRecords.length,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Identify potential causes of the spike
|
||||||
|
const potentialCauses = [];
|
||||||
|
|
||||||
|
// Check for hosts with unusually high package counts
|
||||||
|
Object.keys(analysis.hosts).forEach((hostId) => {
|
||||||
|
const host = analysis.hosts[hostId];
|
||||||
|
if (
|
||||||
|
host.summary.maxPackagesCount >
|
||||||
|
analysis.summary.avgPackagesCount * 2
|
||||||
|
) {
|
||||||
|
potentialCauses.push({
|
||||||
|
type: "high_package_count",
|
||||||
|
hostId,
|
||||||
|
hostName: host.hostInfo.friendly_name || host.hostInfo.hostname,
|
||||||
|
value: host.summary.maxPackagesCount,
|
||||||
|
avg: analysis.summary.avgPackagesCount,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check for multiple hosts reporting at the same time (this explains the 500 vs 59 discrepancy)
|
||||||
|
const concurrentReports = analysis.timeline.filter(
|
||||||
|
(entry) => entry.uniqueHosts > 1,
|
||||||
|
);
|
||||||
|
if (concurrentReports.length > 0) {
|
||||||
|
potentialCauses.push({
|
||||||
|
type: "concurrent_reports",
|
||||||
|
description:
|
||||||
|
"Multiple hosts reported simultaneously - this explains why chart shows higher numbers than individual host reports",
|
||||||
|
count: concurrentReports.length,
|
||||||
|
details: concurrentReports.map((entry) => ({
|
||||||
|
timestamp: entry.timestamp,
|
||||||
|
totalPackagesCount: entry.totalPackagesCount,
|
||||||
|
uniqueHosts: entry.uniqueHosts,
|
||||||
|
avgPerHost: Math.round(
|
||||||
|
entry.totalPackagesCount / entry.uniqueHosts,
|
||||||
|
),
|
||||||
|
})),
|
||||||
|
explanation:
|
||||||
|
"The chart sums package counts across all hosts. If multiple hosts report at the same time, the chart shows the total sum, not individual host counts.",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for failed reports that might indicate system issues
|
||||||
|
if (analysis.failedReports > 0) {
|
||||||
|
potentialCauses.push({
|
||||||
|
type: "failed_reports",
|
||||||
|
count: analysis.failedReports,
|
||||||
|
percentage: Math.round(
|
||||||
|
(analysis.failedReports / analysis.totalRecords) * 100,
|
||||||
|
),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add aggregation explanation
|
||||||
|
const aggregationExplanation = {
|
||||||
|
type: "aggregation_explanation",
|
||||||
|
description: "Chart Aggregation Logic",
|
||||||
|
details: {
|
||||||
|
howItWorks:
|
||||||
|
"The package trends chart sums package counts across all hosts for each time period",
|
||||||
|
individualHosts:
|
||||||
|
"Each host reports its own package count (e.g., 59 packages)",
|
||||||
|
chartDisplay:
|
||||||
|
"Chart shows the sum of all hosts' package counts (e.g., 59 + other hosts = 500)",
|
||||||
|
timeGrouping:
|
||||||
|
"Multiple hosts reporting in the same hour/day are aggregated together",
|
||||||
|
},
|
||||||
|
example: {
|
||||||
|
host1: "Host A reports 59 outdated packages",
|
||||||
|
host2: "Host B reports 120 outdated packages",
|
||||||
|
host3: "Host C reports 321 outdated packages",
|
||||||
|
chartShows: "Chart displays 500 total packages (59+120+321)",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
potentialCauses.push(aggregationExplanation);
|
||||||
|
|
||||||
|
// Add specific host breakdown if a host ID is provided
|
||||||
|
let specificHostAnalysis = null;
|
||||||
|
if (req.query.hostId) {
|
||||||
|
const hostId = req.query.hostId;
|
||||||
|
const hostData = analysis.hosts[hostId];
|
||||||
|
if (hostData) {
|
||||||
|
specificHostAnalysis = {
|
||||||
|
hostId,
|
||||||
|
hostInfo: hostData.hostInfo,
|
||||||
|
summary: hostData.summary,
|
||||||
|
records: hostData.records,
|
||||||
|
explanation: `This host reported ${hostData.summary.maxPackagesCount} outdated packages, but the chart shows ${analysis.summary.maxPackagesCount} because it sums across all hosts that reported at the same time.`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
analysis,
|
||||||
|
potentialCauses,
|
||||||
|
specificHostAnalysis,
|
||||||
|
recommendations: [
|
||||||
|
"Check if any hosts had major package updates around this time",
|
||||||
|
"Verify if any new hosts were added to the system",
|
||||||
|
"Check for system maintenance or updates that might have triggered package checks",
|
||||||
|
"Review any automation or scheduled tasks that run around 6pm",
|
||||||
|
"Check if any repositories were updated or new packages were released",
|
||||||
|
"Remember: Chart shows SUM of all hosts' package counts, not individual host counts",
|
||||||
|
],
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error analyzing package spike:", error);
|
||||||
|
res.status(500).json({ error: "Failed to analyze package spike" });
|
||||||
|
}
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
module.exports = router;
|
module.exports = router;
|
||||||
|
backend/src/routes/dockerRoutes.js (new file)
@@ -0,0 +1,779 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { getPrismaClient } = require("../config/prisma");
const { v4: uuidv4 } = require("uuid");

const prisma = getPrismaClient();
const router = express.Router();

// Helper function to convert BigInt fields to strings for JSON serialization
const convertBigIntToString = (obj) => {
	if (obj === null || obj === undefined) return obj;

	if (typeof obj === "bigint") {
		return obj.toString();
	}

	if (Array.isArray(obj)) {
		return obj.map(convertBigIntToString);
	}

	if (typeof obj === "object") {
		const converted = {};
		for (const key in obj) {
			converted[key] = convertBigIntToString(obj[key]);
		}
		return converted;
	}

	return obj;
};

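Every response in this file passes through `convertBigIntToString` because `JSON.stringify` cannot serialize BigInt values (the image `size_bytes` field below is stored as BigInt). A quick illustration with made-up example values:

// Example payload with a BigInt field (values are illustrative only).
const image = { repository: "nginx", tag: "latest", size_bytes: 187649024n };

// JSON.stringify(image) would throw: TypeError: Do not know how to serialize a BigInt.
// After conversion the BigInt becomes a string and serializes cleanly:
console.log(JSON.stringify(convertBigIntToString(image)));
// {"repository":"nginx","tag":"latest","size_bytes":"187649024"}
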
// GET /api/v1/docker/dashboard - Get Docker dashboard statistics
|
||||||
|
router.get("/dashboard", authenticateToken, async (_req, res) => {
|
||||||
|
try {
|
||||||
|
// Get total hosts with Docker containers
|
||||||
|
const hostsWithDocker = await prisma.docker_containers.groupBy({
|
||||||
|
by: ["host_id"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get total containers
|
||||||
|
const totalContainers = await prisma.docker_containers.count();
|
||||||
|
|
||||||
|
// Get running containers
|
||||||
|
const runningContainers = await prisma.docker_containers.count({
|
||||||
|
where: { status: "running" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get total images
|
||||||
|
const totalImages = await prisma.docker_images.count();
|
||||||
|
|
||||||
|
// Get available updates
|
||||||
|
const availableUpdates = await prisma.docker_image_updates.count();
|
||||||
|
|
||||||
|
// Get containers by status
|
||||||
|
const containersByStatus = await prisma.docker_containers.groupBy({
|
||||||
|
by: ["status"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get images by source
|
||||||
|
const imagesBySource = await prisma.docker_images.groupBy({
|
||||||
|
by: ["source"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json({
|
||||||
|
stats: {
|
||||||
|
totalHostsWithDocker: hostsWithDocker.length,
|
||||||
|
totalContainers,
|
||||||
|
runningContainers,
|
||||||
|
totalImages,
|
||||||
|
availableUpdates,
|
||||||
|
},
|
||||||
|
containersByStatus,
|
||||||
|
imagesBySource,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching Docker dashboard:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch Docker dashboard" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/containers - Get all containers with filters
|
||||||
|
router.get("/containers", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { status, hostId, imageId, search, page = 1, limit = 50 } = req.query;
|
||||||
|
|
||||||
|
const where = {};
|
||||||
|
if (status) where.status = status;
|
||||||
|
if (hostId) where.host_id = hostId;
|
||||||
|
if (imageId) where.image_id = imageId;
|
||||||
|
if (search) {
|
||||||
|
where.OR = [
|
||||||
|
{ name: { contains: search, mode: "insensitive" } },
|
||||||
|
{ image_name: { contains: search, mode: "insensitive" } },
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const [containers, total] = await Promise.all([
|
||||||
|
prisma.docker_containers.findMany({
|
||||||
|
where,
|
||||||
|
include: {
|
||||||
|
docker_images: true,
|
||||||
|
},
|
||||||
|
orderBy: { updated_at: "desc" },
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
}),
|
||||||
|
prisma.docker_containers.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Get host information for each container
|
||||||
|
const hostIds = [...new Set(containers.map((c) => c.host_id))];
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
select: { id: true, friendly_name: true, hostname: true, ip: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const hostsMap = hosts.reduce((acc, host) => {
|
||||||
|
acc[host.id] = host;
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
const containersWithHosts = containers.map((container) => ({
|
||||||
|
...container,
|
||||||
|
host: hostsMap[container.host_id],
|
||||||
|
}));
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
containers: containersWithHosts,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total,
|
||||||
|
totalPages: Math.ceil(total / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching containers:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch containers" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/containers/:id - Get container detail
|
||||||
|
router.get("/containers/:id", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
const container = await prisma.docker_containers.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: {
|
||||||
|
docker_images: {
|
||||||
|
include: {
|
||||||
|
docker_image_updates: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!container) {
|
||||||
|
return res.status(404).json({ error: "Container not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get host information
|
||||||
|
const host = await prisma.hosts.findUnique({
|
||||||
|
where: { id: container.host_id },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
friendly_name: true,
|
||||||
|
hostname: true,
|
||||||
|
ip: true,
|
||||||
|
os_type: true,
|
||||||
|
os_version: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get other containers using the same image
|
||||||
|
const similarContainers = await prisma.docker_containers.findMany({
|
||||||
|
where: {
|
||||||
|
image_id: container.image_id,
|
||||||
|
id: { not: id },
|
||||||
|
},
|
||||||
|
take: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
container: {
|
||||||
|
...container,
|
||||||
|
host,
|
||||||
|
},
|
||||||
|
similarContainers,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching container detail:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch container detail" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/images - Get all images with filters
|
||||||
|
router.get("/images", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { source, search, page = 1, limit = 50 } = req.query;
|
||||||
|
|
||||||
|
const where = {};
|
||||||
|
if (source) where.source = source;
|
||||||
|
if (search) {
|
||||||
|
where.OR = [
|
||||||
|
{ repository: { contains: search, mode: "insensitive" } },
|
||||||
|
{ tag: { contains: search, mode: "insensitive" } },
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const [images, total] = await Promise.all([
|
||||||
|
prisma.docker_images.findMany({
|
||||||
|
where,
|
||||||
|
include: {
|
||||||
|
_count: {
|
||||||
|
select: {
|
||||||
|
docker_containers: true,
|
||||||
|
docker_image_updates: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
docker_image_updates: {
|
||||||
|
take: 1,
|
||||||
|
orderBy: { created_at: "desc" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: { updated_at: "desc" },
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
}),
|
||||||
|
prisma.docker_images.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Get unique hosts using each image
|
||||||
|
const imagesWithHosts = await Promise.all(
|
||||||
|
images.map(async (image) => {
|
||||||
|
const containers = await prisma.docker_containers.findMany({
|
||||||
|
where: { image_id: image.id },
|
||||||
|
select: { host_id: true },
|
||||||
|
distinct: ["host_id"],
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
...image,
|
||||||
|
hostsCount: containers.length,
|
||||||
|
hasUpdates: image._count.docker_image_updates > 0,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
images: imagesWithHosts,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total,
|
||||||
|
totalPages: Math.ceil(total / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching images:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch images" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/images/:id - Get image detail
|
||||||
|
router.get("/images/:id", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
const image = await prisma.docker_images.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: {
|
||||||
|
docker_containers: {
|
||||||
|
take: 100,
|
||||||
|
},
|
||||||
|
docker_image_updates: {
|
||||||
|
orderBy: { created_at: "desc" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!image) {
|
||||||
|
return res.status(404).json({ error: "Image not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get unique hosts using this image
|
||||||
|
const hostIds = [...new Set(image.docker_containers.map((c) => c.host_id))];
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
select: { id: true, friendly_name: true, hostname: true, ip: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
image,
|
||||||
|
hosts,
|
||||||
|
totalContainers: image.docker_containers.length,
|
||||||
|
totalHosts: hosts.length,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching image detail:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch image detail" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/hosts - Get all hosts with Docker
|
||||||
|
router.get("/hosts", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { page = 1, limit = 50 } = req.query;
|
||||||
|
|
||||||
|
// Get hosts that have Docker containers
|
||||||
|
const hostsWithContainers = await prisma.docker_containers.groupBy({
|
||||||
|
by: ["host_id"],
|
||||||
|
_count: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hostIds = hostsWithContainers.map((h) => h.host_id);
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
orderBy: { friendly_name: "asc" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get container counts and statuses for each host
|
||||||
|
const hostsWithStats = await Promise.all(
|
||||||
|
hosts.map(async (host) => {
|
||||||
|
const [totalContainers, runningContainers, totalImages] =
|
||||||
|
await Promise.all([
|
||||||
|
prisma.docker_containers.count({
|
||||||
|
where: { host_id: host.id },
|
||||||
|
}),
|
||||||
|
prisma.docker_containers.count({
|
||||||
|
where: { host_id: host.id, status: "running" },
|
||||||
|
}),
|
||||||
|
prisma.docker_containers.findMany({
|
||||||
|
where: { host_id: host.id },
|
||||||
|
select: { image_id: true },
|
||||||
|
distinct: ["image_id"],
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...host,
|
||||||
|
dockerStats: {
|
||||||
|
totalContainers,
|
||||||
|
runningContainers,
|
||||||
|
totalImages: totalImages.length,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
hosts: hostsWithStats,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total: hostIds.length,
|
||||||
|
totalPages: Math.ceil(hostIds.length / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching Docker hosts:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch Docker hosts" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/hosts/:id - Get host Docker detail
|
||||||
|
router.get("/hosts/:id", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { id } = req.params;
|
||||||
|
|
||||||
|
const host = await prisma.hosts.findUnique({
|
||||||
|
where: { id },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!host) {
|
||||||
|
return res.status(404).json({ error: "Host not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get containers on this host
|
||||||
|
const containers = await prisma.docker_containers.findMany({
|
||||||
|
where: { host_id: id },
|
||||||
|
include: {
|
||||||
|
docker_images: {
|
||||||
|
include: {
|
||||||
|
docker_image_updates: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: { name: "asc" },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get unique images on this host
|
||||||
|
const imageIds = [...new Set(containers.map((c) => c.image_id))].filter(
|
||||||
|
Boolean,
|
||||||
|
);
|
||||||
|
const images = await prisma.docker_images.findMany({
|
||||||
|
where: { id: { in: imageIds } },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get container statistics
|
||||||
|
const runningContainers = containers.filter(
|
||||||
|
(c) => c.status === "running",
|
||||||
|
).length;
|
||||||
|
const stoppedContainers = containers.filter(
|
||||||
|
(c) => c.status === "exited" || c.status === "stopped",
|
||||||
|
).length;
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
host,
|
||||||
|
containers,
|
||||||
|
images,
|
||||||
|
stats: {
|
||||||
|
totalContainers: containers.length,
|
||||||
|
runningContainers,
|
||||||
|
stoppedContainers,
|
||||||
|
totalImages: images.length,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching host Docker detail:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch host Docker detail" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/updates - Get available updates
|
||||||
|
router.get("/updates", authenticateToken, async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { page = 1, limit = 50, securityOnly = false } = req.query;
|
||||||
|
|
||||||
|
const where = {};
|
||||||
|
if (securityOnly === "true") {
|
||||||
|
where.is_security_update = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
|
||||||
|
const take = parseInt(limit, 10);
|
||||||
|
|
||||||
|
const [updates, total] = await Promise.all([
|
||||||
|
prisma.docker_image_updates.findMany({
|
||||||
|
where,
|
||||||
|
include: {
|
||||||
|
docker_images: {
|
||||||
|
include: {
|
||||||
|
docker_containers: {
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
host_id: true,
|
||||||
|
name: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: [{ is_security_update: "desc" }, { created_at: "desc" }],
|
||||||
|
skip,
|
||||||
|
take,
|
||||||
|
}),
|
||||||
|
prisma.docker_image_updates.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Get affected hosts for each update
|
||||||
|
const updatesWithHosts = await Promise.all(
|
||||||
|
updates.map(async (update) => {
|
||||||
|
const hostIds = [
|
||||||
|
...new Set(
|
||||||
|
update.docker_images.docker_containers.map((c) => c.host_id),
|
||||||
|
),
|
||||||
|
];
|
||||||
|
const hosts = await prisma.hosts.findMany({
|
||||||
|
where: { id: { in: hostIds } },
|
||||||
|
select: { id: true, friendly_name: true, hostname: true },
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
...update,
|
||||||
|
affectedHosts: hosts,
|
||||||
|
affectedContainersCount:
|
||||||
|
update.docker_images.docker_containers.length,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
res.json(
|
||||||
|
convertBigIntToString({
|
||||||
|
updates: updatesWithHosts,
|
||||||
|
pagination: {
|
||||||
|
page: parseInt(page, 10),
|
||||||
|
limit: parseInt(limit, 10),
|
||||||
|
total,
|
||||||
|
totalPages: Math.ceil(total / parseInt(limit, 10)),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error fetching Docker updates:", error);
|
||||||
|
res.status(500).json({ error: "Failed to fetch Docker updates" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// POST /api/v1/docker/collect - Collect Docker data from agent
|
||||||
|
router.post("/collect", async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { apiId, apiKey, containers, images, updates } = req.body;
|
||||||
|
|
||||||
|
// Validate API credentials
|
||||||
|
const host = await prisma.hosts.findFirst({
|
||||||
|
where: { api_id: apiId, api_key: apiKey },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!host) {
|
||||||
|
return res.status(401).json({ error: "Invalid API credentials" });
|
||||||
|
}
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
		// Helper function to validate and parse dates
		const parseDate = (dateString) => {
			if (!dateString) return now;
			const date = new Date(dateString);
			return Number.isNaN(date.getTime()) ? now : date;
		};

// Process containers
|
||||||
|
if (containers && Array.isArray(containers)) {
|
||||||
|
for (const containerData of containers) {
|
||||||
|
const containerId = uuidv4();
|
||||||
|
|
||||||
|
// Find or create image
|
||||||
|
let imageId = null;
|
||||||
|
if (containerData.image_repository && containerData.image_tag) {
|
||||||
|
const image = await prisma.docker_images.upsert({
|
||||||
|
where: {
|
||||||
|
repository_tag_image_id: {
|
||||||
|
repository: containerData.image_repository,
|
||||||
|
tag: containerData.image_tag,
|
||||||
|
image_id: containerData.image_id || "unknown",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
last_checked: now,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: uuidv4(),
|
||||||
|
repository: containerData.image_repository,
|
||||||
|
tag: containerData.image_tag,
|
||||||
|
image_id: containerData.image_id || "unknown",
|
||||||
|
source: containerData.image_source || "docker-hub",
|
||||||
|
created_at: parseDate(containerData.created_at),
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
imageId = image.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert container
|
||||||
|
await prisma.docker_containers.upsert({
|
||||||
|
where: {
|
||||||
|
host_id_container_id: {
|
||||||
|
host_id: host.id,
|
||||||
|
container_id: containerData.container_id,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
name: containerData.name,
|
||||||
|
image_id: imageId,
|
||||||
|
image_name: containerData.image_name,
|
||||||
|
image_tag: containerData.image_tag || "latest",
|
||||||
|
status: containerData.status,
|
||||||
|
state: containerData.state,
|
||||||
|
ports: containerData.ports || null,
|
||||||
|
started_at: containerData.started_at
|
||||||
|
? parseDate(containerData.started_at)
|
||||||
|
: null,
|
||||||
|
updated_at: now,
|
||||||
|
last_checked: now,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: containerId,
|
||||||
|
host_id: host.id,
|
||||||
|
container_id: containerData.container_id,
|
||||||
|
name: containerData.name,
|
||||||
|
image_id: imageId,
|
||||||
|
image_name: containerData.image_name,
|
||||||
|
image_tag: containerData.image_tag || "latest",
|
||||||
|
status: containerData.status,
|
||||||
|
state: containerData.state,
|
||||||
|
ports: containerData.ports || null,
|
||||||
|
created_at: parseDate(containerData.created_at),
|
||||||
|
started_at: containerData.started_at
|
||||||
|
? parseDate(containerData.started_at)
|
||||||
|
: null,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process standalone images
|
||||||
|
if (images && Array.isArray(images)) {
|
||||||
|
for (const imageData of images) {
|
||||||
|
await prisma.docker_images.upsert({
|
||||||
|
where: {
|
||||||
|
repository_tag_image_id: {
|
||||||
|
repository: imageData.repository,
|
||||||
|
tag: imageData.tag,
|
||||||
|
image_id: imageData.image_id,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
size_bytes: imageData.size_bytes
|
||||||
|
? BigInt(imageData.size_bytes)
|
||||||
|
: null,
|
||||||
|
last_checked: now,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: uuidv4(),
|
||||||
|
repository: imageData.repository,
|
||||||
|
tag: imageData.tag,
|
||||||
|
image_id: imageData.image_id,
|
||||||
|
digest: imageData.digest,
|
||||||
|
size_bytes: imageData.size_bytes
|
||||||
|
? BigInt(imageData.size_bytes)
|
||||||
|
: null,
|
||||||
|
source: imageData.source || "docker-hub",
|
||||||
|
created_at: parseDate(imageData.created_at),
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process updates
|
||||||
|
// First, get all images for this host to clean up old updates
|
||||||
|
const hostImageIds = await prisma.docker_containers
|
||||||
|
.findMany({
|
||||||
|
where: { host_id: host.id },
|
||||||
|
select: { image_id: true },
|
||||||
|
distinct: ["image_id"],
|
||||||
|
})
|
||||||
|
.then((results) => results.map((r) => r.image_id).filter(Boolean));
|
||||||
|
|
||||||
|
// Delete old updates for images on this host that are no longer reported
|
||||||
|
if (hostImageIds.length > 0) {
|
||||||
|
const reportedImageIds = [];
|
||||||
|
|
||||||
|
// Process new updates
|
||||||
|
if (updates && Array.isArray(updates)) {
|
||||||
|
for (const updateData of updates) {
|
||||||
|
// Find the image by repository, tag, and image_id
|
||||||
|
const image = await prisma.docker_images.findFirst({
|
||||||
|
where: {
|
||||||
|
repository: updateData.repository,
|
||||||
|
tag: updateData.current_tag,
|
||||||
|
image_id: updateData.image_id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (image) {
|
||||||
|
reportedImageIds.push(image.id);
|
||||||
|
|
||||||
|
// Store digest info in changelog_url field as JSON for now
|
||||||
|
const digestInfo = JSON.stringify({
|
||||||
|
method: "digest_comparison",
|
||||||
|
current_digest: updateData.current_digest,
|
||||||
|
available_digest: updateData.available_digest,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Upsert the update record
|
||||||
|
await prisma.docker_image_updates.upsert({
|
||||||
|
where: {
|
||||||
|
image_id_available_tag: {
|
||||||
|
image_id: image.id,
|
||||||
|
available_tag: updateData.available_tag,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
updated_at: now,
|
||||||
|
changelog_url: digestInfo,
|
||||||
|
severity: "digest_changed",
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
id: uuidv4(),
|
||||||
|
image_id: image.id,
|
||||||
|
current_tag: updateData.current_tag,
|
||||||
|
available_tag: updateData.available_tag,
|
||||||
|
severity: "digest_changed",
|
||||||
|
changelog_url: digestInfo,
|
||||||
|
updated_at: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove stale updates for images on this host that are no longer in the updates list
|
||||||
|
const imageIdsToCleanup = hostImageIds.filter(
|
||||||
|
(id) => !reportedImageIds.includes(id),
|
||||||
|
);
|
||||||
|
if (imageIdsToCleanup.length > 0) {
|
||||||
|
await prisma.docker_image_updates.deleteMany({
|
||||||
|
where: {
|
||||||
|
image_id: { in: imageIdsToCleanup },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
res.json({ success: true, message: "Docker data collected successfully" });
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error collecting Docker data:", error);
|
||||||
|
console.error("Error stack:", error.stack);
|
||||||
|
console.error("Request body:", JSON.stringify(req.body, null, 2));
|
||||||
|
res.status(500).json({
|
||||||
|
error: "Failed to collect Docker data",
|
||||||
|
message: error.message,
|
||||||
|
details: process.env.NODE_ENV === "development" ? error.stack : undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// GET /api/v1/docker/agent - Serve the Docker agent installation script
|
||||||
|
router.get("/agent", async (_req, res) => {
|
||||||
|
try {
|
||||||
|
const fs = require("node:fs");
|
||||||
|
const path = require("node:path");
|
||||||
|
const agentPath = path.join(
|
||||||
|
__dirname,
|
||||||
|
"../../..",
|
||||||
|
"agents",
|
||||||
|
"patchmon-docker-agent.sh",
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check if file exists
|
||||||
|
if (!fs.existsSync(agentPath)) {
|
||||||
|
return res.status(404).json({ error: "Docker agent script not found" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read and serve the file
|
||||||
|
const agentScript = fs.readFileSync(agentPath, "utf8");
|
||||||
|
res.setHeader("Content-Type", "text/x-shellscript");
|
||||||
|
res.setHeader(
|
||||||
|
"Content-Disposition",
|
||||||
|
'inline; filename="patchmon-docker-agent.sh"',
|
||||||
|
);
|
||||||
|
res.send(agentScript);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error serving Docker agent:", error);
|
||||||
|
res.status(500).json({ error: "Failed to serve Docker agent script" });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
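For context, a minimal sketch of what an agent-side report to the `/api/v1/docker/collect` route above might look like. The payload field names mirror the route's destructuring (`apiId`, `apiKey`, `containers`, `images`, `updates`); the server URL and credential values are placeholders, and Node 18+ global `fetch` is assumed.

```js
// Hypothetical agent-side submission; all values are illustrative only.
const payload = {
  apiId: "host-api-id", // placeholder credential
  apiKey: "host-api-key", // placeholder credential
  containers: [
    {
      container_id: "abc123",
      name: "web",
      image_repository: "nginx",
      image_tag: "1.27",
      image_id: "sha256:deadbeef",
      image_name: "nginx:1.27",
      status: "running",
      state: "running",
    },
  ],
  images: [],
  updates: [],
};

async function report() {
  const res = await fetch("https://patchmon.example.com/api/v1/docker/collect", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
  // Expected shape on success: { success: true, message: "Docker data collected successfully" }
  console.log(await res.json());
}

report().catch(console.error);
```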
backend/src/routes/gethomepageRoutes.js (new file, 246 lines)
@@ -0,0 +1,246 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const bcrypt = require("bcryptjs");

const router = express.Router();
const prisma = getPrismaClient();

// Middleware to authenticate API key
const authenticateApiKey = async (req, res, next) => {
  try {
    const authHeader = req.headers.authorization;

    if (!authHeader || !authHeader.startsWith("Basic ")) {
      return res
        .status(401)
        .json({ error: "Missing or invalid authorization header" });
    }

    // Decode base64 credentials
    const base64Credentials = authHeader.split(" ")[1];
    const credentials = Buffer.from(base64Credentials, "base64").toString(
      "ascii",
    );
    const [apiKey, apiSecret] = credentials.split(":");

    if (!apiKey || !apiSecret) {
      return res.status(401).json({ error: "Invalid credentials format" });
    }

    // Find the token in database
    const token = await prisma.auto_enrollment_tokens.findUnique({
      where: { token_key: apiKey },
      include: {
        users: {
          select: {
            id: true,
            username: true,
            role: true,
          },
        },
      },
    });

    if (!token) {
      console.log(`API key not found: ${apiKey}`);
      return res.status(401).json({ error: "Invalid API key" });
    }

    // Check if token is active
    if (!token.is_active) {
      return res.status(401).json({ error: "API key is disabled" });
    }

    // Check if token has expired
    if (token.expires_at && new Date(token.expires_at) < new Date()) {
      return res.status(401).json({ error: "API key has expired" });
    }

    // Check if token is for gethomepage integration
    if (token.metadata?.integration_type !== "gethomepage") {
      return res.status(401).json({ error: "Invalid API key type" });
    }

    // Verify the secret
    const isValidSecret = await bcrypt.compare(apiSecret, token.token_secret);
    if (!isValidSecret) {
      return res.status(401).json({ error: "Invalid API secret" });
    }

    // Check IP restrictions if any
    if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
      const clientIp = req.ip || req.connection.remoteAddress;
      const forwardedFor = req.headers["x-forwarded-for"];
      const realIp = req.headers["x-real-ip"];

      // Get the actual client IP (considering proxies)
      const actualClientIp = forwardedFor
        ? forwardedFor.split(",")[0].trim()
        : realIp || clientIp;

      const isAllowedIp = token.allowed_ip_ranges.some((range) => {
        // Simple IP range check (can be enhanced for CIDR support)
        return actualClientIp.startsWith(range) || actualClientIp === range;
      });

      if (!isAllowedIp) {
        console.log(
          `IP validation failed. Client IP: ${actualClientIp}, Allowed ranges: ${token.allowed_ip_ranges.join(", ")}`,
        );
        return res.status(403).json({ error: "IP address not allowed" });
      }
    }

    // Update last used timestamp
    await prisma.auto_enrollment_tokens.update({
      where: { id: token.id },
      data: { last_used_at: new Date() },
    });

    // Attach token info to request
    req.apiToken = token;
    next();
  } catch (error) {
    console.error("API key authentication error:", error);
    res.status(500).json({ error: "Authentication failed" });
  }
};

// Get homepage widget statistics
router.get("/stats", authenticateApiKey, async (_req, res) => {
  try {
    // Get total hosts count
    const totalHosts = await prisma.hosts.count({
      where: { status: "active" },
    });

    // Get total unique packages that need updates (consistent with dashboard)
    const totalOutdatedPackages = await prisma.packages.count({
      where: {
        host_packages: {
          some: {
            needs_update: true,
          },
        },
      },
    });

    // Get total repositories count
    const totalRepos = await prisma.repositories.count({
      where: { is_active: true },
    });

    // Get hosts that need updates (have outdated packages)
    const hostsNeedingUpdates = await prisma.hosts.count({
      where: {
        status: "active",
        host_packages: {
          some: {
            needs_update: true,
          },
        },
      },
    });

    // Get security updates count (unique packages - consistent with dashboard)
    const securityUpdates = await prisma.packages.count({
      where: {
        host_packages: {
          some: {
            needs_update: true,
            is_security_update: true,
          },
        },
      },
    });

    // Get hosts with security updates
    const hostsWithSecurityUpdates = await prisma.hosts.count({
      where: {
        status: "active",
        host_packages: {
          some: {
            needs_update: true,
            is_security_update: true,
          },
        },
      },
    });

    // Get up-to-date hosts count
    const upToDateHosts = totalHosts - hostsNeedingUpdates;

    // Get recent update activity (last 24 hours)
    const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
    const recentUpdates = await prisma.update_history.count({
      where: {
        timestamp: {
          gte: oneDayAgo,
        },
        status: "success",
      },
    });

    // Get OS distribution
    const osDistribution = await prisma.hosts.groupBy({
      by: ["os_type"],
      where: { status: "active" },
      _count: {
        id: true,
      },
      orderBy: {
        _count: {
          id: "desc",
        },
      },
    });

    // Format OS distribution data
    const osDistributionFormatted = osDistribution.map((os) => ({
      name: os.os_type,
      count: os._count.id,
    }));

    // Extract top 3 OS types for flat display in widgets
    const top_os_1 = osDistributionFormatted[0] || { name: "None", count: 0 };
    const top_os_2 = osDistributionFormatted[1] || { name: "None", count: 0 };
    const top_os_3 = osDistributionFormatted[2] || { name: "None", count: 0 };

    // Prepare response data
    const stats = {
      total_hosts: totalHosts,
      total_outdated_packages: totalOutdatedPackages,
      total_repos: totalRepos,
      hosts_needing_updates: hostsNeedingUpdates,
      up_to_date_hosts: upToDateHosts,
      security_updates: securityUpdates,
      hosts_with_security_updates: hostsWithSecurityUpdates,
      recent_updates_24h: recentUpdates,
      os_distribution: osDistributionFormatted,
      // Flattened OS data for easy widget display
      top_os_1_name: top_os_1.name,
      top_os_1_count: top_os_1.count,
      top_os_2_name: top_os_2.name,
      top_os_2_count: top_os_2.count,
      top_os_3_name: top_os_3.name,
      top_os_3_count: top_os_3.count,
      last_updated: new Date().toISOString(),
    };

    res.json(stats);
  } catch (error) {
    console.error("Error fetching homepage stats:", error);
    res.status(500).json({ error: "Failed to fetch statistics" });
  }
});

// Health check endpoint for the API
router.get("/health", authenticateApiKey, async (req, res) => {
  res.json({
    status: "ok",
    timestamp: new Date().toISOString(),
    api_key: req.apiToken.token_name,
  });
});

module.exports = router;
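A minimal sketch of how a client (for example a gethomepage custom widget or a quick test script) might call the stats endpoint above. The Basic-auth pair is the token key/secret created for the gethomepage integration; the hostname and credential values here are placeholders, and Node 18+ global `fetch` is assumed.

```js
// Hypothetical client for GET /api/v1/gethomepage/stats
const apiKey = "pm_token_key"; // placeholder
const apiSecret = "pm_token_secret"; // placeholder
const auth = Buffer.from(`${apiKey}:${apiSecret}`).toString("base64");

async function fetchStats() {
  const res = await fetch(
    "https://patchmon.example.com/api/v1/gethomepage/stats",
    { headers: { Authorization: `Basic ${auth}` } },
  );
  if (!res.ok) throw new Error(`Stats request failed: ${res.status}`);
  const stats = await res.json();
  // Flattened fields such as total_hosts and total_outdated_packages map
  // directly onto widget display slots.
  console.log(stats.total_hosts, stats.total_outdated_packages);
}

fetchStats().catch(console.error);
```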
@@ -1,12 +1,12 @@
 const express = require("express");
 const { body, validationResult } = require("express-validator");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { randomUUID } = require("node:crypto");
 const { authenticateToken } = require("../middleware/auth");
 const { requireManageHosts } = require("../middleware/permissions");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Get all host groups
 router.get("/", authenticateToken, async (_req, res) => {
@@ -15,7 +15,7 @@ router.get("/", authenticateToken, async (_req, res) => {
 include: {
 _count: {
 select: {
-hosts: true,
+host_group_memberships: true,
 },
 },
 },
@@ -39,16 +39,20 @@ router.get("/:id", authenticateToken, async (req, res) => {
 const hostGroup = await prisma.host_groups.findUnique({
 where: { id },
 include: {
-hosts: {
-select: {
-id: true,
-friendly_name: true,
-hostname: true,
-ip: true,
-os_type: true,
-os_version: true,
-status: true,
-last_update: true,
+host_group_memberships: {
+include: {
+hosts: {
+select: {
+id: true,
+friendly_name: true,
+hostname: true,
+ip: true,
+os_type: true,
+os_version: true,
+status: true,
+last_update: true,
+},
+},
 },
 },
 },
@@ -195,7 +199,7 @@ router.delete(
 include: {
 _count: {
 select: {
-hosts: true,
+host_group_memberships: true,
 },
 },
 },
@@ -205,11 +209,10 @@ router.delete(
 return res.status(404).json({ error: "Host group not found" });
 }
 
-// If host group has hosts, ungroup them first
-if (existingGroup._count.hosts > 0) {
-await prisma.hosts.updateMany({
+// If host group has memberships, remove them first
+if (existingGroup._count.host_group_memberships > 0) {
+await prisma.host_group_memberships.deleteMany({
 where: { host_group_id: id },
-data: { host_group_id: null },
 });
 }
 
@@ -231,7 +234,13 @@ router.get("/:id/hosts", authenticateToken, async (req, res) => {
 const { id } = req.params;
 
 const hosts = await prisma.hosts.findMany({
-where: { host_group_id: id },
+where: {
+host_group_memberships: {
+some: {
+host_group_id: id,
+},
+},
+},
 select: {
 id: true,
 friendly_name: true,

(File diff suppressed because it is too large)
@@ -1,8 +1,8 @@
 const express = require("express");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Get all packages with their update status
 router.get("/", async (req, res) => {
@@ -1,5 +1,5 @@
 const express = require("express");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { authenticateToken } = require("../middleware/auth");
 const {
 requireManageSettings,
@@ -7,7 +7,7 @@ const {
 } = require("../middleware/permissions");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Get all role permissions (allow users who can manage users to view roles)
 router.get(
@@ -1,6 +1,6 @@
 const express = require("express");
 const { body, validationResult } = require("express-validator");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { authenticateToken } = require("../middleware/auth");
 const {
 requireViewHosts,
@@ -8,7 +8,7 @@ const {
 } = require("../middleware/permissions");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Get all repositories with host count
 router.get("/", authenticateToken, requireViewHosts, async (_req, res) => {
@@ -1,9 +1,9 @@
 const express = require("express");
 const router = express.Router();
-const { createPrismaClient } = require("../config/database");
+const { getPrismaClient } = require("../config/prisma");
 const { authenticateToken } = require("../middleware/auth");
 
-const prisma = createPrismaClient();
+const prisma = getPrismaClient();
 
 /**
 * Global search endpoint
@@ -1,109 +1,16 @@
 const express = require("express");
 const { body, validationResult } = require("express-validator");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { authenticateToken } = require("../middleware/auth");
 const { requireManageSettings } = require("../middleware/permissions");
 const { getSettings, updateSettings } = require("../services/settingsService");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
-// Function to trigger crontab updates on all hosts with auto-update enabled
-async function triggerCrontabUpdates() {
-try {
-console.log(
-"Triggering crontab updates on all hosts with auto-update enabled...",
-);
-
-// Get current settings for server URL
-const settings = await getSettings();
-const serverUrl = settings.server_url;
-
-// Get all hosts that have auto-update enabled
-const hosts = await prisma.hosts.findMany({
-where: {
-auto_update: true,
-status: "active", // Only update active hosts
-},
-select: {
-id: true,
-friendly_name: true,
-api_id: true,
-api_key: true,
-},
-});
-
-console.log(`Found ${hosts.length} hosts with auto-update enabled`);
-
-// For each host, we'll send a special update command that triggers crontab update
-// This is done by sending a ping with a special flag
-for (const host of hosts) {
-try {
-console.log(
-`Triggering crontab update for host: ${host.friendly_name}`,
-);
-
-// We'll use the existing ping endpoint but add a special parameter
-// The agent will detect this and run update-crontab command
-const http = require("node:http");
-const https = require("node:https");
-
-const url = new URL(`${serverUrl}/api/v1/hosts/ping`);
-const isHttps = url.protocol === "https:";
-const client = isHttps ? https : http;
-
-const postData = JSON.stringify({
-triggerCrontabUpdate: true,
-message: "Update interval changed, please update your crontab",
-});
-
-const options = {
-hostname: url.hostname,
-port: url.port || (isHttps ? 443 : 80),
-path: url.pathname,
-method: "POST",
-headers: {
-"Content-Type": "application/json",
-"Content-Length": Buffer.byteLength(postData),
-"X-API-ID": host.api_id,
-"X-API-KEY": host.api_key,
-},
-};
-
-const req = client.request(options, (res) => {
-if (res.statusCode === 200) {
-console.log(
-`Successfully triggered crontab update for ${host.friendly_name}`,
-);
-} else {
-console.error(
-`Failed to trigger crontab update for ${host.friendly_name}: ${res.statusCode}`,
-);
-}
-});
-
-req.on("error", (error) => {
-console.error(
-`Error triggering crontab update for ${host.friendly_name}:`,
-error.message,
-);
-});
-
-req.write(postData);
-req.end();
-} catch (error) {
-console.error(
-`Error triggering crontab update for ${host.friendly_name}:`,
-error.message,
-);
-}
-}
-
-console.log("Crontab update trigger completed");
-} catch (error) {
-console.error("Error in triggerCrontabUpdates:", error);
-}
-}
+// WebSocket broadcaster for agent policy updates (no longer used - queue-based delivery preferred)
+// const { broadcastSettingsUpdate } = require("../services/agentWs");
+const { queueManager, QUEUE_NAMES } = require("../services/automation");
 
 // Helpers
 function normalizeUpdateInterval(minutes) {
@@ -290,15 +197,36 @@ router.put(
 
 console.log("Settings updated successfully:", updatedSettings);
 
-// If update interval changed, trigger crontab updates on all hosts with auto-update enabled
+// If update interval changed, enqueue persistent jobs for agents
 if (
 updateInterval !== undefined &&
 oldUpdateInterval !== updateData.update_interval
 ) {
 console.log(
-`Update interval changed from ${oldUpdateInterval} to ${updateData.update_interval} minutes. Triggering crontab updates...`,
+`Update interval changed from ${oldUpdateInterval} to ${updateData.update_interval} minutes. Enqueueing agent settings updates...`,
 );
-await triggerCrontabUpdates();
+
+const hosts = await prisma.hosts.findMany({
+where: { status: "active" },
+select: { api_id: true },
+});
+
+const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
+const jobs = hosts.map((h) => ({
+name: "settings_update",
+data: {
+api_id: h.api_id,
+type: "settings_update",
+update_interval: updateData.update_interval,
+},
+opts: { attempts: 10, backoff: { type: "exponential", delay: 5000 } },
+}));
+
+// Bulk add jobs
+await queue.addBulk(jobs);
+
+// Note: Queue-based delivery handles retries and ensures reliable delivery
+// No need for immediate broadcast as it would cause duplicate messages
 }
 
 res.json({
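As an aside, the settings change above is delivered to agents through the agent-commands queue. A minimal sketch of what a consumer of those `settings_update` jobs could look like, assuming BullMQ is the queue library in use; the queue name string, the Redis connection options, and `applySettings` are placeholders rather than PatchMon's actual delivery mechanism.

```js
const { Worker } = require("bullmq");

// Hypothetical stand-in for whatever pushes new settings to an agent.
async function applySettings(apiId, settings) {
  console.log(`would update agent ${apiId} with`, settings);
}

// Hypothetical consumer; queue name and connection are placeholders.
const worker = new Worker(
  "agent-commands",
  async (job) => {
    if (job.name === "settings_update") {
      const { api_id, update_interval } = job.data;
      await applySettings(api_id, { update_interval });
    }
  },
  { connection: { host: "127.0.0.1", port: 6379 } },
);

worker.on("failed", (job, err) => {
  console.error(`settings_update job ${job?.id} failed:`, err.message);
});
```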
@@ -1,12 +1,12 @@
 const express = require("express");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const speakeasy = require("speakeasy");
 const QRCode = require("qrcode");
 const { authenticateToken } = require("../middleware/auth");
 const { body, validationResult } = require("express-validator");
 
 const router = express.Router();
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Generate TFA secret and QR code
 router.get("/setup", authenticateToken, async (req, res) => {
@@ -1,12 +1,12 @@
 const express = require("express");
 const { authenticateToken } = require("../middleware/auth");
 const { requireManageSettings } = require("../middleware/permissions");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Default GitHub repository URL
-const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
+const DEFAULT_GITHUB_REPO = "https://github.com/PatchMon/PatchMon.git";
 
 const router = express.Router();
 
@@ -14,13 +14,13 @@ const router = express.Router();
 function getCurrentVersion() {
 try {
 const packageJson = require("../../package.json");
-return packageJson?.version || "1.2.7";
+return packageJson?.version || "1.3.0";
 } catch (packageError) {
 console.warn(
 "Could not read version from package.json, using fallback:",
 packageError.message,
 );
-return "1.2.7";
+return "1.3.0";
 }
 }
 
@@ -126,43 +126,61 @@ async function getLatestCommit(owner, repo) {
 
 // Helper function to get commit count difference
 async function getCommitDifference(owner, repo, currentVersion) {
-try {
-const currentVersionTag = `v${currentVersion}`;
-// Compare main branch with the released version tag
-const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${currentVersionTag}...main`;
-
-const response = await fetch(apiUrl, {
-method: "GET",
-headers: {
-Accept: "application/vnd.github.v3+json",
-"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
-},
-});
-
-if (!response.ok) {
-const errorText = await response.text();
-if (
-errorText.includes("rate limit") ||
-errorText.includes("API rate limit")
-) {
-throw new Error("GitHub API rate limit exceeded");
+// Try both with and without 'v' prefix for compatibility
+const versionTags = [
+currentVersion, // Try without 'v' first (new format)
+`v${currentVersion}`, // Try with 'v' prefix (old format)
+];
+
+for (const versionTag of versionTags) {
+try {
+// Compare main branch with the released version tag
+const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${versionTag}...main`;
+
+const response = await fetch(apiUrl, {
+method: "GET",
+headers: {
+Accept: "application/vnd.github.v3+json",
+"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
+},
+});
+
+if (!response.ok) {
+const errorText = await response.text();
+if (
+errorText.includes("rate limit") ||
+errorText.includes("API rate limit")
+) {
+throw new Error("GitHub API rate limit exceeded");
+}
+// If 404, try next tag format
+if (response.status === 404) {
+continue;
+}
+throw new Error(
+`GitHub API error: ${response.status} ${response.statusText}`,
+);
 }
-throw new Error(
-`GitHub API error: ${response.status} ${response.statusText}`,
-);
-}
-
 const compareData = await response.json();
 return {
 commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
 commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
 totalCommits: compareData.total_commits || 0,
 branchInfo: "main branch vs release",
 };
 } catch (error) {
-console.error("Error fetching commit difference:", error.message);
-throw error;
+// If rate limit, throw immediately
+if (error.message.includes("rate limit")) {
+throw error;
+}
+}
 }
+
+// If all attempts failed, throw error
+throw new Error(
+`Could not find tag '${currentVersion}' or 'v${currentVersion}' in repository`,
+);
 }
 
 // Helper function to compare version strings (semantic versioning)
@@ -274,11 +292,11 @@ router.get(
 ) {
 console.log("GitHub API rate limited, providing fallback data");
 latestRelease = {
-tagName: "v1.2.7",
-version: "1.2.7",
+tagName: "v1.2.8",
+version: "1.2.8",
 publishedAt: "2025-10-02T17:12:53Z",
 htmlUrl:
-"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.7",
+"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.8",
 };
 latestCommit = {
 sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
@@ -296,10 +314,13 @@ router.get(
 };
 } else {
 // Fall back to cached data for other errors
+const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
 latestRelease = settings.latest_version
 ? {
 version: settings.latest_version,
 tagName: `v${settings.latest_version}`,
+publishedAt: null, // Only use date from GitHub API, not cached data
+htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/v${settings.latest_version}`,
 }
 : null;
 }
backend/src/routes/wsRoutes.js (new file, 139 lines)
@@ -0,0 +1,139 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const {
  getConnectionInfo,
  subscribeToConnectionChanges,
} = require("../services/agentWs");
const {
  validate_session,
  update_session_activity,
} = require("../utils/session_manager");

const router = express.Router();

// Get WebSocket connection status by api_id (no database access - pure memory lookup)
router.get("/status/:apiId", authenticateToken, async (req, res) => {
  try {
    const { apiId } = req.params;

    // Direct in-memory check - no database query needed
    const connectionInfo = getConnectionInfo(apiId);

    // Minimal response for maximum speed
    res.json({
      success: true,
      data: connectionInfo,
    });
  } catch (error) {
    console.error("Error fetching WebSocket status:", error);
    res.status(500).json({
      success: false,
      error: "Failed to fetch WebSocket status",
    });
  }
});

// Server-Sent Events endpoint for real-time status updates (no polling needed!)
router.get("/status/:apiId/stream", async (req, res) => {
  try {
    const { apiId } = req.params;

    // Manual authentication for SSE (EventSource doesn't support custom headers)
    const token =
      req.query.token || req.headers.authorization?.replace("Bearer ", "");
    if (!token) {
      return res.status(401).json({ error: "Authentication required" });
    }

    // Verify token manually with session validation
    const jwt = require("jsonwebtoken");
    try {
      const decoded = jwt.verify(token, process.env.JWT_SECRET);

      // Validate session (same as regular auth middleware)
      const validation = await validate_session(decoded.sessionId, token);
      if (!validation.valid) {
        return res.status(401).json({ error: "Invalid or expired session" });
      }

      // Update session activity to prevent inactivity timeout
      await update_session_activity(decoded.sessionId);

      req.user = validation.user;
    } catch (_err) {
      return res.status(401).json({ error: "Invalid or expired token" });
    }

    console.log("[SSE] Client connected for api_id:", apiId);

    // Set headers for SSE
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache");
    res.setHeader("Connection", "keep-alive");
    res.setHeader("X-Accel-Buffering", "no"); // Disable nginx buffering

    // Send initial status immediately
    const initialInfo = getConnectionInfo(apiId);
    res.write(`data: ${JSON.stringify(initialInfo)}\n\n`);
    res.flushHeaders(); // Ensure headers are sent immediately

    // Subscribe to connection changes for this specific api_id
    const unsubscribe = subscribeToConnectionChanges(apiId, (_connected) => {
      try {
        // Push update to client instantly when status changes
        const connectionInfo = getConnectionInfo(apiId);
        console.log(
          `[SSE] Pushing status change for ${apiId}: connected=${connectionInfo.connected} secure=${connectionInfo.secure}`,
        );
        res.write(`data: ${JSON.stringify(connectionInfo)}\n\n`);
      } catch (err) {
        console.error("[SSE] Error writing to stream:", err);
      }
    });

    // Heartbeat to keep connection alive (every 30 seconds)
    const heartbeat = setInterval(() => {
      try {
        res.write(": heartbeat\n\n");
      } catch (err) {
        console.error("[SSE] Error writing heartbeat:", err);
        clearInterval(heartbeat);
      }
    }, 30000);

    // Cleanup on client disconnect
    req.on("close", () => {
      console.log("[SSE] Client disconnected for api_id:", apiId);
      clearInterval(heartbeat);
      unsubscribe();
    });

    // Handle errors - distinguish between different error types
    req.on("error", (err) => {
      // Only log non-connection-reset errors to reduce noise
      if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
        console.error("[SSE] Request error:", err);
      } else {
        console.log("[SSE] Client connection reset for api_id:", apiId);
      }
      clearInterval(heartbeat);
      unsubscribe();
    });

    // Handle response errors
    res.on("error", (err) => {
      if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
        console.error("[SSE] Response error:", err);
      }
      clearInterval(heartbeat);
      unsubscribe();
    });
  } catch (error) {
    console.error("[SSE] Unexpected error:", error);
    if (!res.headersSent) {
      res.status(500).json({ error: "Internal server error" });
    }
  }
});

module.exports = router;
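A minimal browser-side sketch of consuming the SSE stream above. Because `EventSource` cannot set an `Authorization` header, the JWT is passed as the `token` query parameter, matching the manual authentication in the route; the `apiId` value and where the app stores its JWT are placeholders.

```js
// Hypothetical frontend consumer for /api/v1/ws/status/:apiId/stream
const apiId = "host-api-id"; // placeholder
const token = localStorage.getItem("token"); // wherever the app keeps its JWT

const source = new EventSource(
  `/api/v1/ws/status/${apiId}/stream?token=${encodeURIComponent(token)}`,
);

source.onmessage = (event) => {
  const info = JSON.parse(event.data);
  console.log("agent connected:", info.connected, "secure:", info.secure);
};

source.onerror = () => {
  // EventSource retries automatically; close it when the page no longer needs it.
  // source.close();
};
```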
@@ -39,11 +39,12 @@ const express = require("express");
|
|||||||
const cors = require("cors");
|
const cors = require("cors");
|
||||||
const helmet = require("helmet");
|
const helmet = require("helmet");
|
||||||
const rateLimit = require("express-rate-limit");
|
const rateLimit = require("express-rate-limit");
|
||||||
|
const cookieParser = require("cookie-parser");
|
||||||
const {
|
const {
|
||||||
createPrismaClient,
|
getPrismaClient,
|
||||||
waitForDatabase,
|
waitForDatabase,
|
||||||
disconnectPrisma,
|
disconnectPrisma,
|
||||||
} = require("./config/database");
|
} = require("./config/prisma");
|
||||||
const winston = require("winston");
|
const winston = require("winston");
|
||||||
|
|
||||||
// Import routes
|
// Import routes
|
||||||
@@ -62,12 +63,20 @@ const versionRoutes = require("./routes/versionRoutes");
|
|||||||
const tfaRoutes = require("./routes/tfaRoutes");
|
const tfaRoutes = require("./routes/tfaRoutes");
|
||||||
const searchRoutes = require("./routes/searchRoutes");
|
const searchRoutes = require("./routes/searchRoutes");
|
||||||
const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
|
const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
|
||||||
const updateScheduler = require("./services/updateScheduler");
|
const gethomepageRoutes = require("./routes/gethomepageRoutes");
|
||||||
|
const automationRoutes = require("./routes/automationRoutes");
|
||||||
|
const dockerRoutes = require("./routes/dockerRoutes");
|
||||||
|
const wsRoutes = require("./routes/wsRoutes");
|
||||||
|
const agentVersionRoutes = require("./routes/agentVersionRoutes");
|
||||||
const { initSettings } = require("./services/settingsService");
|
const { initSettings } = require("./services/settingsService");
|
||||||
const { cleanup_expired_sessions } = require("./utils/session_manager");
|
const { queueManager } = require("./services/automation");
|
||||||
|
const { authenticateToken, requireAdmin } = require("./middleware/auth");
|
||||||
|
const { createBullBoard } = require("@bull-board/api");
|
||||||
|
const { BullMQAdapter } = require("@bull-board/api/bullMQAdapter");
|
||||||
|
const { ExpressAdapter } = require("@bull-board/express");
|
||||||
|
|
||||||
// Initialize Prisma client with optimized connection pooling for multiple instances
|
// Initialize Prisma client with optimized connection pooling for multiple instances
|
||||||
const prisma = createPrismaClient();
|
const prisma = getPrismaClient();
|
||||||
|
|
||||||
// Function to check and create default role permissions on startup
|
// Function to check and create default role permissions on startup
|
||||||
async function checkAndCreateRolePermissions() {
|
async function checkAndCreateRolePermissions() {
|
||||||
@@ -251,6 +260,10 @@ if (process.env.ENABLE_LOGGING === "true") {
|
|||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
const PORT = process.env.PORT || 3001;
|
const PORT = process.env.PORT || 3001;
|
||||||
|
const http = require("node:http");
|
||||||
|
const server = http.createServer(app);
|
||||||
|
const { init: initAgentWs } = require("./services/agentWs");
|
||||||
|
const agentVersionService = require("./services/agentVersionService");
|
||||||
|
|
||||||
// Trust proxy (needed when behind reverse proxy) and remove X-Powered-By
|
// Trust proxy (needed when behind reverse proxy) and remove X-Powered-By
|
||||||
if (process.env.TRUST_PROXY) {
|
if (process.env.TRUST_PROXY) {
|
||||||
@@ -328,9 +341,7 @@ const parseOrigins = (val) =>
|
|||||||
.map((s) => s.trim())
|
.map((s) => s.trim())
|
||||||
.filter(Boolean);
|
.filter(Boolean);
|
||||||
const allowedOrigins = parseOrigins(
|
const allowedOrigins = parseOrigins(
|
||||||
process.env.CORS_ORIGINS ||
|
process.env.CORS_ORIGINS || process.env.CORS_ORIGIN || "http://fabio:3000",
|
||||||
process.env.CORS_ORIGIN ||
|
|
||||||
"http://localhost:3000",
|
|
||||||
);
|
);
|
||||||
app.use(
|
app.use(
|
||||||
cors({
|
cors({
|
||||||
@@ -338,12 +349,17 @@ app.use(
|
|||||||
// Allow non-browser/SSR tools with no origin
|
// Allow non-browser/SSR tools with no origin
|
||||||
if (!origin) return callback(null, true);
|
if (!origin) return callback(null, true);
|
||||||
if (allowedOrigins.includes(origin)) return callback(null, true);
|
if (allowedOrigins.includes(origin)) return callback(null, true);
|
||||||
|
// Allow same-origin requests (e.g., Bull Board accessing its own API)
|
||||||
|
// This allows http://hostname:3001 to make requests to http://hostname:3001
|
||||||
|
if (origin?.includes(":3001")) return callback(null, true);
|
||||||
return callback(new Error("Not allowed by CORS"));
|
return callback(new Error("Not allowed by CORS"));
|
||||||
},
|
},
|
||||||
credentials: true,
|
credentials: true,
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
app.use(limiter);
|
app.use(limiter);
|
||||||
|
// Cookie parser for Bull Board sessions
|
||||||
|
app.use(cookieParser());
|
||||||
// Reduce body size limits to reasonable defaults
|
// Reduce body size limits to reasonable defaults
|
||||||
app.use(express.json({ limit: process.env.JSON_BODY_LIMIT || "5mb" }));
|
app.use(express.json({ limit: process.env.JSON_BODY_LIMIT || "5mb" }));
|
||||||
app.use(
|
app.use(
|
||||||
@@ -422,12 +438,142 @@ app.use(
|
|||||||
authLimiter,
|
authLimiter,
|
||||||
autoEnrollmentRoutes,
|
autoEnrollmentRoutes,
|
||||||
);
|
);
|
||||||
|
app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/automation`, automationRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/docker`, dockerRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/ws`, wsRoutes);
|
||||||
|
app.use(`/api/${apiVersion}/agent`, agentVersionRoutes);
|
||||||
|
|
||||||
|
// Bull Board - will be populated after queue manager initializes
|
||||||
|
let bullBoardRouter = null;
|
||||||
|
const bullBoardSessions = new Map(); // Store authenticated sessions
|
||||||
|
|
||||||
|
// Mount Bull Board at /bullboard for cleaner URL
|
||||||
|
app.use(`/bullboard`, (_req, res, next) => {
|
||||||
|
// Relax COOP/COEP for Bull Board in non-production to avoid browser warnings
|
||||||
|
if (process.env.NODE_ENV !== "production") {
|
||||||
|
res.setHeader("Cross-Origin-Opener-Policy", "same-origin-allow-popups");
|
||||||
|
res.setHeader("Cross-Origin-Embedder-Policy", "unsafe-none");
|
||||||
|
}
|
||||||
|
|
||||||
|
next();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Authentication middleware for Bull Board
|
||||||
|
app.use(`/bullboard`, async (req, res, next) => {
|
||||||
|
// Skip authentication for static assets only
|
||||||
|
if (req.path.includes("/static/") || req.path.includes("/favicon")) {
|
||||||
|
return next();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for bull-board-session cookie first
|
||||||
|
const sessionId = req.cookies["bull-board-session"];
|
||||||
|
if (sessionId) {
|
||||||
|
const session = bullBoardSessions.get(sessionId);
|
||||||
|
if (session && Date.now() - session.timestamp < 3600000) {
|
||||||
|
// 1 hour
|
||||||
|
// Valid session, extend it
|
||||||
|
session.timestamp = Date.now();
|
||||||
|
return next();
|
||||||
|
} else if (session) {
|
||||||
|
// Expired session, remove it
|
||||||
|
bullBoardSessions.delete(sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// No valid session, check for token
|
||||||
|
let token = req.query.token;
|
||||||
|
if (!token && req.headers.authorization) {
|
||||||
|
token = req.headers.authorization.replace("Bearer ", "");
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no token, deny access
|
||||||
|
if (!token) {
|
||||||
|
return res.status(401).json({ error: "Access token required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add token to headers for authentication
|
||||||
|
req.headers.authorization = `Bearer ${token}`;
|
||||||
|
|
||||||
|
// Authenticate the user
|
||||||
|
return authenticateToken(req, res, (err) => {
|
||||||
|
if (err) {
|
||||||
|
return res.status(401).json({ error: "Authentication failed" });
|
||||||
|
}
|
||||||
|
return requireAdmin(req, res, (adminErr) => {
|
||||||
|
if (adminErr) {
|
||||||
|
return res.status(403).json({ error: "Admin access required" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication successful - create a session
|
||||||
|
const newSessionId = require("node:crypto")
|
||||||
|
.randomBytes(32)
|
||||||
|
.toString("hex");
|
||||||
|
bullBoardSessions.set(newSessionId, {
|
||||||
|
timestamp: Date.now(),
|
||||||
|
userId: req.user.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set session cookie
|
||||||
|
res.cookie("bull-board-session", newSessionId, {
|
||||||
|
httpOnly: true,
|
||||||
|
secure: process.env.NODE_ENV === "production",
|
||||||
|
sameSite: "lax",
|
||||||
|
maxAge: 3600000, // 1 hour
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clean up old sessions periodically
|
||||||
|
if (bullBoardSessions.size > 100) {
|
||||||
|
const now = Date.now();
|
||||||
|
for (const [sid, session] of bullBoardSessions.entries()) {
|
||||||
|
if (now - session.timestamp > 3600000) {
|
||||||
|
bullBoardSessions.delete(sid);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
app.use(`/bullboard`, (req, res, next) => {
|
||||||
|
if (bullBoardRouter) {
|
||||||
|
return bullBoardRouter(req, res, next);
|
||||||
|
}
|
||||||
|
+	return res.status(503).json({ error: "Bull Board not initialized yet" });
+});
+
+// Error handler specifically for Bull Board routes
+app.use("/bullboard", (err, req, res, _next) => {
+	console.error("Bull Board error on", req.method, req.url);
+	console.error("Error details:", err.message);
+	console.error("Stack:", err.stack);
+	if (process.env.ENABLE_LOGGING === "true") {
+		logger.error(`Bull Board error on ${req.method} ${req.url}:`, err);
+	}
+	res.status(500).json({
+		error: "Internal server error",
+		message: err.message,
+		path: req.path,
+		url: req.url,
+	});
+});
+
 // Error handling middleware
 app.use((err, _req, res, _next) => {
 	if (process.env.ENABLE_LOGGING === "true") {
 		logger.error(err.stack);
 	}
 
+	// Special handling for CORS errors - always include the message
+	if (err.message?.includes("Not allowed by CORS")) {
+		return res.status(500).json({
+			error: "Something went wrong!",
+			message: err.message, // Always include CORS error message
+		});
+	}
+
 	res.status(500).json({
 		error: "Something went wrong!",
 		message: process.env.NODE_ENV === "development" ? err.message : undefined,
@@ -444,10 +590,7 @@ process.on("SIGINT", async () => {
 	if (process.env.ENABLE_LOGGING === "true") {
 		logger.info("SIGINT received, shutting down gracefully");
 	}
-	if (app.locals.session_cleanup_interval) {
-		clearInterval(app.locals.session_cleanup_interval);
-	}
-	updateScheduler.stop();
+	await queueManager.shutdown();
 	await disconnectPrisma(prisma);
 	process.exit(0);
 });
@@ -456,10 +599,7 @@ process.on("SIGTERM", async () => {
 	if (process.env.ENABLE_LOGGING === "true") {
 		logger.info("SIGTERM received, shutting down gracefully");
 	}
-	if (app.locals.session_cleanup_interval) {
-		clearInterval(app.locals.session_cleanup_interval);
-	}
-	updateScheduler.stop();
+	await queueManager.shutdown();
 	await disconnectPrisma(prisma);
 	process.exit(0);
 });
@@ -728,34 +868,41 @@ async function startServer() {
 		// Initialize dashboard preferences for all users
 		await initializeDashboardPreferences();
 
-		// Initial session cleanup
-		await cleanup_expired_sessions();
+		// Initialize BullMQ queue manager
+		await queueManager.initialize();
 
-		// Schedule session cleanup every hour
-		const session_cleanup_interval = setInterval(
-			async () => {
-				try {
-					await cleanup_expired_sessions();
-				} catch (error) {
-					console.error("Session cleanup error:", error);
-				}
-			},
-			60 * 60 * 1000,
-		); // Every hour
+		// Schedule recurring jobs
+		await queueManager.scheduleAllJobs();
 
-		app.listen(PORT, () => {
+		// Set up Bull Board for queue monitoring
+		const serverAdapter = new ExpressAdapter();
+		// Set basePath to match where we mount the router
+		serverAdapter.setBasePath("/bullboard");
+
+		const { QUEUE_NAMES } = require("./services/automation");
+		const bullAdapters = Object.values(QUEUE_NAMES).map(
+			(queueName) => new BullMQAdapter(queueManager.queues[queueName]),
+		);
+
+		createBullBoard({
+			queues: bullAdapters,
+			serverAdapter: serverAdapter,
+		});
+
+		// Set the router for the Bull Board middleware (secured middleware above)
+		bullBoardRouter = serverAdapter.getRouter();
+		console.log("✅ Bull Board mounted at /bullboard (secured)");
+
+		// Initialize WS layer with the underlying HTTP server
+		initAgentWs(server, prisma);
+		await agentVersionService.initialize();
+
+		server.listen(PORT, () => {
 			if (process.env.ENABLE_LOGGING === "true") {
 				logger.info(`Server running on port ${PORT}`);
 				logger.info(`Environment: ${process.env.NODE_ENV}`);
-				logger.info("✅ Session cleanup scheduled (every hour)");
 			}
 
-			// Start update scheduler
-			updateScheduler.start();
 		});
 
-		// Store interval for cleanup on shutdown
-		app.locals.session_cleanup_interval = session_cleanup_interval;
 	} catch (error) {
 		console.error("❌ Failed to start server:", error.message);
 		process.exit(1);
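The `return res.status(503)` line at the top of this hunk is the tail of a guard middleware that sits above this excerpt. A minimal sketch of that pattern, assuming the `bullBoardRouter` variable assigned later in `startServer()`; the real middleware also applies authentication, which is omitted here:

```js
// Sketch only: mount /bullboard before startup, but proxy to the Bull Board
// router once startServer() has assigned it; until then, answer 503.
let bullBoardRouter = null;

app.use("/bullboard", (req, res, next) => {
	if (!bullBoardRouter) {
		return res.status(503).json({ error: "Bull Board not initialized yet" });
	}
	return bullBoardRouter(req, res, next);
});
```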
725 backend/src/services/agentVersionService.js (Normal file)
@@ -0,0 +1,725 @@
|
|||||||
|
const axios = require("axios");
|
||||||
|
const fs = require("node:fs").promises;
|
||||||
|
const path = require("node:path");
|
||||||
|
const { exec } = require("node:child_process");
|
||||||
|
const { promisify } = require("node:util");
|
||||||
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
|
// Simple semver comparison function
|
||||||
|
function compareVersions(version1, version2) {
|
||||||
|
const v1parts = version1.split(".").map(Number);
|
||||||
|
const v2parts = version2.split(".").map(Number);
|
||||||
|
|
||||||
|
// Ensure both arrays have the same length
|
||||||
|
while (v1parts.length < 3) v1parts.push(0);
|
||||||
|
while (v2parts.length < 3) v2parts.push(0);
|
||||||
|
|
||||||
|
for (let i = 0; i < 3; i++) {
|
||||||
|
if (v1parts[i] > v2parts[i]) return 1;
|
||||||
|
if (v1parts[i] < v2parts[i]) return -1;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
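// Example of compareVersions() with hypothetical inputs (not part of the service):
//   compareVersions("1.3.0", "1.10.0") // -1: fields compare numerically, 3 < 10
//   compareVersions("1.3", "1.3.0")    //  0: missing fields are padded with 0
//   compareVersions("2.0.0", "1.9.9")  //  1: left-hand version is newer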
|
||||||
|
const crypto = require("node:crypto");
|
||||||
|
|
||||||
|
class AgentVersionService {
|
||||||
|
constructor() {
|
||||||
|
this.githubApiUrl =
|
||||||
|
"https://api.github.com/repos/PatchMon/PatchMon-agent/releases";
|
||||||
|
this.agentsDir = path.resolve(__dirname, "../../../agents");
|
||||||
|
this.supportedArchitectures = [
|
||||||
|
"linux-amd64",
|
||||||
|
"linux-arm64",
|
||||||
|
"linux-386",
|
||||||
|
"linux-arm",
|
||||||
|
];
|
||||||
|
this.currentVersion = null;
|
||||||
|
this.latestVersion = null;
|
||||||
|
this.lastChecked = null;
|
||||||
|
this.checkInterval = 30 * 60 * 1000; // 30 minutes
|
||||||
|
}
|
||||||
|
|
||||||
|
async initialize() {
|
||||||
|
try {
|
||||||
|
// Ensure agents directory exists
|
||||||
|
await fs.mkdir(this.agentsDir, { recursive: true });
|
||||||
|
|
||||||
|
console.log("🔍 Testing GitHub API connectivity...");
|
||||||
|
try {
|
||||||
|
const testResponse = await axios.get(
|
||||||
|
"https://api.github.com/repos/PatchMon/PatchMon-agent/releases",
|
||||||
|
{
|
||||||
|
timeout: 5000,
|
||||||
|
headers: {
|
||||||
|
"User-Agent": "PatchMon-Server/1.0",
|
||||||
|
Accept: "application/vnd.github.v3+json",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
`✅ GitHub API accessible - found ${testResponse.data.length} releases`,
|
||||||
|
);
|
||||||
|
} catch (testError) {
|
||||||
|
console.error("❌ GitHub API not accessible:", testError.message);
|
||||||
|
if (testError.response) {
|
||||||
|
console.error(
|
||||||
|
"❌ Status:",
|
||||||
|
testError.response.status,
|
||||||
|
testError.response.statusText,
|
||||||
|
);
|
||||||
|
if (testError.response.status === 403) {
|
||||||
|
console.log("⚠️ GitHub API rate limit exceeded - will retry later");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current agent version by executing the binary
|
||||||
|
await this.getCurrentAgentVersion();
|
||||||
|
|
||||||
|
// Try to check for updates, but don't fail initialization if GitHub API is unavailable
|
||||||
|
try {
|
||||||
|
await this.checkForUpdates();
|
||||||
|
} catch (updateError) {
|
||||||
|
console.log(
|
||||||
|
"⚠️ Failed to check for updates on startup, will retry later:",
|
||||||
|
updateError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set up periodic checking
|
||||||
|
setInterval(() => {
|
||||||
|
this.checkForUpdates().catch((error) => {
|
||||||
|
console.log("⚠️ Periodic update check failed:", error.message);
|
||||||
|
});
|
||||||
|
}, this.checkInterval);
|
||||||
|
|
||||||
|
console.log("✅ Agent Version Service initialized");
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
"❌ Failed to initialize Agent Version Service:",
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getCurrentAgentVersion() {
|
||||||
|
try {
|
||||||
|
console.log("🔍 Getting current agent version...");
|
||||||
|
|
||||||
|
// Try to find the agent binary in agents/ folder only (what gets distributed)
|
||||||
|
const possiblePaths = [
|
||||||
|
path.join(this.agentsDir, "patchmon-agent-linux-amd64"),
|
||||||
|
path.join(this.agentsDir, "patchmon-agent"),
|
||||||
|
];
|
||||||
|
|
||||||
|
let agentPath = null;
|
||||||
|
for (const testPath of possiblePaths) {
|
||||||
|
try {
|
||||||
|
await fs.access(testPath);
|
||||||
|
agentPath = testPath;
|
||||||
|
console.log(`✅ Found agent binary at: ${testPath}`);
|
||||||
|
break;
|
||||||
|
} catch {
|
||||||
|
// Path doesn't exist, continue to next
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!agentPath) {
|
||||||
|
console.log(
|
||||||
|
"⚠️ No agent binary found in agents/ folder, current version will be unknown",
|
||||||
|
);
|
||||||
|
console.log("💡 Use the Download Updates button to get agent binaries");
|
||||||
|
this.currentVersion = null;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute the agent binary with help flag to get version info
|
||||||
|
try {
|
||||||
|
const { stdout, stderr } = await execAsync(`${agentPath} --help`, {
|
||||||
|
timeout: 10000,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (stderr) {
|
||||||
|
console.log("⚠️ Agent help stderr:", stderr);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse version from help output (e.g., "PatchMon Agent v1.3.0")
|
||||||
|
const versionMatch = stdout.match(
|
||||||
|
/PatchMon Agent v([0-9]+\.[0-9]+\.[0-9]+)/i,
|
||||||
|
);
|
||||||
|
if (versionMatch) {
|
||||||
|
this.currentVersion = versionMatch[1];
|
||||||
|
console.log(`✅ Current agent version: ${this.currentVersion}`);
|
||||||
|
} else {
|
||||||
|
console.log(
|
||||||
|
"⚠️ Could not parse version from agent help output:",
|
||||||
|
stdout,
|
||||||
|
);
|
||||||
|
this.currentVersion = null;
|
||||||
|
}
|
||||||
|
} catch (execError) {
|
||||||
|
console.error("❌ Failed to execute agent binary:", execError.message);
|
||||||
|
this.currentVersion = null;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Failed to get current agent version:", error.message);
|
||||||
|
this.currentVersion = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async checkForUpdates() {
|
||||||
|
try {
|
||||||
|
console.log("🔍 Checking for agent updates...");
|
||||||
|
|
||||||
|
const response = await axios.get(this.githubApiUrl, {
|
||||||
|
timeout: 10000,
|
||||||
|
headers: {
|
||||||
|
"User-Agent": "PatchMon-Server/1.0",
|
||||||
|
Accept: "application/vnd.github.v3+json",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`📡 GitHub API response status: ${response.status}`);
|
||||||
|
console.log(`📦 Found ${response.data.length} releases`);
|
||||||
|
|
||||||
|
const releases = response.data;
|
||||||
|
if (releases.length === 0) {
|
||||||
|
console.log("ℹ️ No releases found");
|
||||||
|
this.latestVersion = null;
|
||||||
|
this.lastChecked = new Date();
|
||||||
|
return {
|
||||||
|
latestVersion: null,
|
||||||
|
currentVersion: this.currentVersion,
|
||||||
|
hasUpdate: false,
|
||||||
|
lastChecked: this.lastChecked,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const latestRelease = releases[0];
|
||||||
|
this.latestVersion = latestRelease.tag_name.replace("v", ""); // Remove 'v' prefix
|
||||||
|
this.lastChecked = new Date();
|
||||||
|
|
||||||
|
console.log(`📦 Latest agent version: ${this.latestVersion}`);
|
||||||
|
|
||||||
|
// Don't download binaries automatically - only when explicitly requested
|
||||||
|
console.log(
|
||||||
|
"ℹ️ Skipping automatic binary download - binaries will be downloaded on demand",
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
latestVersion: this.latestVersion,
|
||||||
|
currentVersion: this.currentVersion,
|
||||||
|
hasUpdate: this.currentVersion !== this.latestVersion,
|
||||||
|
lastChecked: this.lastChecked,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Failed to check for updates:", error.message);
|
||||||
|
if (error.response) {
|
||||||
|
console.error(
|
||||||
|
"❌ GitHub API error:",
|
||||||
|
error.response.status,
|
||||||
|
error.response.statusText,
|
||||||
|
);
|
||||||
|
console.error(
|
||||||
|
"❌ Rate limit info:",
|
||||||
|
error.response.headers["x-ratelimit-remaining"],
|
||||||
|
"/",
|
||||||
|
error.response.headers["x-ratelimit-limit"],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadBinariesToAgentsFolder(release) {
|
||||||
|
try {
|
||||||
|
console.log(
|
||||||
|
`⬇️ Downloading binaries for version ${release.tag_name} to agents folder...`,
|
||||||
|
);
|
||||||
|
|
||||||
|
for (const arch of this.supportedArchitectures) {
|
||||||
|
const assetName = `patchmon-agent-${arch}`;
|
||||||
|
const asset = release.assets.find((a) => a.name === assetName);
|
||||||
|
|
||||||
|
if (!asset) {
|
||||||
|
console.warn(`⚠️ Binary not found for architecture: ${arch}`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const binaryPath = path.join(this.agentsDir, assetName);
|
||||||
|
|
||||||
|
console.log(`⬇️ Downloading ${assetName}...`);
|
||||||
|
|
||||||
|
const response = await axios.get(asset.browser_download_url, {
|
||||||
|
responseType: "stream",
|
||||||
|
timeout: 60000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const writer = require("node:fs").createWriteStream(binaryPath);
|
||||||
|
response.data.pipe(writer);
|
||||||
|
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
writer.on("finish", resolve);
|
||||||
|
writer.on("error", reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Make executable
|
||||||
|
await fs.chmod(binaryPath, "755");
|
||||||
|
|
||||||
|
console.log(`✅ Downloaded: ${assetName} to agents folder`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
"❌ Failed to download binaries to agents folder:",
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadBinaryForVersion(version, architecture) {
|
||||||
|
try {
|
||||||
|
console.log(
|
||||||
|
`⬇️ Downloading binary for version ${version} architecture ${architecture}...`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get the release info from GitHub
|
||||||
|
const response = await axios.get(this.githubApiUrl, {
|
||||||
|
timeout: 10000,
|
||||||
|
headers: {
|
||||||
|
"User-Agent": "PatchMon-Server/1.0",
|
||||||
|
Accept: "application/vnd.github.v3+json",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const releases = response.data;
|
||||||
|
const release = releases.find(
|
||||||
|
(r) => r.tag_name.replace("v", "") === version,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!release) {
|
||||||
|
throw new Error(`Release ${version} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const assetName = `patchmon-agent-${architecture}`;
|
||||||
|
const asset = release.assets.find((a) => a.name === assetName);
|
||||||
|
|
||||||
|
if (!asset) {
|
||||||
|
throw new Error(`Binary not found for architecture: ${architecture}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save into the agents/ folder under the name getBinaryPath() expects
const binaryPath = path.join(this.agentsDir, assetName);
|
||||||
|
|
||||||
|
console.log(`⬇️ Downloading ${assetName}...`);
|
||||||
|
|
||||||
|
const downloadResponse = await axios.get(asset.browser_download_url, {
|
||||||
|
responseType: "stream",
|
||||||
|
timeout: 60000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const writer = require("node:fs").createWriteStream(binaryPath);
|
||||||
|
downloadResponse.data.pipe(writer);
|
||||||
|
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
writer.on("finish", resolve);
|
||||||
|
writer.on("error", reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Make executable
|
||||||
|
await fs.chmod(binaryPath, "755");
|
||||||
|
|
||||||
|
console.log(`✅ Downloaded: ${assetName}`);
|
||||||
|
return binaryPath;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`❌ Failed to download binary ${version}-${architecture}:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getBinaryPath(version, architecture) {
|
||||||
|
const binaryName = `patchmon-agent-${architecture}`;
|
||||||
|
const binaryPath = path.join(this.agentsDir, binaryName);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await fs.access(binaryPath);
|
||||||
|
return binaryPath;
|
||||||
|
} catch {
|
||||||
|
throw new Error(`Binary not found: ${binaryName} version ${version}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async serveBinary(version, architecture, res) {
|
||||||
|
try {
|
||||||
|
// Check if binary exists, if not download it
|
||||||
|
const binaryPath = await this.getBinaryPath(version, architecture);
|
||||||
|
const stats = await fs.stat(binaryPath);
|
||||||
|
|
||||||
|
res.setHeader("Content-Type", "application/octet-stream");
|
||||||
|
res.setHeader(
|
||||||
|
"Content-Disposition",
|
||||||
|
`attachment; filename="patchmon-agent-${architecture}"`,
|
||||||
|
);
|
||||||
|
res.setHeader("Content-Length", stats.size);
|
||||||
|
|
||||||
|
// Add cache headers
|
||||||
|
res.setHeader("Cache-Control", "public, max-age=3600");
|
||||||
|
res.setHeader("ETag", `"${version}-${architecture}"`);
|
||||||
|
|
||||||
|
const stream = require("node:fs").createReadStream(binaryPath);
|
||||||
|
stream.pipe(res);
|
||||||
|
} catch (_error) {
|
||||||
|
// Binary doesn't exist, try to download it
|
||||||
|
console.log(
|
||||||
|
`⬇️ Binary not found locally, attempting to download ${version}-${architecture}...`,
|
||||||
|
);
|
||||||
|
try {
|
||||||
|
await this.downloadBinaryForVersion(version, architecture);
|
||||||
|
// Retry serving the binary
|
||||||
|
const binaryPath = await this.getBinaryPath(version, architecture);
|
||||||
|
const stats = await fs.stat(binaryPath);
|
||||||
|
|
||||||
|
res.setHeader("Content-Type", "application/octet-stream");
|
||||||
|
res.setHeader(
|
||||||
|
"Content-Disposition",
|
||||||
|
`attachment; filename="patchmon-agent-${architecture}"`,
|
||||||
|
);
|
||||||
|
res.setHeader("Content-Length", stats.size);
|
||||||
|
res.setHeader("Cache-Control", "public, max-age=3600");
|
||||||
|
res.setHeader("ETag", `"${version}-${architecture}"`);
|
||||||
|
|
||||||
|
const stream = require("node:fs").createReadStream(binaryPath);
|
||||||
|
stream.pipe(res);
|
||||||
|
} catch (downloadError) {
|
||||||
|
console.error(
|
||||||
|
`❌ Failed to download binary ${version}-${architecture}:`,
|
||||||
|
downloadError.message,
|
||||||
|
);
|
||||||
|
res
|
||||||
|
.status(404)
|
||||||
|
.json({ error: "Binary not found and could not be downloaded" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getVersionInfo() {
|
||||||
|
let hasUpdate = false;
|
||||||
|
let updateStatus = "unknown";
|
||||||
|
let effectiveLatestVersion = this.currentVersion; // Always use local version if available
|
||||||
|
|
||||||
|
// If we have a local version, use it as the latest regardless of GitHub
|
||||||
|
if (this.currentVersion) {
|
||||||
|
effectiveLatestVersion = this.currentVersion;
|
||||||
|
console.log(
|
||||||
|
`🔄 Using local agent version ${this.currentVersion} as latest`,
|
||||||
|
);
|
||||||
|
} else if (this.latestVersion) {
|
||||||
|
// Fallback to GitHub version only if no local version
|
||||||
|
effectiveLatestVersion = this.latestVersion;
|
||||||
|
console.log(
|
||||||
|
`🔄 No local version found, using GitHub version ${this.latestVersion}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.currentVersion && effectiveLatestVersion) {
|
||||||
|
const comparison = compareVersions(
|
||||||
|
this.currentVersion,
|
||||||
|
effectiveLatestVersion,
|
||||||
|
);
|
||||||
|
if (comparison < 0) {
|
||||||
|
hasUpdate = true;
|
||||||
|
updateStatus = "update-available";
|
||||||
|
} else if (comparison > 0) {
|
||||||
|
hasUpdate = false;
|
||||||
|
updateStatus = "newer-version";
|
||||||
|
} else {
|
||||||
|
hasUpdate = false;
|
||||||
|
updateStatus = "up-to-date";
|
||||||
|
}
|
||||||
|
} else if (effectiveLatestVersion && !this.currentVersion) {
|
||||||
|
hasUpdate = true;
|
||||||
|
updateStatus = "no-agent";
|
||||||
|
} else if (this.currentVersion && !effectiveLatestVersion) {
|
||||||
|
// We have a current version but no latest version (GitHub API unavailable)
|
||||||
|
hasUpdate = false;
|
||||||
|
updateStatus = "github-unavailable";
|
||||||
|
} else if (!this.currentVersion && !effectiveLatestVersion) {
|
||||||
|
updateStatus = "no-data";
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
currentVersion: this.currentVersion,
|
||||||
|
latestVersion: effectiveLatestVersion,
|
||||||
|
hasUpdate: hasUpdate,
|
||||||
|
updateStatus: updateStatus,
|
||||||
|
lastChecked: this.lastChecked,
|
||||||
|
supportedArchitectures: this.supportedArchitectures,
|
||||||
|
status: effectiveLatestVersion ? "ready" : "no-releases",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async refreshCurrentVersion() {
|
||||||
|
await this.getCurrentAgentVersion();
|
||||||
|
return this.currentVersion;
|
||||||
|
}
|
||||||
|
|
||||||
|
async downloadLatestUpdate() {
|
||||||
|
try {
|
||||||
|
console.log("⬇️ Downloading latest agent update...");
|
||||||
|
|
||||||
|
// First check for updates to get the latest release info
|
||||||
|
const _updateInfo = await this.checkForUpdates();
|
||||||
|
|
||||||
|
if (!this.latestVersion) {
|
||||||
|
throw new Error("No latest version available to download");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the release info from GitHub
|
||||||
|
const response = await axios.get(this.githubApiUrl, {
|
||||||
|
timeout: 10000,
|
||||||
|
headers: {
|
||||||
|
"User-Agent": "PatchMon-Server/1.0",
|
||||||
|
Accept: "application/vnd.github.v3+json",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const releases = response.data;
|
||||||
|
const latestRelease = releases[0];
|
||||||
|
|
||||||
|
if (!latestRelease) {
|
||||||
|
throw new Error("No releases found");
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`⬇️ Downloading binaries for version ${latestRelease.tag_name}...`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Download binaries for all architectures directly to agents folder
|
||||||
|
await this.downloadBinariesToAgentsFolder(latestRelease);
|
||||||
|
|
||||||
|
console.log("✅ Latest update downloaded successfully");
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
version: this.latestVersion,
|
||||||
|
downloadedArchitectures: this.supportedArchitectures,
|
||||||
|
message: `Successfully downloaded version ${this.latestVersion}`,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Failed to download latest update:", error.message);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAvailableVersions() {
|
||||||
|
// No local caching - only return latest from GitHub
|
||||||
|
if (this.latestVersion) {
|
||||||
|
return [this.latestVersion];
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
async getBinaryInfo(version, architecture) {
|
||||||
|
try {
|
||||||
|
// Always use local version if it matches the requested version
|
||||||
|
if (version === this.currentVersion && this.currentVersion) {
|
||||||
|
const binaryPath = await this.getBinaryPath(
|
||||||
|
this.currentVersion,
|
||||||
|
architecture,
|
||||||
|
);
|
||||||
|
const stats = await fs.stat(binaryPath);
|
||||||
|
|
||||||
|
// Calculate file hash
|
||||||
|
const fileBuffer = await fs.readFile(binaryPath);
|
||||||
|
const hash = crypto
|
||||||
|
.createHash("sha256")
|
||||||
|
.update(fileBuffer)
|
||||||
|
.digest("hex");
|
||||||
|
|
||||||
|
return {
|
||||||
|
version: this.currentVersion,
|
||||||
|
architecture,
|
||||||
|
size: stats.size,
|
||||||
|
hash,
|
||||||
|
lastModified: stats.mtime,
|
||||||
|
path: binaryPath,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// For other versions, try to find them in the agents folder
|
||||||
|
const binaryPath = await this.getBinaryPath(version, architecture);
|
||||||
|
const stats = await fs.stat(binaryPath);
|
||||||
|
|
||||||
|
// Calculate file hash
|
||||||
|
const fileBuffer = await fs.readFile(binaryPath);
|
||||||
|
const hash = crypto.createHash("sha256").update(fileBuffer).digest("hex");
|
||||||
|
|
||||||
|
return {
|
||||||
|
version,
|
||||||
|
architecture,
|
||||||
|
size: stats.size,
|
||||||
|
hash,
|
||||||
|
lastModified: stats.mtime,
|
||||||
|
path: binaryPath,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to get binary info: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if an agent needs an update and push notification if needed
|
||||||
|
* @param {string} agentApiId - The agent's API ID
|
||||||
|
* @param {string} agentVersion - The agent's current version
|
||||||
|
* @param {boolean} force - Force update regardless of version
|
||||||
|
* @returns {Object} Update check result
|
||||||
|
*/
|
||||||
|
async checkAndPushAgentUpdate(agentApiId, agentVersion, force = false) {
|
||||||
|
try {
|
||||||
|
console.log(
|
||||||
|
`🔍 Checking update for agent ${agentApiId} (version: ${agentVersion})`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get current server version info
|
||||||
|
const versionInfo = await this.getVersionInfo();
|
||||||
|
|
||||||
|
if (!versionInfo.latestVersion) {
|
||||||
|
console.log(`⚠️ No latest version available for agent ${agentApiId}`);
|
||||||
|
return {
|
||||||
|
needsUpdate: false,
|
||||||
|
reason: "no-latest-version",
|
||||||
|
message: "No latest version available on server",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compare versions
|
||||||
|
const comparison = compareVersions(
|
||||||
|
agentVersion,
|
||||||
|
versionInfo.latestVersion,
|
||||||
|
);
|
||||||
|
const needsUpdate = force || comparison < 0;
|
||||||
|
|
||||||
|
if (needsUpdate) {
|
||||||
|
console.log(
|
||||||
|
`📤 Agent ${agentApiId} needs update: ${agentVersion} → ${versionInfo.latestVersion}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Import agentWs service to push notification
|
||||||
|
const { pushUpdateNotification } = require("./agentWs");
|
||||||
|
|
||||||
|
const updateInfo = {
|
||||||
|
version: versionInfo.latestVersion,
|
||||||
|
force: force,
|
||||||
|
downloadUrl: `/api/v1/agent/binary/${versionInfo.latestVersion}/linux-amd64`,
|
||||||
|
message: force
|
||||||
|
? "Force update requested"
|
||||||
|
: `Update available: ${versionInfo.latestVersion}`,
|
||||||
|
};
|
||||||
|
|
||||||
|
const pushed = pushUpdateNotification(agentApiId, updateInfo);
|
||||||
|
|
||||||
|
if (pushed) {
|
||||||
|
console.log(`✅ Update notification pushed to agent ${agentApiId}`);
|
||||||
|
return {
|
||||||
|
needsUpdate: true,
|
||||||
|
reason: force ? "force-update" : "version-outdated",
|
||||||
|
message: `Update notification sent: ${agentVersion} → ${versionInfo.latestVersion}`,
|
||||||
|
targetVersion: versionInfo.latestVersion,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
console.log(
|
||||||
|
`⚠️ Failed to push update notification to agent ${agentApiId} (not connected)`,
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
needsUpdate: true,
|
||||||
|
reason: "agent-offline",
|
||||||
|
message: "Agent needs update but is not connected",
|
||||||
|
targetVersion: versionInfo.latestVersion,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
console.log(`✅ Agent ${agentApiId} is up to date: ${agentVersion}`);
|
||||||
|
return {
|
||||||
|
needsUpdate: false,
|
||||||
|
reason: "up-to-date",
|
||||||
|
message: `Agent is up to date: ${agentVersion}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`❌ Failed to check update for agent ${agentApiId}:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
needsUpdate: false,
|
||||||
|
reason: "error",
|
||||||
|
message: `Error checking update: ${error.message}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check and push updates to all connected agents
|
||||||
|
* @param {boolean} force - Force update regardless of version
|
||||||
|
* @returns {Object} Bulk update result
|
||||||
|
*/
|
||||||
|
async checkAndPushUpdatesToAll(force = false) {
|
||||||
|
try {
|
||||||
|
console.log(
|
||||||
|
`🔍 Checking updates for all connected agents (force: ${force})`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Import agentWs service to get connected agents
|
||||||
|
const { pushUpdateNotificationToAll } = require("./agentWs");
|
||||||
|
|
||||||
|
const versionInfo = await this.getVersionInfo();
|
||||||
|
|
||||||
|
if (!versionInfo.latestVersion) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
message: "No latest version available on server",
|
||||||
|
updatedAgents: 0,
|
||||||
|
totalAgents: 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const updateInfo = {
|
||||||
|
version: versionInfo.latestVersion,
|
||||||
|
force: force,
|
||||||
|
downloadUrl: `/api/v1/agent/binary/${versionInfo.latestVersion}/linux-amd64`,
|
||||||
|
message: force
|
||||||
|
? "Force update requested for all agents"
|
||||||
|
: `Update available: ${versionInfo.latestVersion}`,
|
||||||
|
};
|
||||||
|
|
||||||
|
const result = await pushUpdateNotificationToAll(updateInfo);
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`✅ Bulk update notification sent to ${result.notifiedCount} agents`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
message: `Update notifications sent to ${result.notifiedCount} agents`,
|
||||||
|
updatedAgents: result.notifiedCount,
|
||||||
|
totalAgents: result.totalAgents,
|
||||||
|
targetVersion: versionInfo.latestVersion,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Failed to push updates to all agents:", error.message);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
message: `Error pushing updates: ${error.message}`,
|
||||||
|
updatedAgents: 0,
|
||||||
|
totalAgents: 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = new AgentVersionService();
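A minimal sketch of how a caller might use the singleton exported above once an agent reports its version. The `handleAgentReport` function, the `host` record shape, and `reportedVersion` are assumptions for illustration; only `checkAndPushAgentUpdate(apiId, version, force)` comes from the service itself:

```js
const agentVersionService = require("../services/agentVersionService");

// Hypothetical caller: after an agent reports in, nudge it to update if the
// server has a newer binary available.
async function handleAgentReport(host, reportedVersion) {
	const result = await agentVersionService.checkAndPushAgentUpdate(
		host.api_id,
		reportedVersion,
		false, // not a forced update
	);
	console.log(`[agent-update] ${result.reason}: ${result.message}`);
}
```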
252 backend/src/services/agentWs.js (Normal file)
@@ -0,0 +1,252 @@
|
|||||||
|
// Lightweight WebSocket hub for agent connections
|
||||||
|
// Auth: X-API-ID / X-API-KEY headers on the upgrade request
|
||||||
|
|
||||||
|
const WebSocket = require("ws");
|
||||||
|
const url = require("node:url");
|
||||||
|
|
||||||
|
// Connection registry by api_id
|
||||||
|
const apiIdToSocket = new Map();
|
||||||
|
|
||||||
|
// Connection metadata (secure/insecure)
|
||||||
|
// Map<api_id, { ws: WebSocket, secure: boolean }>
|
||||||
|
const connectionMetadata = new Map();
|
||||||
|
|
||||||
|
// Subscribers for connection status changes (for SSE)
|
||||||
|
// Map<api_id, Set<callback>>
|
||||||
|
const connectionChangeSubscribers = new Map();
|
||||||
|
|
||||||
|
let wss;
|
||||||
|
let prisma;
|
||||||
|
|
||||||
|
function init(server, prismaClient) {
|
||||||
|
prisma = prismaClient;
|
||||||
|
wss = new WebSocket.Server({ noServer: true });
|
||||||
|
|
||||||
|
// Handle HTTP upgrade events and authenticate before accepting WS
|
||||||
|
server.on("upgrade", async (request, socket, head) => {
|
||||||
|
try {
|
||||||
|
const { pathname } = url.parse(request.url);
|
||||||
|
if (!pathname || !pathname.startsWith("/api/")) {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expected path: /api/{v}/agents/ws
|
||||||
|
const parts = pathname.split("/").filter(Boolean); // [api, v1, agents, ws]
|
||||||
|
if (parts.length !== 4 || parts[2] !== "agents" || parts[3] !== "ws") {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const apiId = request.headers["x-api-id"];
|
||||||
|
const apiKey = request.headers["x-api-key"];
|
||||||
|
if (!apiId || !apiKey) {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate credentials
|
||||||
|
const host = await prisma.hosts.findUnique({ where: { api_id: apiId } });
|
||||||
|
if (!host || host.api_key !== apiKey) {
|
||||||
|
socket.destroy();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
wss.handleUpgrade(request, socket, head, (ws) => {
|
||||||
|
ws.apiId = apiId;
|
||||||
|
|
||||||
|
// Detect if connection is secure (wss://) or not (ws://)
|
||||||
|
const isSecure =
|
||||||
|
socket.encrypted || request.headers["x-forwarded-proto"] === "https";
|
||||||
|
|
||||||
|
apiIdToSocket.set(apiId, ws);
|
||||||
|
connectionMetadata.set(apiId, { ws, secure: isSecure });
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[agent-ws] connected api_id=${apiId} protocol=${isSecure ? "wss" : "ws"} total=${apiIdToSocket.size}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Notify subscribers of connection
|
||||||
|
notifyConnectionChange(apiId, true);
|
||||||
|
|
||||||
|
ws.on("message", () => {
|
||||||
|
// Currently we don't need to handle agent->server messages
|
||||||
|
});
|
||||||
|
|
||||||
|
ws.on("close", () => {
|
||||||
|
const existing = apiIdToSocket.get(apiId);
|
||||||
|
if (existing === ws) {
|
||||||
|
apiIdToSocket.delete(apiId);
|
||||||
|
connectionMetadata.delete(apiId);
|
||||||
|
// Notify subscribers of disconnection
|
||||||
|
notifyConnectionChange(apiId, false);
|
||||||
|
}
|
||||||
|
console.log(
|
||||||
|
`[agent-ws] disconnected api_id=${apiId} total=${apiIdToSocket.size}`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Optional: greet/ack
|
||||||
|
safeSend(ws, JSON.stringify({ type: "connected" }));
|
||||||
|
});
|
||||||
|
} catch (_err) {
|
||||||
|
try {
|
||||||
|
socket.destroy();
|
||||||
|
} catch {
|
||||||
|
/* ignore */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function safeSend(ws, data) {
|
||||||
|
if (ws && ws.readyState === WebSocket.OPEN) {
|
||||||
|
try {
|
||||||
|
ws.send(data);
|
||||||
|
} catch {
|
||||||
|
/* ignore */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function broadcastSettingsUpdate(newInterval) {
|
||||||
|
const payload = JSON.stringify({
|
||||||
|
type: "settings_update",
|
||||||
|
update_interval: newInterval,
|
||||||
|
});
|
||||||
|
for (const [, ws] of apiIdToSocket) {
|
||||||
|
safeSend(ws, payload);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function pushReportNow(apiId) {
|
||||||
|
const ws = apiIdToSocket.get(apiId);
|
||||||
|
safeSend(ws, JSON.stringify({ type: "report_now" }));
|
||||||
|
}
|
||||||
|
|
||||||
|
function pushSettingsUpdate(apiId, newInterval) {
|
||||||
|
const ws = apiIdToSocket.get(apiId);
|
||||||
|
safeSend(
|
||||||
|
ws,
|
||||||
|
JSON.stringify({ type: "settings_update", update_interval: newInterval }),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function pushUpdateNotification(apiId, updateInfo) {
|
||||||
|
const ws = apiIdToSocket.get(apiId);
|
||||||
|
if (ws && ws.readyState === WebSocket.OPEN) {
|
||||||
|
safeSend(
|
||||||
|
ws,
|
||||||
|
JSON.stringify({
|
||||||
|
type: "update_notification",
|
||||||
|
version: updateInfo.version,
|
||||||
|
force: updateInfo.force || false,
|
||||||
|
downloadUrl: updateInfo.downloadUrl,
|
||||||
|
message: updateInfo.message,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
console.log(
|
||||||
|
`📤 Pushed update notification to agent ${apiId}: version ${updateInfo.version}`,
|
||||||
|
);
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
console.log(
|
||||||
|
`⚠️ Agent ${apiId} not connected, cannot push update notification`,
|
||||||
|
);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function pushUpdateNotificationToAll(updateInfo) {
|
||||||
|
let notifiedCount = 0;
|
||||||
|
let failedCount = 0;
|
||||||
|
|
||||||
|
for (const [apiId, ws] of apiIdToSocket) {
|
||||||
|
if (ws && ws.readyState === WebSocket.OPEN) {
|
||||||
|
try {
|
||||||
|
safeSend(
|
||||||
|
ws,
|
||||||
|
JSON.stringify({
|
||||||
|
type: "update_notification",
|
||||||
|
version: updateInfo.version,
|
||||||
|
force: updateInfo.force || false,
|
||||||
|
message: updateInfo.message,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
notifiedCount++;
|
||||||
|
console.log(
|
||||||
|
`📤 Pushed update notification to agent ${apiId}: version ${updateInfo.version}`,
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
failedCount++;
|
||||||
|
console.error(`❌ Failed to notify agent ${apiId}:`, error.message);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
failedCount++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`📤 Update notification sent to ${notifiedCount} agents, ${failedCount} failed`,
|
||||||
|
);
|
||||||
|
return { notifiedCount, failedCount };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify all subscribers when connection status changes
|
||||||
|
function notifyConnectionChange(apiId, connected) {
|
||||||
|
const subscribers = connectionChangeSubscribers.get(apiId);
|
||||||
|
if (subscribers) {
|
||||||
|
for (const callback of subscribers) {
|
||||||
|
try {
|
||||||
|
callback(connected);
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`[agent-ws] error notifying subscriber:`, err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subscribe to connection status changes for a specific api_id
|
||||||
|
function subscribeToConnectionChanges(apiId, callback) {
|
||||||
|
if (!connectionChangeSubscribers.has(apiId)) {
|
||||||
|
connectionChangeSubscribers.set(apiId, new Set());
|
||||||
|
}
|
||||||
|
connectionChangeSubscribers.get(apiId).add(callback);
|
||||||
|
|
||||||
|
// Return unsubscribe function
|
||||||
|
return () => {
|
||||||
|
const subscribers = connectionChangeSubscribers.get(apiId);
|
||||||
|
if (subscribers) {
|
||||||
|
subscribers.delete(callback);
|
||||||
|
if (subscribers.size === 0) {
|
||||||
|
connectionChangeSubscribers.delete(apiId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
init,
|
||||||
|
broadcastSettingsUpdate,
|
||||||
|
pushReportNow,
|
||||||
|
pushSettingsUpdate,
|
||||||
|
pushUpdateNotification,
|
||||||
|
pushUpdateNotificationToAll,
|
||||||
|
// Expose read-only view of connected agents
|
||||||
|
getConnectedApiIds: () => Array.from(apiIdToSocket.keys()),
|
||||||
|
isConnected: (apiId) => {
|
||||||
|
const ws = apiIdToSocket.get(apiId);
|
||||||
|
return !!ws && ws.readyState === WebSocket.OPEN;
|
||||||
|
},
|
||||||
|
// Get connection info including protocol (ws/wss)
|
||||||
|
getConnectionInfo: (apiId) => {
|
||||||
|
const metadata = connectionMetadata.get(apiId);
|
||||||
|
if (!metadata) {
|
||||||
|
return { connected: false, secure: false };
|
||||||
|
}
|
||||||
|
const connected = metadata.ws.readyState === WebSocket.OPEN;
|
||||||
|
return { connected, secure: metadata.secure };
|
||||||
|
},
|
||||||
|
// Subscribe to connection status changes (for SSE)
|
||||||
|
subscribeToConnectionChanges,
|
||||||
|
};
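For reference, a minimal client sketch of the handshake this hub expects; the production agent is a separate binary, and the URL and credentials below are placeholders. It uses the same `/api/v1/agents/ws` path and `X-API-ID` / `X-API-KEY` headers that the upgrade handler validates:

```js
const WebSocket = require("ws");

// Placeholder server URL and credentials - supply real values via environment.
const ws = new WebSocket("wss://patchmon.example.com/api/v1/agents/ws", {
	headers: {
		"X-API-ID": process.env.PATCHMON_API_ID,
		"X-API-KEY": process.env.PATCHMON_API_KEY,
	},
});

ws.on("message", (data) => {
	// The hub currently sends: connected, settings_update, report_now,
	// and update_notification messages.
	const msg = JSON.parse(data);
	console.log("received", msg.type);
});
```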
153 backend/src/services/automation/githubUpdateCheck.js (Normal file)
@@ -0,0 +1,153 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
const { compareVersions, checkPublicRepo } = require("./shared/utils");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GitHub Update Check Automation
|
||||||
|
* Checks for new releases on GitHub using HTTPS API
|
||||||
|
*/
|
||||||
|
class GitHubUpdateCheck {
|
||||||
|
constructor(queueManager) {
|
||||||
|
this.queueManager = queueManager;
|
||||||
|
this.queueName = "github-update-check";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process GitHub update check job
|
||||||
|
*/
|
||||||
|
async process(_job) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
console.log("🔍 Starting GitHub update check...");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get settings
|
||||||
|
const settings = await prisma.settings.findFirst();
|
||||||
|
const DEFAULT_GITHUB_REPO = "https://github.com/PatchMon/PatchMon.git";
|
||||||
|
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
|
||||||
|
let owner, repo;
|
||||||
|
|
||||||
|
// Parse GitHub repository URL (supports both HTTPS and SSH formats)
|
||||||
|
if (repoUrl.includes("git@github.com:")) {
|
||||||
|
const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
|
||||||
|
if (match) {
|
||||||
|
[, owner, repo] = match;
|
||||||
|
}
|
||||||
|
} else if (repoUrl.includes("github.com/")) {
|
||||||
|
const match = repoUrl.match(
|
||||||
|
/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
|
||||||
|
);
|
||||||
|
if (match) {
|
||||||
|
[, owner, repo] = match;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!owner || !repo) {
|
||||||
|
throw new Error("Could not parse GitHub repository URL");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always use HTTPS GitHub API (simpler and more reliable)
|
||||||
|
const latestVersion = await checkPublicRepo(owner, repo);
|
||||||
|
|
||||||
|
if (!latestVersion) {
|
||||||
|
throw new Error("Could not determine latest version");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read version from package.json
|
||||||
|
let currentVersion = "1.3.0"; // fallback
|
||||||
|
try {
|
||||||
|
const packageJson = require("../../../package.json");
|
||||||
|
if (packageJson?.version) {
|
||||||
|
currentVersion = packageJson.version;
|
||||||
|
}
|
||||||
|
} catch (packageError) {
|
||||||
|
console.warn(
|
||||||
|
"Could not read version from package.json:",
|
||||||
|
packageError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const isUpdateAvailable =
|
||||||
|
compareVersions(latestVersion, currentVersion) > 0;
|
||||||
|
|
||||||
|
// Update settings with check results
|
||||||
|
await prisma.settings.update({
|
||||||
|
where: { id: settings.id },
|
||||||
|
data: {
|
||||||
|
last_update_check: new Date(),
|
||||||
|
update_available: isUpdateAvailable,
|
||||||
|
latest_version: latestVersion,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.log(
|
||||||
|
`✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
currentVersion,
|
||||||
|
latestVersion,
|
||||||
|
isUpdateAvailable,
|
||||||
|
executionTime,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.error(
|
||||||
|
`❌ GitHub update check failed after ${executionTime}ms:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Update last check time even on error
|
||||||
|
try {
|
||||||
|
const settings = await prisma.settings.findFirst();
|
||||||
|
if (settings) {
|
||||||
|
await prisma.settings.update({
|
||||||
|
where: { id: settings.id },
|
||||||
|
data: {
|
||||||
|
last_update_check: new Date(),
|
||||||
|
update_available: false,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (updateError) {
|
||||||
|
console.error(
|
||||||
|
"❌ Error updating last check time:",
|
||||||
|
updateError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule recurring GitHub update check (daily at midnight)
|
||||||
|
*/
|
||||||
|
async schedule() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"github-update-check",
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
repeat: { cron: "0 0 * * *" }, // Daily at midnight
|
||||||
|
jobId: "github-update-check-recurring",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
console.log("✅ GitHub update check scheduled");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger manual GitHub update check
|
||||||
|
*/
|
||||||
|
async triggerManual() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"github-update-check-manual",
|
||||||
|
{},
|
||||||
|
{ priority: 1 },
|
||||||
|
);
|
||||||
|
console.log("✅ Manual GitHub update check triggered");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = GitHubUpdateCheck;
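A quick sanity check of the two repository URL formats the parser above accepts, run against the default HTTPS repo URL from the code and an SSH variant of it (illustrative only):

```js
const httpsMatch = "https://github.com/PatchMon/PatchMon.git".match(
	/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
);
const sshMatch = "git@github.com:PatchMon/PatchMon.git".match(
	/git@github\.com:([^/]+)\/([^/]+)\.git/,
);
console.log(httpsMatch[1], httpsMatch[2]); // PatchMon PatchMon
console.log(sshMatch[1], sshMatch[2]); // PatchMon PatchMon
```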
388 backend/src/services/automation/index.js (Normal file)
@@ -0,0 +1,388 @@
|
|||||||
|
const { Queue, Worker } = require("bullmq");
|
||||||
|
const { redis, redisConnection } = require("./shared/redis");
|
||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
const agentWs = require("../agentWs");
|
||||||
|
|
||||||
|
// Import automation classes
|
||||||
|
const GitHubUpdateCheck = require("./githubUpdateCheck");
|
||||||
|
const SessionCleanup = require("./sessionCleanup");
|
||||||
|
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
|
||||||
|
const OrphanedPackageCleanup = require("./orphanedPackageCleanup");
|
||||||
|
|
||||||
|
// Queue names
|
||||||
|
const QUEUE_NAMES = {
|
||||||
|
GITHUB_UPDATE_CHECK: "github-update-check",
|
||||||
|
SESSION_CLEANUP: "session-cleanup",
|
||||||
|
ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
|
||||||
|
ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup",
|
||||||
|
AGENT_COMMANDS: "agent-commands",
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main Queue Manager
|
||||||
|
* Manages all BullMQ queues and workers
|
||||||
|
*/
|
||||||
|
class QueueManager {
|
||||||
|
constructor() {
|
||||||
|
this.queues = {};
|
||||||
|
this.workers = {};
|
||||||
|
this.automations = {};
|
||||||
|
this.isInitialized = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize all queues, workers, and automations
|
||||||
|
*/
|
||||||
|
async initialize() {
|
||||||
|
try {
|
||||||
|
console.log("✅ Redis connection successful");
|
||||||
|
|
||||||
|
// Initialize queues
|
||||||
|
await this.initializeQueues();
|
||||||
|
|
||||||
|
// Initialize automation classes
|
||||||
|
await this.initializeAutomations();
|
||||||
|
|
||||||
|
// Initialize workers
|
||||||
|
await this.initializeWorkers();
|
||||||
|
|
||||||
|
// Setup event listeners
|
||||||
|
this.setupEventListeners();
|
||||||
|
|
||||||
|
this.isInitialized = true;
|
||||||
|
console.log("✅ Queue manager initialized successfully");
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Failed to initialize queue manager:", error.message);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize all queues
|
||||||
|
*/
|
||||||
|
async initializeQueues() {
|
||||||
|
for (const [_key, queueName] of Object.entries(QUEUE_NAMES)) {
|
||||||
|
this.queues[queueName] = new Queue(queueName, {
|
||||||
|
connection: redisConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
removeOnComplete: 50, // Keep last 50 completed jobs
|
||||||
|
removeOnFail: 20, // Keep last 20 failed jobs
|
||||||
|
attempts: 3, // Retry failed jobs 3 times
|
||||||
|
backoff: {
|
||||||
|
type: "exponential",
|
||||||
|
delay: 2000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`✅ Queue '${queueName}' initialized`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize automation classes
|
||||||
|
*/
|
||||||
|
async initializeAutomations() {
|
||||||
|
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
|
||||||
|
this,
|
||||||
|
);
|
||||||
|
this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
|
||||||
|
new OrphanedRepoCleanup(this);
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] =
|
||||||
|
new OrphanedPackageCleanup(this);
|
||||||
|
|
||||||
|
console.log("✅ All automation classes initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize all workers
|
||||||
|
*/
|
||||||
|
async initializeWorkers() {
|
||||||
|
// Optimized worker options to reduce Redis connections
|
||||||
|
const workerOptions = {
|
||||||
|
connection: redisConnection,
|
||||||
|
concurrency: 1, // Keep concurrency low to reduce connections
|
||||||
|
// Connection optimization
|
||||||
|
maxStalledCount: 1,
|
||||||
|
stalledInterval: 30000,
|
||||||
|
// Reduce connection churn
|
||||||
|
settings: {
|
||||||
|
stalledInterval: 30000,
|
||||||
|
maxStalledCount: 1,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
// GitHub Update Check Worker
|
||||||
|
this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
|
||||||
|
QUEUE_NAMES.GITHUB_UPDATE_CHECK,
|
||||||
|
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
|
||||||
|
),
|
||||||
|
workerOptions,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Session Cleanup Worker
|
||||||
|
this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
|
||||||
|
QUEUE_NAMES.SESSION_CLEANUP,
|
||||||
|
this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.SESSION_CLEANUP],
|
||||||
|
),
|
||||||
|
workerOptions,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Orphaned Repo Cleanup Worker
|
||||||
|
this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
|
||||||
|
QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
|
||||||
|
),
|
||||||
|
workerOptions,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Orphaned Package Cleanup Worker
|
||||||
|
this.workers[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] = new Worker(
|
||||||
|
QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].process.bind(
|
||||||
|
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
|
||||||
|
),
|
||||||
|
workerOptions,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Agent Commands Worker
|
||||||
|
this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
|
||||||
|
QUEUE_NAMES.AGENT_COMMANDS,
|
||||||
|
async (job) => {
|
||||||
|
const { api_id, type } = job.data;
|
||||||
|
console.log(`Processing agent command: ${type} for ${api_id}`);
|
||||||
|
|
||||||
|
// Send command via WebSocket based on type
|
||||||
|
if (type === "report_now") {
|
||||||
|
agentWs.pushReportNow(api_id);
|
||||||
|
} else if (type === "settings_update") {
|
||||||
|
// For settings update, we need additional data
|
||||||
|
const { update_interval } = job.data;
|
||||||
|
agentWs.pushSettingsUpdate(api_id, update_interval);
|
||||||
|
} else {
|
||||||
|
console.error(`Unknown agent command type: ${type}`);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
workerOptions,
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
"✅ All workers initialized with optimized connection settings",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup event listeners for all queues
|
||||||
|
*/
|
||||||
|
setupEventListeners() {
|
||||||
|
for (const queueName of Object.values(QUEUE_NAMES)) {
|
||||||
|
const queue = this.queues[queueName];
|
||||||
|
queue.on("error", (error) => {
|
||||||
|
console.error(`❌ Queue '${queueName}' experienced an error:`, error);
|
||||||
|
});
|
||||||
|
queue.on("failed", (job, err) => {
|
||||||
|
console.error(
|
||||||
|
`❌ Job '${job.id}' in queue '${queueName}' failed:`,
|
||||||
|
err,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
queue.on("completed", (job) => {
|
||||||
|
console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
console.log("✅ Queue events initialized");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule all recurring jobs
|
||||||
|
*/
|
||||||
|
async scheduleAllJobs() {
|
||||||
|
await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
|
||||||
|
await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
|
||||||
|
await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
|
||||||
|
await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Manual job triggers
|
||||||
|
*/
|
||||||
|
async triggerGitHubUpdateCheck() {
|
||||||
|
return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
async triggerSessionCleanup() {
|
||||||
|
return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
async triggerOrphanedRepoCleanup() {
|
||||||
|
return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
async triggerOrphanedPackageCleanup() {
|
||||||
|
return this.automations[
|
||||||
|
QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP
|
||||||
|
].triggerManual();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get queue statistics
|
||||||
|
*/
|
||||||
|
async getQueueStats(queueName) {
|
||||||
|
const queue = this.queues[queueName];
|
||||||
|
if (!queue) {
|
||||||
|
throw new Error(`Queue ${queueName} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||||
|
queue.getWaiting(),
|
||||||
|
queue.getActive(),
|
||||||
|
queue.getCompleted(),
|
||||||
|
queue.getFailed(),
|
||||||
|
queue.getDelayed(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
waiting: waiting.length,
|
||||||
|
active: active.length,
|
||||||
|
completed: completed.length,
|
||||||
|
failed: failed.length,
|
||||||
|
delayed: delayed.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all queue statistics
|
||||||
|
*/
|
||||||
|
async getAllQueueStats() {
|
||||||
|
const stats = {};
|
||||||
|
for (const queueName of Object.values(QUEUE_NAMES)) {
|
||||||
|
stats[queueName] = await this.getQueueStats(queueName);
|
||||||
|
}
|
||||||
|
return stats;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get recent jobs for a queue
|
||||||
|
*/
|
||||||
|
async getRecentJobs(queueName, limit = 10) {
|
||||||
|
const queue = this.queues[queueName];
|
||||||
|
if (!queue) {
|
||||||
|
throw new Error(`Queue ${queueName} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [completed, failed] = await Promise.all([
|
||||||
|
queue.getCompleted(0, limit - 1),
|
||||||
|
queue.getFailed(0, limit - 1),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return [...completed, ...failed]
|
||||||
|
.sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
|
||||||
|
.slice(0, limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get jobs for a specific host (by API ID)
|
||||||
|
*/
|
||||||
|
async getHostJobs(apiId, limit = 20) {
|
||||||
|
const queue = this.queues[QUEUE_NAMES.AGENT_COMMANDS];
|
||||||
|
if (!queue) {
|
||||||
|
throw new Error(`Queue ${QUEUE_NAMES.AGENT_COMMANDS} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[getHostJobs] Looking for jobs with api_id: ${apiId}`);
|
||||||
|
|
||||||
|
// Get active queue status (waiting, active, delayed, failed)
|
||||||
|
const [waiting, active, delayed, failed] = await Promise.all([
|
||||||
|
queue.getWaiting(),
|
||||||
|
queue.getActive(),
|
||||||
|
queue.getDelayed(),
|
||||||
|
queue.getFailed(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Filter by API ID
|
||||||
|
const filterByApiId = (jobs) =>
|
||||||
|
jobs.filter((job) => job.data && job.data.api_id === apiId);
|
||||||
|
|
||||||
|
const waitingCount = filterByApiId(waiting).length;
|
||||||
|
const activeCount = filterByApiId(active).length;
|
||||||
|
const delayedCount = filterByApiId(delayed).length;
|
||||||
|
const failedCount = filterByApiId(failed).length;
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[getHostJobs] Queue status - Waiting: ${waitingCount}, Active: ${activeCount}, Delayed: ${delayedCount}, Failed: ${failedCount}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Get job history from database (shows all attempts and status changes)
|
||||||
|
const jobHistory = await prisma.job_history.findMany({
|
||||||
|
where: {
|
||||||
|
api_id: apiId,
|
||||||
|
},
|
||||||
|
orderBy: {
|
||||||
|
created_at: "desc",
|
||||||
|
},
|
||||||
|
take: limit,
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`[getHostJobs] Found ${jobHistory.length} job history records for api_id: ${apiId}`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
waiting: waitingCount,
|
||||||
|
active: activeCount,
|
||||||
|
delayed: delayedCount,
|
||||||
|
failed: failedCount,
|
||||||
|
jobHistory: jobHistory.map((job) => ({
|
||||||
|
id: job.id,
|
||||||
|
job_id: job.job_id,
|
||||||
|
job_name: job.job_name,
|
||||||
|
status: job.status,
|
||||||
|
attempt_number: job.attempt_number,
|
||||||
|
error_message: job.error_message,
|
||||||
|
output: job.output,
|
||||||
|
created_at: job.created_at,
|
||||||
|
updated_at: job.updated_at,
|
||||||
|
completed_at: job.completed_at,
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Graceful shutdown
|
||||||
|
*/
|
||||||
|
async shutdown() {
|
||||||
|
console.log("🛑 Shutting down queue manager...");
|
||||||
|
|
||||||
|
for (const queueName of Object.keys(this.queues)) {
|
||||||
|
try {
|
||||||
|
await this.queues[queueName].close();
|
||||||
|
} catch (e) {
|
||||||
|
console.warn(
|
||||||
|
`⚠️ Failed to close queue '${queueName}':`,
|
||||||
|
e?.message || e,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (this.workers?.[queueName]) {
|
||||||
|
try {
|
||||||
|
await this.workers[queueName].close();
|
||||||
|
} catch (e) {
|
||||||
|
console.warn(
|
||||||
|
`⚠️ Failed to close worker for '${queueName}':`,
|
||||||
|
e?.message || e,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await redis.quit();
|
||||||
|
console.log("✅ Queue manager shutdown complete");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const queueManager = new QueueManager();
|
||||||
|
|
||||||
|
module.exports = { queueManager, QUEUE_NAMES };
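A sketch of how a route elsewhere in the backend might enqueue a one-off agent command for the `AGENT_COMMANDS` worker above. The function name and require path are illustrative; the job payload shape (`api_id`, `type`) matches what the worker reads:

```js
const { queueManager, QUEUE_NAMES } = require("./services/automation");

// Ask a single connected agent to send a report immediately.
async function requestImmediateReport(apiId) {
	await queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS].add("report_now", {
		api_id: apiId,
		type: "report_now",
	});
}
```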
116 backend/src/services/automation/orphanedPackageCleanup.js (Normal file)
@@ -0,0 +1,116 @@
|
|||||||
|
const { prisma } = require("./shared/prisma");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Orphaned Package Cleanup Automation
|
||||||
|
* Removes packages with no associated hosts
|
||||||
|
*/
|
||||||
|
class OrphanedPackageCleanup {
|
||||||
|
constructor(queueManager) {
|
||||||
|
this.queueManager = queueManager;
|
||||||
|
this.queueName = "orphaned-package-cleanup";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process orphaned package cleanup job
|
||||||
|
*/
|
||||||
|
async process(_job) {
|
||||||
|
const startTime = Date.now();
|
||||||
|
console.log("🧹 Starting orphaned package cleanup...");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Find packages with 0 hosts
|
||||||
|
const orphanedPackages = await prisma.packages.findMany({
|
||||||
|
where: {
|
||||||
|
host_packages: {
|
||||||
|
none: {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
include: {
|
||||||
|
_count: {
|
||||||
|
select: {
|
||||||
|
host_packages: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
let deletedCount = 0;
|
||||||
|
const deletedPackages = [];
|
||||||
|
|
||||||
|
// Delete orphaned packages
|
||||||
|
for (const pkg of orphanedPackages) {
|
||||||
|
try {
|
||||||
|
await prisma.packages.delete({
|
||||||
|
where: { id: pkg.id },
|
||||||
|
});
|
||||||
|
deletedCount++;
|
||||||
|
deletedPackages.push({
|
||||||
|
id: pkg.id,
|
||||||
|
name: pkg.name,
|
||||||
|
description: pkg.description,
|
||||||
|
category: pkg.category,
|
||||||
|
latest_version: pkg.latest_version,
|
||||||
|
});
|
||||||
|
console.log(
|
||||||
|
`🗑️ Deleted orphaned package: ${pkg.name} (${pkg.latest_version})`,
|
||||||
|
);
|
||||||
|
} catch (deleteError) {
|
||||||
|
console.error(
|
||||||
|
`❌ Failed to delete package ${pkg.id}:`,
|
||||||
|
deleteError.message,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.log(
|
||||||
|
`✅ Orphaned package cleanup completed in ${executionTime}ms - Deleted ${deletedCount} packages`,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
deletedCount,
|
||||||
|
deletedPackages,
|
||||||
|
executionTime,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
const executionTime = Date.now() - startTime;
|
||||||
|
console.error(
|
||||||
|
`❌ Orphaned package cleanup failed after ${executionTime}ms:`,
|
||||||
|
error.message,
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Schedule recurring orphaned package cleanup (daily at 3 AM)
|
||||||
|
*/
|
||||||
|
async schedule() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"orphaned-package-cleanup",
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
repeat: { cron: "0 3 * * *" }, // Daily at 3 AM
|
||||||
|
jobId: "orphaned-package-cleanup-recurring",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
console.log("✅ Orphaned package cleanup scheduled");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trigger manual orphaned package cleanup
|
||||||
|
*/
|
||||||
|
async triggerManual() {
|
||||||
|
const job = await this.queueManager.queues[this.queueName].add(
|
||||||
|
"orphaned-package-cleanup-manual",
|
||||||
|
{},
|
||||||
|
{ priority: 1 },
|
||||||
|
);
|
||||||
|
console.log("✅ Manual orphaned package cleanup triggered");
|
||||||
|
return job;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = OrphanedPackageCleanup;
|
||||||
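As an aside, a minimal sketch of how this class would presumably be wired up. It assumes the queue manager above has already registered an `orphaned-package-cleanup` BullMQ queue under `queueManager.queues` (the constructor only stores the reference), and the import path is illustrative only.

```js
// Illustrative wiring, assuming queueManager.queues["orphaned-package-cleanup"] exists.
const OrphanedPackageCleanup = require("./orphanedPackageCleanup");

async function registerOrphanedPackageCleanup(queueManager) {
	const cleanup = new OrphanedPackageCleanup(queueManager);
	await cleanup.schedule(); // repeatable job, daily at 3 AM ("0 3 * * *")
	// await cleanup.triggerManual(); // or run once immediately, at priority 1
	return cleanup;
}
```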
114  backend/src/services/automation/orphanedRepoCleanup.js  Normal file
@@ -0,0 +1,114 @@
const { prisma } = require("./shared/prisma");

/**
 * Orphaned Repository Cleanup Automation
 * Removes repositories with no associated hosts
 */
class OrphanedRepoCleanup {
	constructor(queueManager) {
		this.queueManager = queueManager;
		this.queueName = "orphaned-repo-cleanup";
	}

	/**
	 * Process orphaned repository cleanup job
	 */
	async process(_job) {
		const startTime = Date.now();
		console.log("🧹 Starting orphaned repository cleanup...");

		try {
			// Find repositories with 0 hosts
			const orphanedRepos = await prisma.repositories.findMany({
				where: {
					host_repositories: {
						none: {},
					},
				},
				include: {
					_count: {
						select: {
							host_repositories: true,
						},
					},
				},
			});

			let deletedCount = 0;
			const deletedRepos = [];

			// Delete orphaned repositories
			for (const repo of orphanedRepos) {
				try {
					await prisma.repositories.delete({
						where: { id: repo.id },
					});
					deletedCount++;
					deletedRepos.push({
						id: repo.id,
						name: repo.name,
						url: repo.url,
					});
					console.log(
						`🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
					);
				} catch (deleteError) {
					console.error(
						`❌ Failed to delete repository ${repo.id}:`,
						deleteError.message,
					);
				}
			}

			const executionTime = Date.now() - startTime;
			console.log(
				`✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
			);

			return {
				success: true,
				deletedCount,
				deletedRepos,
				executionTime,
			};
		} catch (error) {
			const executionTime = Date.now() - startTime;
			console.error(
				`❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
				error.message,
			);
			throw error;
		}
	}

	/**
	 * Schedule recurring orphaned repository cleanup (daily at 2 AM)
	 */
	async schedule() {
		const job = await this.queueManager.queues[this.queueName].add(
			"orphaned-repo-cleanup",
			{},
			{
				repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
				jobId: "orphaned-repo-cleanup-recurring",
			},
		);
		console.log("✅ Orphaned repository cleanup scheduled");
		return job;
	}

	/**
	 * Trigger manual orphaned repository cleanup
	 */
	async triggerManual() {
		const job = await this.queueManager.queues[this.queueName].add(
			"orphaned-repo-cleanup-manual",
			{},
			{ priority: 1 },
		);
		console.log("✅ Manual orphaned repository cleanup triggered");
		return job;
	}
}

module.exports = OrphanedRepoCleanup;
77  backend/src/services/automation/sessionCleanup.js  Normal file
@@ -0,0 +1,77 @@
const { prisma } = require("./shared/prisma");

/**
 * Session Cleanup Automation
 * Cleans up expired user sessions
 */
class SessionCleanup {
	constructor(queueManager) {
		this.queueManager = queueManager;
		this.queueName = "session-cleanup";
	}

	/**
	 * Process session cleanup job
	 */
	async process(_job) {
		const startTime = Date.now();
		console.log("🧹 Starting session cleanup...");

		try {
			const result = await prisma.user_sessions.deleteMany({
				where: {
					OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
				},
			});

			const executionTime = Date.now() - startTime;
			console.log(
				`✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
			);

			return {
				success: true,
				sessionsCleaned: result.count,
				executionTime,
			};
		} catch (error) {
			const executionTime = Date.now() - startTime;
			console.error(
				`❌ Session cleanup failed after ${executionTime}ms:`,
				error.message,
			);
			throw error;
		}
	}

	/**
	 * Schedule recurring session cleanup (every hour)
	 */
	async schedule() {
		const job = await this.queueManager.queues[this.queueName].add(
			"session-cleanup",
			{},
			{
				repeat: { cron: "0 * * * *" }, // Every hour
				jobId: "session-cleanup-recurring",
			},
		);
		console.log("✅ Session cleanup scheduled");
		return job;
	}

	/**
	 * Trigger manual session cleanup
	 */
	async triggerManual() {
		const job = await this.queueManager.queues[this.queueName].add(
			"session-cleanup-manual",
			{},
			{ priority: 1 },
		);
		console.log("✅ Manual session cleanup triggered");
		return job;
	}
}

module.exports = SessionCleanup;
5  backend/src/services/automation/shared/prisma.js  Normal file
@@ -0,0 +1,5 @@
const { getPrismaClient } = require("../../../config/prisma");

const prisma = getPrismaClient();

module.exports = { prisma };
56  backend/src/services/automation/shared/redis.js  Normal file
@@ -0,0 +1,56 @@
const IORedis = require("ioredis");

// Redis connection configuration with connection pooling
const redisConnection = {
	host: process.env.REDIS_HOST || "localhost",
	port: parseInt(process.env.REDIS_PORT, 10) || 6379,
	password: process.env.REDIS_PASSWORD || undefined,
	username: process.env.REDIS_USER || undefined,
	db: parseInt(process.env.REDIS_DB, 10) || 0,
	// Connection pooling settings
	lazyConnect: true,
	keepAlive: 30000,
	connectTimeout: 30000, // Increased from 10s to 30s
	commandTimeout: 30000, // Increased from 5s to 30s
	enableReadyCheck: false,
	// Reduce connection churn
	family: 4, // Force IPv4
	// Retry settings
	retryDelayOnClusterDown: 300,
	retryDelayOnFailover: 100,
	maxRetriesPerRequest: null, // BullMQ requires this to be null
	// Connection pool settings
	maxLoadingTimeout: 30000,
};

// Create Redis connection with singleton pattern
let redisInstance = null;

function getRedisConnection() {
	if (!redisInstance) {
		redisInstance = new IORedis(redisConnection);

		// Handle graceful shutdown
		process.on("beforeExit", async () => {
			await redisInstance.quit();
		});

		process.on("SIGINT", async () => {
			await redisInstance.quit();
			process.exit(0);
		});

		process.on("SIGTERM", async () => {
			await redisInstance.quit();
			process.exit(0);
		});
	}

	return redisInstance;
}

module.exports = {
	redis: getRedisConnection(),
	redisConnection,
	getRedisConnection,
};
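Because the module exports `redis: getRedisConnection()`, every consumer shares a single, lazily connected client. A minimal sketch of how a caller might verify the connection; `ping()` is a standard ioredis command, nothing specific to this changeset:

```js
// Minimal sketch; with lazyConnect the client connects on the first command.
const { redis, redisConnection } = require("./shared/redis");

async function checkRedis() {
	const pong = await redis.ping(); // "PONG" when Redis is reachable
	console.log(`Redis ${redisConnection.host}:${redisConnection.port} -> ${pong}`);
}
```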
82  backend/src/services/automation/shared/utils.js  Normal file
@@ -0,0 +1,82 @@
// Common utilities for automation jobs

/**
 * Compare two semantic versions
 * @param {string} version1 - First version
 * @param {string} version2 - Second version
 * @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
 */
function compareVersions(version1, version2) {
	const v1parts = version1.split(".").map(Number);
	const v2parts = version2.split(".").map(Number);

	const maxLength = Math.max(v1parts.length, v2parts.length);

	for (let i = 0; i < maxLength; i++) {
		const v1part = v1parts[i] || 0;
		const v2part = v2parts[i] || 0;

		if (v1part > v2part) return 1;
		if (v1part < v2part) return -1;
	}

	return 0;
}

/**
 * Check public GitHub repository for latest release
 * @param {string} owner - Repository owner
 * @param {string} repo - Repository name
 * @returns {Promise<string|null>} - Latest version or null
 */
async function checkPublicRepo(owner, repo) {
	try {
		const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

		let currentVersion = "1.3.0"; // fallback
		try {
			const packageJson = require("../../../package.json");
			if (packageJson?.version) {
				currentVersion = packageJson.version;
			}
		} catch (packageError) {
			console.warn(
				"Could not read version from package.json for User-Agent, using fallback:",
				packageError.message,
			);
		}

		const response = await fetch(httpsRepoUrl, {
			method: "GET",
			headers: {
				Accept: "application/vnd.github.v3+json",
				"User-Agent": `PatchMon-Server/${currentVersion}`,
			},
		});

		if (!response.ok) {
			const errorText = await response.text();
			if (
				errorText.includes("rate limit") ||
				errorText.includes("API rate limit")
			) {
				console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
				return null;
			}
			throw new Error(
				`GitHub API error: ${response.status} ${response.statusText}`,
			);
		}

		const releaseData = await response.json();
		return releaseData.tag_name.replace("v", "");
	} catch (error) {
		console.error("GitHub API error:", error.message);
		throw error;
	}
}

module.exports = {
	compareVersions,
	checkPublicRepo,
};
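For reference, `compareVersions` is a plain numeric dot-segment comparison (missing segments count as 0; pre-release suffixes are not handled), so its expected behaviour looks like this:

```js
// Expected behaviour of the helper above.
const { compareVersions } = require("./shared/utils");

console.log(compareVersions("1.3.0", "1.2.7")); //  1 (first argument is newer)
console.log(compareVersions("1.2.7", "1.3.0")); // -1
console.log(compareVersions("1.3", "1.3.0"));   //  0 (missing segments count as 0)
```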
@@ -1,7 +1,7 @@
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 const { v4: uuidv4 } = require("uuid");
 
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 // Cached settings instance
 let cachedSettings = null;
@@ -1,295 +0,0 @@
const { PrismaClient } = require("@prisma/client");
const { exec } = require("node:child_process");
const { promisify } = require("node:util");

const prisma = new PrismaClient();
const execAsync = promisify(exec);

class UpdateScheduler {
	constructor() {
		this.isRunning = false;
		this.intervalId = null;
		this.checkInterval = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
	}

	// Start the scheduler
	start() {
		if (this.isRunning) {
			console.log("Update scheduler is already running");
			return;
		}

		console.log("🔄 Starting update scheduler...");
		this.isRunning = true;

		// Run initial check
		this.checkForUpdates();

		// Schedule regular checks
		this.intervalId = setInterval(() => {
			this.checkForUpdates();
		}, this.checkInterval);

		console.log(
			`✅ Update scheduler started - checking every ${this.checkInterval / (60 * 60 * 1000)} hours`,
		);
	}

	// Stop the scheduler
	stop() {
		if (!this.isRunning) {
			console.log("Update scheduler is not running");
			return;
		}

		console.log("🛑 Stopping update scheduler...");
		this.isRunning = false;

		if (this.intervalId) {
			clearInterval(this.intervalId);
			this.intervalId = null;
		}

		console.log("✅ Update scheduler stopped");
	}

	// Check for updates
	async checkForUpdates() {
		try {
			console.log("🔍 Checking for updates...");

			// Get settings
			const settings = await prisma.settings.findFirst();
			const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
			const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
			let owner, repo;

			if (repoUrl.includes("git@github.com:")) {
				const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
				if (match) {
					[, owner, repo] = match;
				}
			} else if (repoUrl.includes("github.com/")) {
				const match = repoUrl.match(
					/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
				);
				if (match) {
					[, owner, repo] = match;
				}
			}

			if (!owner || !repo) {
				console.log(
					"⚠️ Could not parse GitHub repository URL, skipping update check",
				);
				return;
			}

			let latestVersion;
			const isPrivate = settings.repositoryType === "private";

			if (isPrivate) {
				// Use SSH for private repositories
				latestVersion = await this.checkPrivateRepo(settings, owner, repo);
			} else {
				// Use GitHub API for public repositories
				latestVersion = await this.checkPublicRepo(owner, repo);
			}

			if (!latestVersion) {
				console.log(
					"⚠️ Could not determine latest version, skipping update check",
				);
				return;
			}

			// Read version from package.json dynamically
			let currentVersion = "1.2.7"; // fallback
			try {
				const packageJson = require("../../package.json");
				if (packageJson?.version) {
					currentVersion = packageJson.version;
				}
			} catch (packageError) {
				console.warn(
					"Could not read version from package.json, using fallback:",
					packageError.message,
				);
			}
			const isUpdateAvailable =
				this.compareVersions(latestVersion, currentVersion) > 0;

			// Update settings with check results
			await prisma.settings.update({
				where: { id: settings.id },
				data: {
					last_update_check: new Date(),
					update_available: isUpdateAvailable,
					latest_version: latestVersion,
				},
			});

			console.log(
				`✅ Update check completed - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
			);
		} catch (error) {
			console.error("❌ Error checking for updates:", error.message);

			// Update last check time even on error
			try {
				const settings = await prisma.settings.findFirst();
				if (settings) {
					await prisma.settings.update({
						where: { id: settings.id },
						data: {
							last_update_check: new Date(),
							update_available: false,
						},
					});
				}
			} catch (updateError) {
				console.error(
					"❌ Error updating last check time:",
					updateError.message,
				);
			}
		}
	}

	// Check private repository using SSH
	async checkPrivateRepo(settings, owner, repo) {
		try {
			let sshKeyPath = settings.sshKeyPath;

			// Try to find SSH key if not configured
			if (!sshKeyPath) {
				const possibleKeyPaths = [
					"/root/.ssh/id_ed25519",
					"/root/.ssh/id_rsa",
					"/home/patchmon/.ssh/id_ed25519",
					"/home/patchmon/.ssh/id_rsa",
					"/var/www/.ssh/id_ed25519",
					"/var/www/.ssh/id_rsa",
				];

				for (const path of possibleKeyPaths) {
					try {
						require("node:fs").accessSync(path);
						sshKeyPath = path;
						break;
					} catch {
						// Key not found at this path, try next
					}
				}
			}

			if (!sshKeyPath) {
				throw new Error("No SSH deploy key found");
			}

			const sshRepoUrl = `git@github.com:${owner}/${repo}.git`;
			const env = {
				...process.env,
				GIT_SSH_COMMAND: `ssh -i ${sshKeyPath} -o StrictHostKeyChecking=no -o IdentitiesOnly=yes`,
			};

			const { stdout: sshLatestTag } = await execAsync(
				`git ls-remote --tags --sort=-version:refname ${sshRepoUrl} | head -n 1 | sed 's/.*refs\\/tags\\///' | sed 's/\\^{}//'`,
				{
					timeout: 10000,
					env: env,
				},
			);

			return sshLatestTag.trim().replace("v", "");
		} catch (error) {
			console.error("SSH Git error:", error.message);
			throw error;
		}
	}

	// Check public repository using GitHub API
	async checkPublicRepo(owner, repo) {
		try {
			const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

			// Get current version for User-Agent
			let currentVersion = "1.2.7"; // fallback
			try {
				const packageJson = require("../../package.json");
				if (packageJson?.version) {
					currentVersion = packageJson.version;
				}
			} catch (packageError) {
				console.warn(
					"Could not read version from package.json for User-Agent, using fallback:",
					packageError.message,
				);
			}

			const response = await fetch(httpsRepoUrl, {
				method: "GET",
				headers: {
					Accept: "application/vnd.github.v3+json",
					"User-Agent": `PatchMon-Server/${currentVersion}`,
				},
			});

			if (!response.ok) {
				const errorText = await response.text();
				if (
					errorText.includes("rate limit") ||
					errorText.includes("API rate limit")
				) {
					console.log(
						"⚠️ GitHub API rate limit exceeded, skipping update check",
					);
					return null; // Return null instead of throwing error
				}
				throw new Error(
					`GitHub API error: ${response.status} ${response.statusText}`,
				);
			}

			const releaseData = await response.json();
			return releaseData.tag_name.replace("v", "");
		} catch (error) {
			console.error("GitHub API error:", error.message);
			throw error;
		}
	}

	// Compare version strings (semantic versioning)
	compareVersions(version1, version2) {
		const v1parts = version1.split(".").map(Number);
		const v2parts = version2.split(".").map(Number);

		const maxLength = Math.max(v1parts.length, v2parts.length);

		for (let i = 0; i < maxLength; i++) {
			const v1part = v1parts[i] || 0;
			const v2part = v2parts[i] || 0;

			if (v1part > v2part) return 1;
			if (v1part < v2part) return -1;
		}

		return 0;
	}

	// Get scheduler status
	getStatus() {
		return {
			isRunning: this.isRunning,
			checkInterval: this.checkInterval,
			nextCheck: this.isRunning
				? new Date(Date.now() + this.checkInterval)
				: null,
		};
	}
}

// Create singleton instance
const updateScheduler = new UpdateScheduler();

module.exports = updateScheduler;
@@ -1,8 +1,8 @@
 const jwt = require("jsonwebtoken");
 const crypto = require("node:crypto");
-const { PrismaClient } = require("@prisma/client");
+const { getPrismaClient } = require("../config/prisma");
 
-const prisma = new PrismaClient();
+const prisma = getPrismaClient();
 
 /**
  * Session Manager - Handles secure session management with inactivity timeout
@@ -2,9 +2,10 @@
 
 ## Overview
 
-PatchMon is a containerised application that monitors system patches and updates. The application consists of three main services:
+PatchMon is a containerised application that monitors system patches and updates. The application consists of four main services:
 
 - **Database**: PostgreSQL 17
+- **Redis**: Redis 7 for BullMQ job queues and caching
 - **Backend**: Node.js API server
 - **Frontend**: React application served via NGINX
 
@@ -38,21 +39,31 @@ These tags are available for both backend and frontend images as they are versio
    environment:
      DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
    ```
-4. Generate a strong JWT secret. You can do this like so:
+4. Set a Redis password in the Redis service command where it says:
+   ```yaml
+   command: redis-server --requirepass your-redis-password-here
+   ```
+   Note: The Redis service uses a hardcoded password in the command line for better reliability and to avoid environment variable parsing issues.
+5. Update the corresponding `REDIS_PASSWORD` in the backend service where it says:
+   ```yaml
+   environment:
+     REDIS_PASSWORD: your-redis-password-here
+   ```
+6. Generate a strong JWT secret. You can do this like so:
    ```bash
    openssl rand -hex 64
    ```
-5. Set a JWT secret in the backend service where it says:
+7. Set a JWT secret in the backend service where it says:
    ```yaml
    environment:
      JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE
    ```
-6. Configure environment variables (see [Configuration](#configuration) section)
-7. Start the application:
+8. Configure environment variables (see [Configuration](#configuration) section)
+9. Start the application:
    ```bash
    docker compose up -d
    ```
-8. Access the application at `http://localhost:3000`
+10. Access the application at `http://localhost:3000`
 
 ## Updating
 
@@ -106,6 +117,15 @@ When you do this, updating to a new version requires manually updating the image
 | `POSTGRES_USER` | Database user | `patchmon_user` |
 | `POSTGRES_PASSWORD` | Database password | **MUST BE SET!** |
 
+#### Redis Service
+
+| Variable | Description | Default |
+| ---------------- | -------------- | ---------------- |
+| `REDIS_PASSWORD` | Redis password | **MUST BE SET!** |
+
+> [!NOTE]
+> The Redis service uses a hardcoded password in the command line (`redis-server --requirepass your-password`) instead of environment variables or configuration files. This approach eliminates parsing issues and provides better reliability. The password must be set in both the Redis command and the backend service environment variables.
+
 #### Backend Service
 
 ##### Database Configuration
@@ -116,6 +136,15 @@ When you do this, updating to a new version requires manually updating the image
 | `PM_DB_CONN_MAX_ATTEMPTS` | Maximum database connection attempts | `30` |
 | `PM_DB_CONN_WAIT_INTERVAL` | Wait interval between connection attempts in seconds | `2` |
 
+##### Redis Configuration
+
+| Variable | Description | Default |
+| ---------------- | ----------------------------- | ------- |
+| `REDIS_HOST` | Redis server hostname | `redis` |
+| `REDIS_PORT` | Redis server port | `6379` |
+| `REDIS_PASSWORD` | Redis authentication password | **MUST BE UPDATED WITH YOUR REDIS_PASSWORD!** |
+| `REDIS_DB` | Redis database number | `0` |
+
 ##### Authentication & Security
 
 | Variable | Description | Default |
@@ -165,9 +194,10 @@
 
 ### Volumes
 
-The compose file creates two Docker volumes:
+The compose file creates three Docker volumes:
 
 * `postgres_data`: PostgreSQL's data directory.
+* `redis_data`: Redis's data directory.
 * `agent_files`: PatchMon's agent files.
 
 If you wish to bind either of their respective container paths to a host path rather than a Docker volume, you can do so in the Docker Compose file.
@@ -201,6 +231,7 @@ For development with live reload and source code mounting:
 - Frontend: `http://localhost:3000`
 - Backend API: `http://localhost:3001`
 - Database: `localhost:5432`
+- Redis: `localhost:6379`
 
 ## Development Docker Compose
 
@@ -254,6 +285,7 @@ docker compose -f docker/docker-compose.dev.yml up -d --build
 ### Development Ports
 The development setup exposes additional ports for debugging:
 - **Database**: `5432` - Direct PostgreSQL access
+- **Redis**: `6379` - Direct Redis access
 - **Backend**: `3001` - API server with development features
 - **Frontend**: `3000` - React development server with hot reload
 
@@ -277,8 +309,8 @@ The development setup exposes additional ports for debugging:
 - **Prisma Schema Changes**: Backend service restarts automatically
 
 4. **Database Access**: Connect database client directly to `localhost:5432`
+5. **Redis Access**: Connect Redis client directly to `localhost:6379`
-5. **Debug**: If started with `docker compose [...] up -d` or `docker compose [...] watch`, check logs manually:
+6. **Debug**: If started with `docker compose [...] up -d` or `docker compose [...] watch`, check logs manually:
    ```bash
    docker compose -f docker/docker-compose.dev.yml logs -f
    ```
@@ -288,6 +320,6 @@ The development setup exposes additional ports for debugging:
 
 - **Hot Reload**: Automatic code synchronization and service restarts
 - **Enhanced Logging**: Detailed logs for debugging
-- **Direct Access**: Exposed ports for database and API debugging
+- **Direct Access**: Exposed ports for database, Redis, and API debugging
 - **Health Checks**: Built-in health monitoring for services
 - **Volume Persistence**: Development data persists between restarts
@@ -1 +1,3 @@
 **/env.example
+**/.env
+**/.env.*
@@ -8,19 +8,97 @@ log() {
 	echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
 }
 
-# Copy files from agents_backup to agents if agents directory is empty and no .sh files are present
-if [ -d "/app/agents" ] && [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' | head -n 1)" ]; then
-	if [ -d "/app/agents_backup" ]; then
-		log "Agents directory is empty, copying from backup..."
-		cp -r /app/agents_backup/* /app/agents/
-	else
-		log "Warning: agents_backup directory not found"
-	fi
-else
-	log "Agents directory already contains files, skipping copy"
-fi
-
-log "Starting PatchMon Backend (${NODE_ENV:-production})..."
+# Function to extract version from agent script
+get_agent_version() {
+	local file="$1"
+	if [ -f "$file" ]; then
+		grep -m 1 '^AGENT_VERSION=' "$file" | cut -d'"' -f2 2>/dev/null || echo "0.0.0"
+	else
+		echo "0.0.0"
+	fi
+}
+
+# Function to compare versions (returns 0 if $1 > $2)
+version_greater() {
+	# Use sort -V for version comparison
+	test "$(printf '%s\n' "$1" "$2" | sort -V | tail -n1)" = "$1" && test "$1" != "$2"
+}
+
+# Check and update agent files if necessary
+update_agents() {
+	local backup_agent="/app/agents_backup/patchmon-agent.sh"
+	local current_agent="/app/agents/patchmon-agent.sh"
+
+	# Check if agents directory exists
+	if [ ! -d "/app/agents" ]; then
+		log "ERROR: /app/agents directory not found"
+		return 1
+	fi
+
+	# Check if backup exists
+	if [ ! -d "/app/agents_backup" ]; then
+		log "WARNING: agents_backup directory not found, skipping agent update"
+		return 0
+	fi
+
+	# Get versions
+	local backup_version=$(get_agent_version "$backup_agent")
+	local current_version=$(get_agent_version "$current_agent")
+
+	log "Agent version check:"
+	log "  Image version: ${backup_version}"
+	log "  Volume version: ${current_version}"
+
+	# Determine if update is needed
+	local needs_update=0
+
+	# Case 1: No agents in volume (first time setup)
+	if [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' 2>/dev/null | head -n 1)" ]; then
+		log "Agents directory is empty - performing initial copy"
+		needs_update=1
+	# Case 2: Backup version is newer
+	elif version_greater "$backup_version" "$current_version"; then
+		log "Newer agent version available (${backup_version} > ${current_version})"
+		needs_update=1
+	else
+		log "Agents are up to date"
+		needs_update=0
+	fi
+
+	# Perform update if needed
+	if [ $needs_update -eq 1 ]; then
+		log "Updating agents to version ${backup_version}..."
+
+		# Create backup of existing agents if they exist
+		if [ -f "$current_agent" ]; then
+			local backup_timestamp=$(date +%Y%m%d_%H%M%S)
+			local backup_name="/app/agents/patchmon-agent.sh.backup.${backup_timestamp}"
+			cp "$current_agent" "$backup_name" 2>/dev/null || true
+			log "Previous agent backed up to: $(basename $backup_name)"
+		fi
+
+		# Copy new agents
+		cp -r /app/agents_backup/* /app/agents/
+
+		# Make agent binaries executable
+		chmod +x /app/agents/patchmon-agent-linux-* 2>/dev/null || true
+
+		# Verify update
+		local new_version=$(get_agent_version "$current_agent")
+		if [ "$new_version" = "$backup_version" ]; then
+			log "✅ Agents successfully updated to version ${new_version}"
+		else
+			log "⚠️ Warning: Agent update may have failed (expected: ${backup_version}, got: ${new_version})"
+		fi
+	fi
+}
+
+# Main execution
+log "PatchMon Backend Container Starting..."
+log "Environment: ${NODE_ENV:-production}"
+
+# Update agents (version-aware)
+update_agents
+
 log "Running database migrations..."
 npx prisma migrate deploy
@@ -18,6 +18,22 @@
       timeout: 5s
       retries: 7
 
+  redis:
+    image: redis:7-alpine
+    restart: unless-stopped
+    command: redis-server --requirepass 1NS3CU6E_DEV_R3DIS_PASSW0RD
+    environment:
+      REDIS_PASSWORD: 1NS3CU6E_DEV_R3DIS_PASSW0RD
+    ports:
+      - "6379:6379"
+    volumes:
+      - ./compose_dev_data/redis:/data
+    healthcheck:
+      test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "1NS3CU6E_DEV_R3DIS_PASSW0RD", "ping"]
+      interval: 3s
+      timeout: 5s
+      retries: 7
+
   backend:
     build:
       context: ..
@@ -34,6 +50,11 @@
       SERVER_HOST: localhost
       SERVER_PORT: 3000
       CORS_ORIGIN: http://localhost:3000
+      # Redis Configuration
+      REDIS_HOST: redis
+      REDIS_PORT: 6379
+      REDIS_PASSWORD: 1NS3CU6E_DEV_R3DIS_PASSW0RD
+      REDIS_DB: 0
     ports:
       - "3001:3001"
     volumes:
@@ -41,6 +62,8 @@
     depends_on:
       database:
         condition: service_healthy
+      redis:
+        condition: service_healthy
     develop:
       watch:
         - action: sync
@@ -1,3 +1,19 @@
+# Change 3 Passwords in this file:
+# Generate passwords with 'openssl rand -hex 64'
+#
+# 1. The database password in the environment variable POSTGRES_PASSWORD
+# 2. The redis password in the command redis-server --requirepass your-redis-password-here
+# 3. The jwt secret in the environment variable JWT_SECRET
+#
+#
+# Change 2 URL areas in this file:
+# 1. Setup your CORS_ORIGIN to what url you will use for accessing PatchMon frontend url
+# 2. Setup your SERVER_PROTOCOL, SERVER_HOST and SERVER_PORT to what you will use for linux agents to access PatchMon
+#
+# This is generally the same as your CORS_ORIGIN url, in some cases it might be different - SERVER_* variables are used in the scripts for Server connection.
+# You can also change this in the front-end but in the case of docker-compose - it is overwritten by the variables set here.
+
+
 name: patchmon
 
 services:
@@ -7,7 +23,7 @@ services:
     environment:
       POSTGRES_DB: patchmon_db
       POSTGRES_USER: patchmon_user
-      POSTGRES_PASSWORD: # CREATE A STRONG PASSWORD AND PUT IT HERE
+      POSTGRES_PASSWORD: # CREATE A STRONG DB PASSWORD AND PUT IT HERE
     volumes:
       - postgres_data:/var/lib/postgresql/data
     healthcheck:
@@ -16,6 +32,18 @@ services:
       timeout: 5s
       retries: 7
 
+  redis:
+    image: redis:7-alpine
+    restart: unless-stopped
+    command: redis-server --requirepass your-redis-password-here # CHANGE THIS TO YOUR REDIS PASSWORD
+    volumes:
+      - redis_data:/data
+    healthcheck:
+      test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "your-redis-password-here", "ping"] # CHANGE THIS TO YOUR REDIS PASSWORD
+      interval: 3s
+      timeout: 5s
+      retries: 7
+
   backend:
     image: ghcr.io/patchmon/patchmon-backend:latest
     restart: unless-stopped
@@ -23,16 +51,23 @@ services:
     environment:
       LOG_LEVEL: info
       DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
-      JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE - Generate with 'openssl rand -hex 64'
+      JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE
       SERVER_PROTOCOL: http
       SERVER_HOST: localhost
       SERVER_PORT: 3000
       CORS_ORIGIN: http://localhost:3000
+      # Redis Configuration
+      REDIS_HOST: redis
+      REDIS_PORT: 6379
+      REDIS_PASSWORD: your-redis-password-here
+      REDIS_DB: 0
     volumes:
       - agent_files:/app/agents
     depends_on:
       database:
         condition: service_healthy
+      redis:
+        condition: service_healthy
 
   frontend:
     image: ghcr.io/patchmon/patchmon-frontend:latest
@@ -45,4 +80,5 @@ services:
 
 volumes:
   postgres_data:
+  redis_data:
   agent_files:
@@ -24,6 +24,35 @@
     add_header X-XSS-Protection "1; mode=block" always;
     add_header Referrer-Policy "strict-origin-when-cross-origin" always;
 
+    # Bull Board proxy - must come before the root location to avoid conflicts
+    location /bullboard {
+        proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection 'upgrade';
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_set_header X-Forwarded-Host $host;
+        proxy_cache_bypass $http_upgrade;
+        proxy_read_timeout 300s;
+        proxy_connect_timeout 75s;
+
+        # Preserve original client IP through proxy chain
+        proxy_set_header X-Original-Forwarded-For $http_x_forwarded_for;
+
+        # CORS headers for Bull Board
+        add_header Access-Control-Allow-Origin * always;
+        add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
+        add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
+
+        # Handle preflight requests
+        if ($request_method = 'OPTIONS') {
+            return 204;
+        }
+    }
+
     # Handle client-side routing
     location / {
         try_files $uri $uri/ /index.html;
@@ -38,10 +67,18 @@
         proxy_set_header X-Forwarded-Proto $scheme;
         proxy_set_header X-Forwarded-Host $host;
 
+        # For the Websocket connection:
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection 'upgrade';
+        proxy_cache_bypass $http_upgrade;
+        proxy_read_timeout 300s;
+        proxy_connect_timeout 75s;
+
         # Preserve original client IP through proxy chain
         proxy_set_header X-Original-Forwarded-For $http_x_forwarded_for;
 
-        # CORS headers for API calls
+        # CORS headers for API calls - even though backend is doing it
         add_header Access-Control-Allow-Origin * always;
         add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
         add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
@@ -52,8 +89,9 @@
         }
     }
 
-    # Static assets caching
-    location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
+    # Static assets caching (exclude Bull Board assets)
+    location ~* ^/(?!bullboard).*\.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
         expires 1y;
         add_header Cache-Control "public, immutable";
     }
@@ -1,7 +1,7 @@
 {
 	"name": "patchmon-frontend",
 	"private": true,
-	"version": "1.2.7",
+	"version": "1.3.0",
 	"license": "AGPL-3.0",
 	"type": "module",
 	"scripts": {
23  frontend/public/assets/bull-board-logo.svg  Normal file
@@ -0,0 +1,23 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36">
	<circle fill="#DD2E44" cx="18" cy="18" r="18" />
	<circle fill="#FFF" cx="18" cy="18" r="13.5" />
	<circle fill="#DD2E44" cx="18" cy="18" r="10" />
	<circle fill="#FFF" cx="18" cy="18" r="6" />
	<circle fill="#DD2E44" cx="18" cy="18" r="3" />
	<path
		opacity=".2"
		d="M18.24 18.282l13.144 11.754s-2.647 3.376-7.89 5.109L17.579 18.42l.661-.138z"
	/>
	<path
		fill="#FFAC33"
		d="M18.294 19a.994.994 0 01-.704-1.699l.563-.563a.995.995 0 011.408 1.407l-.564.563a.987.987 0 01-.703.292z"
	/>
	<path
		fill="#55ACEE"
		d="M24.016 6.981c-.403 2.079 0 4.691 0 4.691l7.054-7.388c.291-1.454-.528-3.932-1.718-4.238-1.19-.306-4.079.803-5.336 6.935zm5.003 5.003c-2.079.403-4.691 0-4.691 0l7.388-7.054c1.454-.291 3.932.528 4.238 1.718.306 1.19-.803 4.079-6.935 5.336z"
	/>
	<path
		fill="#3A87C2"
		d="M32.798 4.485L21.176 17.587c-.362.362-1.673.882-2.51.046-.836-.836-.419-2.08-.057-2.443L31.815 3.501s.676-.635 1.159-.152-.176 1.136-.176 1.136z"
	/>
</svg>
@@ -18,9 +18,15 @@ const Login = lazy(() => import("./pages/Login"));
 const PackageDetail = lazy(() => import("./pages/PackageDetail"));
 const Packages = lazy(() => import("./pages/Packages"));
 const Profile = lazy(() => import("./pages/Profile"));
-const Queue = lazy(() => import("./pages/Queue"));
+const Automation = lazy(() => import("./pages/Automation"));
 const Repositories = lazy(() => import("./pages/Repositories"));
 const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
+const Docker = lazy(() => import("./pages/Docker"));
+const DockerContainerDetail = lazy(
+	() => import("./pages/docker/ContainerDetail"),
+);
+const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
+const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
 const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
 const Integrations = lazy(() => import("./pages/settings/Integrations"));
 const Notifications = lazy(() => import("./pages/settings/Notifications"));
@@ -137,11 +143,51 @@ function AppRoutes() {
 					}
 				/>
 				<Route
-					path="/queue"
+					path="/automation"
 					element={
 						<ProtectedRoute requirePermission="can_view_hosts">
 							<Layout>
-								<Queue />
+								<Automation />
+							</Layout>
+						</ProtectedRoute>
+					}
+				/>
+				<Route
+					path="/docker"
+					element={
+						<ProtectedRoute requirePermission="can_view_reports">
+							<Layout>
+								<Docker />
+							</Layout>
+						</ProtectedRoute>
+					}
+				/>
+				<Route
+					path="/docker/containers/:id"
+					element={
+						<ProtectedRoute requirePermission="can_view_reports">
+							<Layout>
+								<DockerContainerDetail />
+							</Layout>
+						</ProtectedRoute>
+					}
+				/>
+				<Route
+					path="/docker/images/:id"
+					element={
+						<ProtectedRoute requirePermission="can_view_reports">
+							<Layout>
+								<DockerImageDetail />
+							</Layout>
+						</ProtectedRoute>
+					}
+				/>
+				<Route
+					path="/docker/hosts/:id"
+					element={
+						<ProtectedRoute requirePermission="can_view_reports">
+							<Layout>
+								<DockerHostDetail />
 							</Layout>
 						</ProtectedRoute>
 					}
@@ -2,6 +2,7 @@ import { AlertCircle, CheckCircle, Shield, UserPlus } from "lucide-react";
 import { useId, useState } from "react";
 import { useNavigate } from "react-router-dom";
 import { useAuth } from "../contexts/AuthContext";
+import { isCorsError } from "../utils/api";
 
 const FirstTimeAdminSetup = () => {
 	const { login, setAuthState } = useAuth();
@@ -121,11 +122,39 @@ const FirstTimeAdminSetup = () => {
 					}, 2000);
 				}
 			} else {
-				setError(data.error || "Failed to create admin user");
+				// Handle HTTP error responses (like 500 CORS errors)
+				console.log("HTTP error response:", response.status, data);
+
+				// Check if this is a CORS error based on the response data
+				if (
+					data.message?.includes("Not allowed by CORS") ||
+					data.message?.includes("CORS") ||
+					data.error?.includes("CORS")
+				) {
+					setError(
+						"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+					);
+				} else {
+					setError(data.error || "Failed to create admin user");
+				}
 			}
 		} catch (error) {
 			console.error("Setup error:", error);
-			setError("Network error. Please try again.");
+			// Check for CORS/network errors first
+			if (isCorsError(error)) {
+				setError(
+					"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+				);
+			} else if (
+				error.name === "TypeError" &&
+				error.message?.includes("Failed to fetch")
+			) {
+				setError(
+					"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+				);
+			} else {
+				setError("Network error. Please try again.");
+			}
 		} finally {
 			setIsLoading(false);
 		}
frontend/src/components/InlineMultiGroupEdit.jsx (new file, 283 lines)
@@ -0,0 +1,283 @@
import { Check, ChevronDown, Edit2, X } from "lucide-react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";

const InlineMultiGroupEdit = ({
  value = [], // Array of group IDs
  onSave,
  onCancel,
  options = [],
  className = "",
  disabled = false,
}) => {
  const [isEditing, setIsEditing] = useState(false);
  const [selectedValues, setSelectedValues] = useState(value);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState("");
  const [isOpen, setIsOpen] = useState(false);
  const [dropdownPosition, setDropdownPosition] = useState({
    top: 0,
    left: 0,
    width: 0,
  });
  const dropdownRef = useRef(null);
  const buttonRef = useRef(null);

  useEffect(() => {
    if (isEditing && dropdownRef.current) {
      dropdownRef.current.focus();
    }
  }, [isEditing]);

  useEffect(() => {
    setSelectedValues(value);
    // Force re-render when value changes
    if (!isEditing) {
      setIsOpen(false);
    }
  }, [value, isEditing]);

  // Calculate dropdown position
  const calculateDropdownPosition = useCallback(() => {
    if (buttonRef.current) {
      const rect = buttonRef.current.getBoundingClientRect();
      setDropdownPosition({
        top: rect.bottom + window.scrollY + 4,
        left: rect.left + window.scrollX,
        width: rect.width,
      });
    }
  }, []);

  // Close dropdown when clicking outside
  useEffect(() => {
    const handleClickOutside = (event) => {
      if (dropdownRef.current && !dropdownRef.current.contains(event.target)) {
        setIsOpen(false);
      }
    };

    if (isOpen) {
      calculateDropdownPosition();
      document.addEventListener("mousedown", handleClickOutside);
      window.addEventListener("resize", calculateDropdownPosition);
      window.addEventListener("scroll", calculateDropdownPosition);
      return () => {
        document.removeEventListener("mousedown", handleClickOutside);
        window.removeEventListener("resize", calculateDropdownPosition);
        window.removeEventListener("scroll", calculateDropdownPosition);
      };
    }
  }, [isOpen, calculateDropdownPosition]);

  const handleEdit = () => {
    if (disabled) return;
    setIsEditing(true);
    setSelectedValues(value);
    setError("");
    // Automatically open dropdown when editing starts
    setTimeout(() => {
      setIsOpen(true);
    }, 0);
  };

  const handleCancel = () => {
    setIsEditing(false);
    setSelectedValues(value);
    setError("");
    setIsOpen(false);
    if (onCancel) onCancel();
  };

  const handleSave = async () => {
    if (disabled || isLoading) return;

    // Check if values actually changed
    const sortedCurrent = [...value].sort();
    const sortedSelected = [...selectedValues].sort();
    if (JSON.stringify(sortedCurrent) === JSON.stringify(sortedSelected)) {
      setIsEditing(false);
      setIsOpen(false);
      return;
    }

    setIsLoading(true);
    setError("");

    try {
      await onSave(selectedValues);
      setIsEditing(false);
      setIsOpen(false);
    } catch (err) {
      setError(err.message || "Failed to save");
    } finally {
      setIsLoading(false);
    }
  };

  const handleKeyDown = (e) => {
    if (e.key === "Enter") {
      e.preventDefault();
      handleSave();
    } else if (e.key === "Escape") {
      e.preventDefault();
      handleCancel();
    }
  };

  const toggleGroup = (groupId) => {
    setSelectedValues((prev) => {
      if (prev.includes(groupId)) {
        return prev.filter((id) => id !== groupId);
      } else {
        return [...prev, groupId];
      }
    });
  };

  const _displayValue = useMemo(() => {
    if (!value || value.length === 0) {
      return "Ungrouped";
    }
    if (value.length === 1) {
      const option = options.find((opt) => opt.id === value[0]);
      return option ? option.name : "Unknown Group";
    }
    return `${value.length} groups`;
  }, [value, options]);

  const displayGroups = useMemo(() => {
    if (!value || value.length === 0) {
      return [];
    }
    return value
      .map((groupId) => options.find((opt) => opt.id === groupId))
      .filter(Boolean);
  }, [value, options]);

  if (isEditing) {
    return (
      <div className={`relative ${className}`} ref={dropdownRef}>
        <div className="flex items-center gap-2">
          <div className="relative flex-1">
            <button
              ref={buttonRef}
              type="button"
              onClick={() => setIsOpen(!isOpen)}
              onKeyDown={handleKeyDown}
              disabled={isLoading}
              className={`w-full px-3 py-1 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent flex items-center justify-between ${
                error ? "border-red-500" : ""
              } ${isLoading ? "opacity-50" : ""}`}
            >
              <span className="truncate">
                {selectedValues.length === 0
                  ? "Ungrouped"
                  : selectedValues.length === 1
                    ? options.find((opt) => opt.id === selectedValues[0])
                        ?.name || "Unknown Group"
                    : `${selectedValues.length} groups selected`}
              </span>
              <ChevronDown className="h-4 w-4 flex-shrink-0" />
            </button>

            {isOpen && (
              <div
                className="fixed z-50 bg-white dark:bg-secondary-800 border border-secondary-300 dark:border-secondary-600 rounded-md shadow-lg max-h-60 overflow-auto"
                style={{
                  top: `${dropdownPosition.top}px`,
                  left: `${dropdownPosition.left}px`,
                  width: `${dropdownPosition.width}px`,
                  minWidth: "200px",
                }}
              >
                <div className="py-1">
                  {options.map((option) => (
                    <label
                      key={option.id}
                      className="w-full px-3 py-2 text-left text-sm hover:bg-secondary-100 dark:hover:bg-secondary-700 flex items-center cursor-pointer"
                    >
                      <input
                        type="checkbox"
                        checked={selectedValues.includes(option.id)}
                        onChange={() => toggleGroup(option.id)}
                        className="mr-2 h-4 w-4 text-primary-600 focus:ring-primary-500 border-secondary-300 rounded"
                      />
                      <span
                        className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium text-white"
                        style={{ backgroundColor: option.color }}
                      >
                        {option.name}
                      </span>
                    </label>
                  ))}
                  {options.length === 0 && (
                    <div className="px-3 py-2 text-sm text-secondary-500 dark:text-secondary-400">
                      No groups available
                    </div>
                  )}
                </div>
              </div>
            )}
          </div>
          <button
            type="button"
            onClick={handleSave}
            disabled={isLoading}
            className="p-1 text-green-600 hover:text-green-700 hover:bg-green-50 dark:hover:bg-green-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
            title="Save"
          >
            <Check className="h-4 w-4" />
          </button>
          <button
            type="button"
            onClick={handleCancel}
            disabled={isLoading}
            className="p-1 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
            title="Cancel"
          >
            <X className="h-4 w-4" />
          </button>
        </div>
        {error && (
          <span className="text-xs text-red-600 dark:text-red-400 mt-1 block">
            {error}
          </span>
        )}
      </div>
    );
  }

  return (
    <div className={`flex items-center gap-1 group ${className}`}>
      {displayGroups.length === 0 ? (
        <span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800">
          Ungrouped
        </span>
      ) : (
        <div className="flex items-center gap-1 flex-wrap">
          {displayGroups.map((group) => (
            <span
              key={group.id}
              className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium text-white"
              style={{ backgroundColor: group.color }}
            >
              {group.name}
            </span>
          ))}
        </div>
      )}
      {!disabled && (
        <button
          type="button"
          onClick={handleEdit}
          className="p-1 text-secondary-400 hover:text-secondary-600 dark:hover:text-secondary-300 hover:bg-secondary-100 dark:hover:bg-secondary-700 rounded transition-colors opacity-0 group-hover:opacity-100"
          title="Edit groups"
        >
          <Edit2 className="h-3 w-3" />
        </button>
      )}
    </div>
  );
};

export default InlineMultiGroupEdit;
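The component above is self-contained; the compare view does not show where it is mounted, so the following is only a minimal usage sketch. The table cell, the `group_ids` field and the `PATCH /hosts/:id/groups` endpoint are assumptions for illustration, while the props (`value`, `options`, `onSave`) come from the component signature above.

```jsx
// Hypothetical consumer - the actual call site is not part of this diff.
import InlineMultiGroupEdit from "../components/InlineMultiGroupEdit";
import api from "../utils/api"; // assumed axios-style helper used elsewhere in the frontend

const HostGroupsCell = ({ host, groups, refetch }) => (
  <InlineMultiGroupEdit
    value={host.group_ids}   // array of group IDs currently assigned (assumed field name)
    options={groups}         // [{ id, name, color }, ...]
    onSave={async (ids) => {
      // persist the new membership; the endpoint path is an assumption
      await api.patch(`/hosts/${host.id}/groups`, { group_ids: ids });
      refetch();
    }}
  />
);
```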
@@ -11,7 +11,6 @@ import {
 	Github,
 	Globe,
 	Home,
-	List,
 	LogOut,
 	Mail,
 	Menu,
@@ -113,18 +112,26 @@ const Layout = ({ children }) => {
 		});
 	}

+	// Add Automation item (available to all users with inventory access)
+	inventoryItems.push({
+		name: "Automation",
+		href: "/automation",
+		icon: RefreshCw,
+		new: true,
+	});
+
 	if (canViewReports()) {
 		inventoryItems.push(
-			{
-				name: "Services",
-				href: "/services",
-				icon: Activity,
-				comingSoon: true,
-			},
 			{
 				name: "Docker",
 				href: "/docker",
 				icon: Container,
+				beta: true,
+			},
+			{
+				name: "Services",
+				href: "/services",
+				icon: Activity,
 				comingSoon: true,
 			},
 			{
@@ -136,21 +143,13 @@ const Layout = ({ children }) => {
 		);
 	}

-	// Add Pro-Action and Queue items (available to all users with inventory access)
-	inventoryItems.push(
-		{
-			name: "Pro-Action",
-			href: "/pro-action",
-			icon: Zap,
-			comingSoon: true,
-		},
-		{
-			name: "Queue",
-			href: "/queue",
-			icon: List,
-			comingSoon: true,
-		},
-	);
+	// Add Pro-Action item (available to all users with inventory access)
+	inventoryItems.push({
+		name: "Pro-Action",
+		href: "/pro-action",
+		icon: Zap,
+		comingSoon: true,
+	});

 	if (inventoryItems.length > 0) {
 		nav.push({
@@ -210,7 +209,7 @@ const Layout = ({ children }) => {
 		if (path === "/services") return "Services";
 		if (path === "/docker") return "Docker";
 		if (path === "/pro-action") return "Pro-Action";
-		if (path === "/queue") return "Queue";
+		if (path === "/automation") return "Automation";
 		if (path === "/users") return "Users";
 		if (path === "/permissions") return "Permissions";
 		if (path === "/settings") return "Settings";
@@ -436,6 +435,16 @@ const Layout = ({ children }) => {
 				Soon
 			</span>
 		)}
+		{subItem.beta && (
+			<span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
+				Beta
+			</span>
+		)}
+		{subItem.new && (
+			<span className="text-xs bg-green-100 dark:bg-green-900 text-green-600 dark:text-green-200 px-1.5 py-0.5 rounded font-medium">
+				New
+			</span>
+		)}
 	</span>
 </Link>
 )}
@@ -707,6 +716,16 @@ const Layout = ({ children }) => {
 				Soon
 			</span>
 		)}
+		{subItem.beta && (
+			<span className="text-xs bg-blue-100 dark:bg-blue-900 text-blue-600 dark:text-blue-200 px-1.5 py-0.5 rounded font-medium">
+				Beta
+			</span>
+		)}
+		{subItem.new && (
+			<span className="text-xs bg-green-100 dark:bg-green-900 text-green-600 dark:text-green-200 px-1.5 py-0.5 rounded font-medium">
+				New
+			</span>
+		)}
 		{subItem.showUpgradeIcon && (
 			<UpgradeNotificationIcon className="h-3 w-3" />
 		)}
@@ -929,11 +948,17 @@ const Layout = ({ children }) => {
 <div className="h-6 w-px bg-secondary-200 dark:bg-secondary-600 lg:hidden" />

 <div className="flex flex-1 gap-x-4 self-stretch lg:gap-x-6">
-	{/* Page title - hidden on dashboard, hosts, repositories, packages, and host details to give more space to search */}
-	{!["/", "/hosts", "/repositories", "/packages"].includes(
-		location.pathname,
-	) &&
-		!location.pathname.startsWith("/hosts/") && (
+	{/* Page title - hidden on dashboard, hosts, repositories, packages, automation, docker, and host details to give more space to search */}
+	{![
+		"/",
+		"/hosts",
+		"/repositories",
+		"/packages",
+		"/automation",
+		"/docker",
+	].includes(location.pathname) &&
+		!location.pathname.startsWith("/hosts/") &&
+		!location.pathname.startsWith("/docker/") && (
 			<div className="relative flex items-center">
 				<h2 className="text-lg font-semibold text-secondary-900 dark:text-secondary-100 whitespace-nowrap">
 					{getPageTitle()}
@@ -943,7 +968,7 @@ const Layout = ({ children }) => {

 {/* Global Search Bar */}
 <div
-	className={`flex items-center ${["/", "/hosts", "/repositories", "/packages"].includes(location.pathname) || location.pathname.startsWith("/hosts/") ? "flex-1 max-w-none" : "max-w-sm"}`}
+	className={`flex items-center ${["/", "/hosts", "/repositories", "/packages", "/automation", "/docker"].includes(location.pathname) || location.pathname.startsWith("/hosts/") || location.pathname.startsWith("/docker/") ? "flex-1 max-w-none" : "max-w-sm"}`}
 >
 	<GlobalSearch />
 </div>
@@ -1,376 +1,282 @@
-import { useMutation, useQuery } from "@tanstack/react-query";
-import { AlertCircle, Code, Download, Plus, Shield, X } from "lucide-react";
-import { useId, useState } from "react";
-import { agentFileAPI, settingsAPI } from "../../utils/api";
+import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
+import { AlertCircle, CheckCircle, Clock, RefreshCw } from "lucide-react";
+import api from "../../utils/api";

 const AgentManagementTab = () => {
-	const scriptFileId = useId();
-	const scriptContentId = useId();
-	const [showUploadModal, setShowUploadModal] = useState(false);
-
-	// Agent file queries and mutations
-	const {
-		data: agentFileInfo,
-		isLoading: agentFileLoading,
-		error: agentFileError,
-		refetch: refetchAgentFile,
-	} = useQuery({
-		queryKey: ["agentFile"],
-		queryFn: () => agentFileAPI.getInfo().then((res) => res.data),
-	});
-
-	// Fetch settings for dynamic curl flags
-	const { data: settings } = useQuery({
-		queryKey: ["settings"],
-		queryFn: () => settingsAPI.get().then((res) => res.data),
-	});
-
-	// Helper function to get curl flags based on settings
-	const getCurlFlags = () => {
-		return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
-	};
-
-	const uploadAgentMutation = useMutation({
-		mutationFn: (scriptContent) =>
-			agentFileAPI.upload(scriptContent).then((res) => res.data),
-		onSuccess: () => {
-			refetchAgentFile();
-			setShowUploadModal(false);
-		},
-		onError: (error) => {
-			console.error("Upload agent error:", error);
-		},
-	});
+	const _queryClient = useQueryClient();
+
+	// Agent version queries
+	const {
+		data: versionInfo,
+		isLoading: versionLoading,
+		error: versionError,
+		refetch: refetchVersion,
+	} = useQuery({
+		queryKey: ["agentVersion"],
+		queryFn: async () => {
+			try {
+				const response = await api.get("/agent/version");
+				console.log("🔍 Frontend received version info:", response.data);
+				return response.data;
+			} catch (error) {
+				console.error("Failed to fetch version info:", error);
+				throw error;
+			}
+		},
+		refetchInterval: 5 * 60 * 1000, // Refetch every 5 minutes
+		enabled: true, // Always enabled
+		retry: 3, // Retry failed requests
+	});
+
+	const {
+		data: _availableVersions,
+		isLoading: _versionsLoading,
+		error: _versionsError,
+	} = useQuery({
+		queryKey: ["agentVersions"],
+		queryFn: async () => {
+			try {
+				const response = await api.get("/agent/versions");
+				console.log("🔍 Frontend received available versions:", response.data);
+				return response.data;
+			} catch (error) {
+				console.error("Failed to fetch available versions:", error);
+				throw error;
+			}
+		},
+		enabled: true,
+		retry: 3,
+	});
+
+	const checkUpdatesMutation = useMutation({
+		mutationFn: async () => {
+			// First check GitHub for updates, then refresh current agent version detection
+			await api.post("/agent/version/check");
+			await api.post("/agent/version/refresh");
+		},
+		onSuccess: () => refetchVersion(),
+		onError: (error) => console.error("Check updates error:", error),
+	});
+
+	const downloadUpdateMutation = useMutation({
+		mutationFn: async () => {
+			// Download the latest binaries, then refresh current agent version detection
+			const downloadResult = await api.post("/agent/version/download");
+			await api.post("/agent/version/refresh");
+			return downloadResult;
+		},
+		onSuccess: (data) => {
+			refetchVersion();
+			alert(`✅ ${data.data?.message || "Agent binaries downloaded successfully"}`);
+		},
+		onError: (error) => alert(`❌ Download failed: ${error.message}`),
+	});
+
+	// Use the backend's updateStatus (proper semver comparison) to pick an icon, colour and message
+	const getVersionStatus = () => {
+		if (versionError)
+			return { status: "error", message: "Failed to load version info", Icon: AlertCircle, color: "text-red-600" };
+		if (!versionInfo || versionLoading)
+			return { status: "loading", message: "Loading version info...", Icon: RefreshCw, color: "text-gray-600" };
+		switch (versionInfo.updateStatus) {
+			case "update-available":
+				return { status: "update-available", message: `Update available: ${versionInfo.latestVersion}`, Icon: Clock, color: "text-yellow-600" };
+			case "newer-version":
+				return { status: "newer-version", message: `Newer version running: ${versionInfo.currentVersion}`, Icon: CheckCircle, color: "text-blue-600" };
+			case "up-to-date":
+				return { status: "up-to-date", message: `Up to date: ${versionInfo.latestVersion}`, Icon: CheckCircle, color: "text-green-600" };
+			case "no-agent":
+				return { status: "no-agent", message: "No agent binary found", Icon: AlertCircle, color: "text-orange-600" };
+			case "github-unavailable":
+				return { status: "github-unavailable", message: `Agent running: ${versionInfo.currentVersion} (GitHub API unavailable)`, Icon: CheckCircle, color: "text-purple-600" };
+			case "no-data":
+				return { status: "no-data", message: "No version data available", Icon: AlertCircle, color: "text-gray-600" };
+			default:
+				return { status: "unknown", message: "Version status unknown", Icon: AlertCircle, color: "text-gray-600" };
+		}
+	};
+
+	const versionStatus = getVersionStatus();
+	const StatusIcon = versionStatus.Icon;

 	return (
 		<div className="space-y-6">
-			{/* Old render: "Agent File Management" header with Download and Replace Script
-			   buttons, the current script info card (version, size, last modified), usage
-			   notes, the curl-based uninstall card, and the AgentUploadModal component for
-			   uploading a replacement shell script */}
+			{/* New render: "Agent Version Management" header with a Check Updates button,
+			   a "Download Agent Updates" / "Download Agent Binaries" card driven by
+			   downloadUpdateMutation, and an "Agent Version Status" card showing
+			   StatusIcon, versionStatus.message, current version, latest version and
+			   last-checked time */}
 		</div>
 	);
 };
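For reference, the new tab drives the agent-version endpoints shown above; a condensed sketch of the same check-then-download flow outside React Query (the endpoint paths come from the diff, the `api` helper is the project's axios-style wrapper, and the standalone function name is an assumption):

```js
// Sketch only - mirrors the mutations above, not an additional API.
async function refreshAgentBinaries(api) {
  await api.post("/agent/version/check");    // compare against the latest GitHub release
  await api.post("/agent/version/download"); // fetch the latest agent binaries
  await api.post("/agent/version/refresh");  // re-detect the locally installed agent version
  const { data } = await api.get("/agent/version");
  return data.updateStatus;                  // e.g. "up-to-date" or "update-available"
}
```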
@@ -446,6 +446,53 @@ const AgentUpdatesTab = () => {
 		</div>
 	)}
 </form>
+
+{/* Uninstall Instructions */}
+<div className="bg-red-50 dark:bg-red-900 border border-red-200 dark:border-red-700 rounded-md p-4">
+	<div className="flex">
+		<Shield className="h-5 w-5 text-red-400 dark:text-red-300" />
+		<div className="ml-3">
+			<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
+				Agent Uninstall Command
+			</h3>
+			<div className="mt-2 text-sm text-red-700 dark:text-red-300">
+				<p className="mb-3">To completely remove PatchMon from a host:</p>
+
+				{/* Go Agent Uninstall */}
+				<div className="mb-3">
+					<div className="space-y-2">
+						<div className="flex items-center gap-2">
+							<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
+								sudo patchmon-agent uninstall
+							</div>
+							<button
+								type="button"
+								onClick={() => {
+									navigator.clipboard.writeText(
+										"sudo patchmon-agent uninstall",
+									);
+								}}
+								className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
+							>
+								Copy
+							</button>
+						</div>
+						<div className="text-xs text-red-600 dark:text-red-400">
+							Options: <code>--remove-config</code>,{" "}
+							<code>--remove-logs</code>, <code>--remove-all</code>,{" "}
+							<code>--force</code>
+						</div>
+					</div>
+				</div>
+
+				<p className="mt-2 text-xs">
+					⚠️ This command will remove all PatchMon files, configuration,
+					and crontab entries
+				</p>
+			</div>
+		</div>
+	</div>
+</div>
 </div>
 </div>
 );
 };
@@ -128,12 +128,14 @@ const VersionUpdateTab = () => {
 <span className="text-lg font-mono text-secondary-900 dark:text-white">
 	{versionInfo.github.latestRelease.tagName}
 </span>
-<div className="text-xs text-secondary-500 dark:text-secondary-400">
-	Published:{" "}
-	{new Date(
-		versionInfo.github.latestRelease.publishedAt,
-	).toLocaleDateString()}
-</div>
+{versionInfo.github.latestRelease.publishedAt && (
+	<div className="text-xs text-secondary-500 dark:text-secondary-400">
+		Published:{" "}
+		{new Date(
+			versionInfo.github.latestRelease.publishedAt,
+		).toLocaleDateString()}
+	</div>
+)}
 </div>
 </div>
 )}
@@ -7,6 +7,7 @@ import {
 } from "react";
 import { flushSync } from "react-dom";
 import { AUTH_PHASES, isAuthPhase } from "../constants/authPhases";
+import { isCorsError } from "../utils/api";

 const AuthContext = createContext();

@@ -120,9 +121,50 @@ export const AuthProvider = ({ children }) => {

 			return { success: true };
 		} else {
+			// Handle HTTP error responses (like 500 CORS errors)
+			console.log("HTTP error response:", response.status, data);
+
+			// Check if this is a CORS error based on the response data
+			if (
+				data.message?.includes("Not allowed by CORS") ||
+				data.message?.includes("CORS") ||
+				data.error?.includes("CORS")
+			) {
+				return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+			}
+
 			return { success: false, error: data.error || "Login failed" };
 		}
-	} catch {
+	} catch (error) {
+		console.log("Login error:", error);
+		console.log("Error response:", error.response);
+		console.log("Error message:", error.message);
+
+		// Check for CORS/network errors first
+		if (isCorsError(error)) {
+			return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+		}
+
+		// Check for other network errors
+		if (
+			error.name === "TypeError" &&
+			error.message?.includes("Failed to fetch")
+		) {
+			return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+		}
+
 		return { success: false, error: "Network error occurred" };
 	}
 };
@@ -167,9 +209,46 @@ export const AuthProvider = ({ children }) => {
 			localStorage.setItem("user", JSON.stringify(data.user));
 			return { success: true, user: data.user };
 		} else {
+			// Handle HTTP error responses (like 500 CORS errors)
+			console.log("HTTP error response:", response.status, data);
+
+			// Check if this is a CORS error based on the response data
+			if (
+				data.message?.includes("Not allowed by CORS") ||
+				data.message?.includes("CORS") ||
+				data.error?.includes("CORS")
+			) {
+				return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+			}
+
 			return { success: false, error: data.error || "Update failed" };
 		}
-	} catch {
+	} catch (error) {
+		// Check for CORS/network errors first
+		if (isCorsError(error)) {
+			return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+		}
+
+		// Check for other network errors
+		if (
+			error.name === "TypeError" &&
+			error.message?.includes("Failed to fetch")
+		) {
+			return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+		}
+
 		return { success: false, error: "Network error occurred" };
 	}
 };
@@ -190,12 +269,49 @@ export const AuthProvider = ({ children }) => {
 		if (response.ok) {
 			return { success: true };
 		} else {
+			// Handle HTTP error responses (like 500 CORS errors)
+			console.log("HTTP error response:", response.status, data);
+
+			// Check if this is a CORS error based on the response data
+			if (
+				data.message?.includes("Not allowed by CORS") ||
+				data.message?.includes("CORS") ||
+				data.error?.includes("CORS")
+			) {
+				return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+			}
+
 			return {
 				success: false,
 				error: data.error || "Password change failed",
 			};
 		}
-	} catch {
+	} catch (error) {
+		// Check for CORS/network errors first
+		if (isCorsError(error)) {
+			return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+		}
+
+		// Check for other network errors
+		if (
+			error.name === "TypeError" &&
+			error.message?.includes("Failed to fetch")
+		) {
+			return { success: false, error: "CORS_ORIGIN mismatch - please set your URL in your environment variable" };
+		}
+
 		return { success: false, error: "Network error occurred" };
 	}
 };
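`isCorsError` is imported from `../utils/api` in several of these hunks, but its implementation is not part of this compare view. A minimal sketch of what such a helper might look like, assuming fetch/axios-style network errors (an assumption, not the project's actual code):

```js
// Hypothetical helper - the real isCorsError lives in frontend/src/utils/api.
export function isCorsError(error) {
  const message = error?.message || "";
  return (
    // fetch() rejects with a TypeError and no response when CORS blocks the request
    (error?.name === "TypeError" && message.includes("Failed to fetch")) ||
    // axios surfaces blocked requests as "Network Error" without a response object
    (message.includes("Network Error") && !error?.response) ||
    message.includes("CORS")
  );
}
```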
613
frontend/src/pages/Automation.jsx
Normal file
613
frontend/src/pages/Automation.jsx
Normal file
@@ -0,0 +1,613 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
Activity,
|
||||||
|
ArrowDown,
|
||||||
|
ArrowUp,
|
||||||
|
ArrowUpDown,
|
||||||
|
CheckCircle,
|
||||||
|
Clock,
|
||||||
|
Play,
|
||||||
|
Settings,
|
||||||
|
XCircle,
|
||||||
|
Zap,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { useState } from "react";
|
||||||
|
import api from "../utils/api";
|
||||||
|
|
||||||
|
const Automation = () => {
|
||||||
|
const [activeTab, setActiveTab] = useState("overview");
|
||||||
|
const [sortField, setSortField] = useState("nextRunTimestamp");
|
||||||
|
const [sortDirection, setSortDirection] = useState("asc");
|
||||||
|
|
||||||
|
// Fetch automation overview data
|
||||||
|
const { data: overview, isLoading: overviewLoading } = useQuery({
|
||||||
|
queryKey: ["automation-overview"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get("/automation/overview");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000, // Refresh every 30 seconds
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch queue statistics
|
||||||
|
useQuery({
|
||||||
|
queryKey: ["automation-stats"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get("/automation/stats");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Fetch recent jobs
|
||||||
|
useQuery({
|
||||||
|
queryKey: ["automation-jobs"],
|
||||||
|
queryFn: async () => {
|
||||||
|
const jobs = await Promise.all([
|
||||||
|
api
|
||||||
|
.get("/automation/jobs/github-update-check?limit=5")
|
||||||
|
.then((r) => r.data.data || []),
|
||||||
|
api
|
||||||
|
.get("/automation/jobs/session-cleanup?limit=5")
|
||||||
|
.then((r) => r.data.data || []),
|
||||||
|
]);
|
||||||
|
return {
|
||||||
|
githubUpdate: jobs[0],
|
||||||
|
sessionCleanup: jobs[1],
|
||||||
|
};
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const _getStatusIcon = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return <CheckCircle className="h-4 w-4 text-green-500" />;
|
||||||
|
case "failed":
|
||||||
|
return <XCircle className="h-4 w-4 text-red-500" />;
|
||||||
|
case "active":
|
||||||
|
return <Activity className="h-4 w-4 text-blue-500 animate-pulse" />;
|
||||||
|
default:
|
||||||
|
return <Clock className="h-4 w-4 text-gray-500" />;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const _getStatusColor = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "completed":
|
||||||
|
return "bg-green-100 text-green-800";
|
||||||
|
case "failed":
|
||||||
|
return "bg-red-100 text-red-800";
|
||||||
|
case "active":
|
||||||
|
return "bg-blue-100 text-blue-800";
|
||||||
|
default:
|
||||||
|
return "bg-gray-100 text-gray-800";
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const _formatDate = (dateString) => {
|
||||||
|
if (!dateString) return "N/A";
|
||||||
|
return new Date(dateString).toLocaleString();
|
||||||
|
};
|
||||||
|
|
||||||
|
const _formatDuration = (ms) => {
|
||||||
|
if (!ms) return "N/A";
|
||||||
|
return `${ms}ms`;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusBadge = (status) => {
|
||||||
|
switch (status) {
|
||||||
|
case "Success":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-green-100 text-green-800">
|
||||||
|
Success
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
case "Failed":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-red-100 text-red-800">
|
||||||
|
Failed
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
case "Never run":
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
|
||||||
|
Never run
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
default:
|
||||||
|
return (
|
||||||
|
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getNextRunTime = (schedule, _lastRun) => {
|
||||||
|
if (schedule === "Manual only") return "Manual trigger only";
|
||||||
|
if (schedule.includes("Agent-driven")) return "Agent-driven (automatic)";
|
||||||
|
if (schedule === "Daily at midnight") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(0, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 2 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(2, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 3 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(3, 0, 0, 0);
|
||||||
|
return tomorrow.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (schedule === "Every hour") {
|
||||||
|
const now = new Date();
|
||||||
|
const nextHour = new Date(now);
|
||||||
|
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
|
||||||
|
return nextHour.toLocaleString([], {
|
||||||
|
hour12: true,
|
||||||
|
hour: "numeric",
|
||||||
|
minute: "2-digit",
|
||||||
|
day: "numeric",
|
||||||
|
month: "numeric",
|
||||||
|
year: "numeric",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return "Unknown";
|
||||||
|
};
|
||||||
|
|
||||||
|
const getNextRunTimestamp = (schedule) => {
|
||||||
|
if (schedule === "Manual only") return Number.MAX_SAFE_INTEGER; // Manual tasks go to bottom
|
||||||
|
if (schedule.includes("Agent-driven")) return Number.MAX_SAFE_INTEGER - 1; // Agent-driven tasks near bottom but above manual
|
||||||
|
if (schedule === "Daily at midnight") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(0, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 2 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(2, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Daily at 3 AM") {
|
||||||
|
const now = new Date();
|
||||||
|
const tomorrow = new Date(now);
|
||||||
|
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||||
|
tomorrow.setHours(3, 0, 0, 0);
|
||||||
|
return tomorrow.getTime();
|
||||||
|
}
|
||||||
|
if (schedule === "Every hour") {
|
||||||
|
const now = new Date();
|
||||||
|
const nextHour = new Date(now);
|
||||||
|
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
|
||||||
|
return nextHour.getTime();
|
||||||
|
}
|
||||||
|
return Number.MAX_SAFE_INTEGER; // Unknown schedules go to bottom
|
||||||
|
};
|
||||||
|
|
||||||
|
	const openBullBoard = () => {
		const token = localStorage.getItem("token");
		if (!token) {
			alert("Please log in to access the Queue Monitor");
			return;
		}

		// Use the proxied URL through the frontend (port 3000)
		// This avoids CORS issues as everything goes through the same origin
		const url = `/bullboard?token=${encodeURIComponent(token)}`;
		window.open(url, "_blank", "width=1200,height=800");
	};

	const triggerManualJob = async (jobType, data = {}) => {
		try {
			let endpoint;

			if (jobType === "github") {
				endpoint = "/automation/trigger/github-update";
			} else if (jobType === "sessions") {
				endpoint = "/automation/trigger/session-cleanup";
			} else if (jobType === "orphaned-repos") {
				endpoint = "/automation/trigger/orphaned-repo-cleanup";
			} else if (jobType === "orphaned-packages") {
				endpoint = "/automation/trigger/orphaned-package-cleanup";
			} else if (jobType === "agent-collection") {
				endpoint = "/automation/trigger/agent-collection";
			}

			const _response = await api.post(endpoint, data);

			// Refresh data
			window.location.reload();
		} catch (error) {
			console.error("Error triggering job:", error);
			alert(
				"Failed to trigger job: " +
					(error.response?.data?.error || error.message),
			);
		}
	};

	const handleSort = (field) => {
		if (sortField === field) {
			setSortDirection(sortDirection === "asc" ? "desc" : "asc");
		} else {
			setSortField(field);
			setSortDirection("asc");
		}
	};

	const getSortIcon = (field) => {
		if (sortField !== field) return <ArrowUpDown className="h-4 w-4" />;
		return sortDirection === "asc" ? (
			<ArrowUp className="h-4 w-4" />
		) : (
			<ArrowDown className="h-4 w-4" />
		);
	};

	// Sort automations based on current sort settings
	const sortedAutomations = overview?.automations
		? [...overview.automations].sort((a, b) => {
				let aValue, bValue;

				switch (sortField) {
					case "name":
						aValue = a.name.toLowerCase();
						bValue = b.name.toLowerCase();
						break;
					case "schedule":
						aValue = a.schedule.toLowerCase();
						bValue = b.schedule.toLowerCase();
						break;
					case "lastRun":
						// Convert "Never" to empty string for proper sorting
						aValue = a.lastRun === "Never" ? "" : a.lastRun;
						bValue = b.lastRun === "Never" ? "" : b.lastRun;
						break;
					case "lastRunTimestamp":
						aValue = a.lastRunTimestamp || 0;
						bValue = b.lastRunTimestamp || 0;
						break;
					case "nextRunTimestamp":
						aValue = getNextRunTimestamp(a.schedule);
						bValue = getNextRunTimestamp(b.schedule);
						break;
					case "status":
						aValue = a.status.toLowerCase();
						bValue = b.status.toLowerCase();
						break;
					default:
						aValue = a[sortField];
						bValue = b[sortField];
				}

				if (aValue < bValue) return sortDirection === "asc" ? -1 : 1;
				if (aValue > bValue) return sortDirection === "asc" ? 1 : -1;
				return 0;
			})
		: [];

	const tabs = [{ id: "overview", name: "Overview", icon: Settings }];

	return (
		<div className="space-y-6">
			{/* Page Header */}
			<div className="flex items-center justify-between">
				<div>
					<h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
						Automation Management
					</h1>
					<p className="text-sm text-secondary-600 dark:text-secondary-400 mt-1">
						Monitor and manage automated server operations, agent
						communications, and patch deployments
					</p>
				</div>
				<div className="flex items-center gap-3">
					<button
						type="button"
						onClick={openBullBoard}
						className="btn-outline flex items-center gap-2"
						title="Open Bull Board Queue Monitor"
					>
						<svg
							className="h-4 w-4"
							xmlns="http://www.w3.org/2000/svg"
							viewBox="0 0 36 36"
							role="img"
							aria-label="Bull Board"
						>
							<circle fill="#DD2E44" cx="18" cy="18" r="18" />
							<circle fill="#FFF" cx="18" cy="18" r="13.5" />
							<circle fill="#DD2E44" cx="18" cy="18" r="10" />
							<circle fill="#FFF" cx="18" cy="18" r="6" />
							<circle fill="#DD2E44" cx="18" cy="18" r="3" />
							<path
								opacity=".2"
								d="M18.24 18.282l13.144 11.754s-2.647 3.376-7.89 5.109L17.579 18.42l.661-.138z"
							/>
							<path
								fill="#FFAC33"
								d="M18.294 19a.994.994 0 01-.704-1.699l.563-.563a.995.995 0 011.408 1.407l-.564.563a.987.987 0 01-.703.292z"
							/>
							<path
								fill="#55ACEE"
								d="M24.016 6.981c-.403 2.079 0 4.691 0 4.691l7.054-7.388c.291-1.454-.528-3.932-1.718-4.238-1.19-.306-4.079.803-5.336 6.935zm5.003 5.003c-2.079.403-4.691 0-4.691 0l7.388-7.054c1.454-.291 3.932.528 4.238 1.718.306 1.19-.803 4.079-6.935 5.336z"
							/>
							<path
								fill="#3A87C2"
								d="M32.798 4.485L21.176 17.587c-.362.362-1.673.882-2.51.046-.836-.836-.419-2.08-.057-2.443L31.815 3.501s.676-.635 1.159-.152-.176 1.136-.176 1.136z"
							/>
						</svg>
						Queue Monitor
					</button>
				</div>
			</div>

			{/* Stats Cards */}
			<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
				{/* Scheduled Tasks Card */}
				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<Clock className="h-5 w-5 text-warning-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Scheduled Tasks
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{overviewLoading ? "..." : overview?.scheduledTasks || 0}
							</p>
						</div>
					</div>
				</div>

				{/* Running Tasks Card */}
				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<Play className="h-5 w-5 text-success-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Running Tasks
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{overviewLoading ? "..." : overview?.runningTasks || 0}
							</p>
						</div>
					</div>
				</div>

				{/* Failed Tasks Card */}
				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<XCircle className="h-5 w-5 text-red-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Failed Tasks
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{overviewLoading ? "..." : overview?.failedTasks || 0}
							</p>
						</div>
					</div>
				</div>

				{/* Total Task Runs Card */}
				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<Zap className="h-5 w-5 text-secondary-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Total Task Runs
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{overviewLoading ? "..." : overview?.totalAutomations || 0}
							</p>
						</div>
					</div>
				</div>
			</div>

			{/* Tabs */}
			<div className="mb-6">
				<div className="border-b border-gray-200 dark:border-gray-700">
					<nav className="-mb-px flex space-x-8">
						{tabs.map((tab) => (
							<button
								type="button"
								key={tab.id}
								onClick={() => setActiveTab(tab.id)}
								className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
									activeTab === tab.id
										? "border-blue-500 text-blue-600 dark:text-blue-400"
										: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
								}`}
							>
								<tab.icon className="h-4 w-4" />
								{tab.name}
							</button>
						))}
					</nav>
				</div>
			</div>

			{/* Tab Content */}
			{activeTab === "overview" && (
				<div className="card p-6">
					{overviewLoading ? (
						<div className="text-center py-8">
							<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600 mx-auto"></div>
							<p className="mt-2 text-sm text-secondary-500">
								Loading automations...
							</p>
						</div>
					) : (
						<div className="overflow-x-auto">
							<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
								<thead className="bg-secondary-50 dark:bg-secondary-700">
									<tr>
										<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
											Run
										</th>
										<th
											className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
											onClick={() => handleSort("name")}
										>
											<div className="flex items-center gap-1">
												Task
												{getSortIcon("name")}
											</div>
										</th>
										<th
											className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
											onClick={() => handleSort("schedule")}
										>
											<div className="flex items-center gap-1">
												Frequency
												{getSortIcon("schedule")}
											</div>
										</th>
										<th
											className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
											onClick={() => handleSort("lastRunTimestamp")}
										>
											<div className="flex items-center gap-1">
												Last Run
												{getSortIcon("lastRunTimestamp")}
											</div>
										</th>
										<th
											className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
											onClick={() => handleSort("nextRunTimestamp")}
										>
											<div className="flex items-center gap-1">
												Next Run
												{getSortIcon("nextRunTimestamp")}
											</div>
										</th>
										<th
											className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
											onClick={() => handleSort("status")}
										>
											<div className="flex items-center gap-1">
												Status
												{getSortIcon("status")}
											</div>
										</th>
									</tr>
								</thead>
								<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
									{sortedAutomations.map((automation) => (
										<tr
											key={automation.queue}
											className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
										>
											<td className="px-4 py-2 whitespace-nowrap">
												{automation.schedule !== "Manual only" ? (
													<button
														type="button"
														onClick={() => {
															if (automation.queue.includes("github")) {
																triggerManualJob("github");
															} else if (automation.queue.includes("session")) {
																triggerManualJob("sessions");
															} else if (
																automation.queue.includes("orphaned-repo")
															) {
																triggerManualJob("orphaned-repos");
															} else if (
																automation.queue.includes("orphaned-package")
															) {
																triggerManualJob("orphaned-packages");
															} else if (
																automation.queue.includes("agent-commands")
															) {
																triggerManualJob("agent-collection");
															}
														}}
														className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
														title="Run Now"
													>
														<Play className="h-3 w-3" />
													</button>
												) : (
													<span className="text-gray-400 text-xs">Manual</span>
												)}
											</td>
											<td className="px-4 py-2 whitespace-nowrap">
												<div>
													<div className="text-sm font-medium text-secondary-900 dark:text-white">
														{automation.name}
													</div>
													<div className="text-xs text-secondary-500 dark:text-secondary-400">
														{automation.description}
													</div>
												</div>
											</td>
											<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
												{automation.schedule}
											</td>
											<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
												{automation.lastRun}
											</td>
											<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
												{getNextRunTime(
													automation.schedule,
													automation.lastRun,
												)}
											</td>
											<td className="px-4 py-2 whitespace-nowrap">
												{getStatusBadge(automation.status)}
											</td>
										</tr>
									))}
								</tbody>
							</table>
						</div>
					)}
				</div>
			)}
		</div>
	);
};

export default Automation;
@@ -200,6 +200,8 @@ const Dashboard = () => {
data: packageTrendsData,
isLoading: packageTrendsLoading,
error: _packageTrendsError,
+ refetch: refetchPackageTrends,
+ isFetching: packageTrendsFetching,
} = useQuery({
queryKey: ["packageTrends", packageTrendsPeriod, packageTrendsHost],
queryFn: () => {
@@ -771,6 +773,20 @@ const Dashboard = () => {
Package Trends Over Time
</h3>
<div className="flex items-center gap-3">
+ {/* Refresh Button */}
+ <button
+ type="button"
+ onClick={() => refetchPackageTrends()}
+ disabled={packageTrendsFetching}
+ className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white hover:bg-secondary-50 dark:hover:bg-secondary-700 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
+ title="Refresh data"
+ >
+ <RefreshCw
+ className={`h-4 w-4 ${packageTrendsFetching ? "animate-spin" : ""}`}
+ />
+ Refresh
+ </button>
+
{/* Period Selector */}
<select
value={packageTrendsPeriod}
@@ -1161,7 +1177,7 @@ const Dashboard = () => {
try {
const date = new Date(`${label}:00:00`);
// Check if date is valid
- if (isNaN(date.getTime())) {
+ if (Number.isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
return date.toLocaleDateString("en-US", {
@@ -1171,7 +1187,7 @@ const Dashboard = () => {
minute: "2-digit",
hour12: true,
});
- } catch (error) {
+ } catch (_error) {
return label; // Return original label if parsing fails
}
}
@@ -1180,17 +1196,24 @@ const Dashboard = () => {
try {
const date = new Date(label);
// Check if date is valid
- if (isNaN(date.getTime())) {
+ if (Number.isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
return date.toLocaleDateString("en-US", {
month: "short",
day: "numeric",
});
- } catch (error) {
+ } catch (_error) {
return label; // Return original label if parsing fails
}
},
+ label: (context) => {
+ const value = context.parsed.y;
+ if (value === null || value === undefined) {
+ return `${context.dataset.label}: No data`;
+ }
+ return `${context.dataset.label}: ${value}`;
+ },
},
},
},
@@ -1222,7 +1245,7 @@ const Dashboard = () => {
const hourNum = parseInt(hour, 10);

// Validate hour number
- if (isNaN(hourNum) || hourNum < 0 || hourNum > 23) {
+ if (Number.isNaN(hourNum) || hourNum < 0 || hourNum > 23) {
return hour; // Return original hour if invalid
}

@@ -1233,7 +1256,7 @@ const Dashboard = () => {
: hourNum === 12
? "12 PM"
: `${hourNum - 12} PM`;
- } catch (error) {
+ } catch (_error) {
return label; // Return original label if parsing fails
}
}
@@ -1242,14 +1265,14 @@ const Dashboard = () => {
try {
const date = new Date(label);
// Check if date is valid
- if (isNaN(date.getTime())) {
+ if (Number.isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
return date.toLocaleDateString("en-US", {
month: "short",
day: "numeric",
});
- } catch (error) {
+ } catch (_error) {
return label; // Return original label if parsing fails
}
},
@@ -1411,7 +1434,6 @@ const Dashboard = () => {
title="Customize dashboard layout"
>
<Settings className="h-4 w-4" />
- Customize Dashboard
</button>
<button
type="button"
@@ -1423,7 +1445,6 @@ const Dashboard = () => {
<RefreshCw
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
/>
- {isFetching ? "Refreshing..." : "Refresh"}
</button>
</div>
</div>
1003	frontend/src/pages/Docker.jsx (new file): file diff suppressed because it is too large
@@ -1,11 +1,14 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import {
Activity,
+ AlertCircle,
AlertTriangle,
ArrowLeft,
Calendar,
CheckCircle,
+ CheckCircle2,
Clock,
+ Clock3,
Copy,
Cpu,
Database,
@@ -27,11 +30,13 @@ import {
import { useEffect, useId, useState } from "react";
import { Link, useNavigate, useParams } from "react-router-dom";
import InlineEdit from "../components/InlineEdit";
+ import InlineMultiGroupEdit from "../components/InlineMultiGroupEdit";
import {
adminHostsAPI,
dashboardAPI,
formatDate,
formatRelativeTime,
+ hostGroupsAPI,
repositoryAPI,
settingsAPI,
} from "../utils/api";
@@ -46,6 +51,8 @@ const HostDetail = () => {
const [activeTab, setActiveTab] = useState("host");
const [historyPage, setHistoryPage] = useState(0);
const [historyLimit] = useState(10);
+ const [notes, setNotes] = useState("");
+ const [notesMessage, setNotesMessage] = useState({ text: "", type: "" });

const {
data: host,
@@ -66,6 +73,64 @@ const HostDetail = () => {
refetchOnWindowFocus: false, // Don't refetch when window regains focus
});
+
+ // WebSocket connection status using Server-Sent Events (SSE) for real-time push updates
+ const [wsStatus, setWsStatus] = useState(null);
+
+ useEffect(() => {
+ if (!host?.api_id) return;
+
+ const token = localStorage.getItem("token");
+ if (!token) return;
+
+ let eventSource = null;
+ let reconnectTimeout = null;
+ let isMounted = true;
+
+ const connect = () => {
+ if (!isMounted) return;
+
+ try {
+ // Create EventSource for SSE connection
+ eventSource = new EventSource(
+ `/api/v1/ws/status/${host.api_id}/stream?token=${encodeURIComponent(token)}`,
+ );
+
+ eventSource.onmessage = (event) => {
+ try {
+ const data = JSON.parse(event.data);
+ setWsStatus(data);
+ } catch (_err) {
+ // Silently handle parse errors
+ }
+ };
+
+ eventSource.onerror = (_error) => {
+ console.log(`[SSE] Connection error for ${host.api_id}, retrying...`);
+ eventSource?.close();
+
+ // Automatic reconnection after 5 seconds
+ if (isMounted) {
+ reconnectTimeout = setTimeout(connect, 5000);
+ }
+ };
+ } catch (_err) {
+ // Silently handle connection errors
+ }
+ };
+
+ // Initial connection
+ connect();
+
+ // Cleanup on unmount or when api_id changes
+ return () => {
+ isMounted = false;
+ if (reconnectTimeout) clearTimeout(reconnectTimeout);
+ if (eventSource) {
+ eventSource.close();
+ }
+ };
+ }, [host?.api_id]);
+
// Fetch repository count for this host
const { data: repositories, isLoading: isLoadingRepos } = useQuery({
queryKey: ["host-repositories", hostId],
@@ -75,6 +140,14 @@ const HostDetail = () => {
enabled: !!hostId,
});
+
+ // Fetch host groups for multi-select
+ const { data: hostGroups } = useQuery({
+ queryKey: ["host-groups"],
+ queryFn: () => hostGroupsAPI.list().then((res) => res.data),
+ staleTime: 5 * 60 * 1000, // 5 minutes - data stays fresh longer
+ refetchOnWindowFocus: false, // Don't refetch when window regains focus
+ });

// Tab change handler
const handleTabChange = (tabName) => {
setActiveTab(tabName);
@@ -87,6 +160,13 @@ const HostDetail = () => {
}
}, [host]);
+
+ // Sync notes state with host data
+ useEffect(() => {
+ if (host) {
+ setNotes(host.notes || "");
+ }
+ }, [host]);

const deleteHostMutation = useMutation({
mutationFn: (hostId) => adminHostsAPI.delete(hostId),
onSuccess: () => {
@@ -118,12 +198,32 @@ const HostDetail = () => {
},
});
+
+ const updateHostGroupsMutation = useMutation({
+ mutationFn: ({ hostId, groupIds }) =>
+ adminHostsAPI.updateGroups(hostId, groupIds).then((res) => res.data),
+ onSuccess: () => {
+ queryClient.invalidateQueries(["host", hostId]);
+ queryClient.invalidateQueries(["hosts"]);
+ },
+ });
+
const updateNotesMutation = useMutation({
mutationFn: ({ hostId, notes }) =>
adminHostsAPI.updateNotes(hostId, notes).then((res) => res.data),
onSuccess: () => {
queryClient.invalidateQueries(["host", hostId]);
queryClient.invalidateQueries(["hosts"]);
+ setNotesMessage({ text: "Notes saved successfully!", type: "success" });
+ // Clear message after 3 seconds
+ setTimeout(() => setNotesMessage({ text: "", type: "" }), 3000);
+ },
+ onError: (error) => {
+ setNotesMessage({
+ text: error.response?.data?.error || "Failed to save notes",
+ type: "error",
+ });
+ // Clear message after 5 seconds for errors
+ setTimeout(() => setNotesMessage({ text: "", type: "" }), 5000);
},
});

@@ -238,49 +338,67 @@ const HostDetail = () => {
return (
<div className="h-screen flex flex-col">
{/* Header */}
- <div className="flex items-center justify-between mb-4 pb-4 border-b border-secondary-200 dark:border-secondary-600">
- <div className="flex items-center gap-3">
+ <div className="flex items-start justify-between mb-4 pb-4 border-b border-secondary-200 dark:border-secondary-600">
+ <div className="flex items-start gap-3">
<Link
to="/hosts"
- className="text-secondary-500 hover:text-secondary-700 dark:text-secondary-400 dark:hover:text-secondary-200"
+ className="text-secondary-500 hover:text-secondary-700 dark:text-secondary-400 dark:hover:text-secondary-200 mt-1"
>
<ArrowLeft className="h-5 w-5" />
</Link>
- <h1 className="text-xl font-semibold text-secondary-900 dark:text-white">
- {host.friendly_name}
- </h1>
- {host.system_uptime && (
- <div className="flex items-center gap-1 text-sm text-secondary-600 dark:text-secondary-400">
- <Clock className="h-4 w-4" />
- <span className="text-xs font-medium">Uptime:</span>
- <span>{host.system_uptime}</span>
+ <div className="flex flex-col gap-2">
+ {/* Title row with friendly name, badge, and status */}
+ <div className="flex items-center gap-3">
+ <h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
+ {host.friendly_name}
+ </h1>
+ {wsStatus && (
+ <span
+ className={`inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase ${
+ wsStatus.connected
+ ? "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200 animate-pulse"
+ : "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200"
+ }`}
+ title={
+ wsStatus.connected
+ ? `Agent connected via ${wsStatus.secure ? "WSS (secure)" : "WS"}`
+ : "Agent not connected"
+ }
+ >
+ {wsStatus.connected
+ ? wsStatus.secure
+ ? "WSS"
+ : "WS"
+ : "Offline"}
+ </span>
+ )}
+ <div
+ className={`flex items-center gap-2 px-2 py-1 rounded-full text-xs font-medium ${getStatusColor(isStale, host.stats.outdated_packages > 0)}`}
+ >
+ {getStatusIcon(isStale, host.stats.outdated_packages > 0)}
+ {getStatusText(isStale, host.stats.outdated_packages > 0)}
+ </div>
+ </div>
+ {/* Info row with uptime and last updated */}
+ <div className="flex items-center gap-4 text-sm text-secondary-600 dark:text-secondary-400">
+ {host.system_uptime && (
+ <div className="flex items-center gap-1">
+ <Clock className="h-3.5 w-3.5" />
+ <span className="text-xs font-medium">Uptime:</span>
+ <span className="text-xs">{host.system_uptime}</span>
+ </div>
+ )}
+ <div className="flex items-center gap-1">
+ <Clock className="h-3.5 w-3.5" />
+ <span className="text-xs font-medium">Last updated:</span>
+ <span className="text-xs">
+ {formatRelativeTime(host.last_update)}
+ </span>
+ </div>
</div>
- )}
- <div className="flex items-center gap-1 text-sm text-secondary-600 dark:text-secondary-400">
- <Clock className="h-4 w-4" />
- <span className="text-xs font-medium">Last updated:</span>
- <span>{formatRelativeTime(host.last_update)}</span>
- </div>
- <div
- className={`flex items-center gap-2 px-2 py-1 rounded-full text-xs font-medium ${getStatusColor(isStale, host.stats.outdated_packages > 0)}`}
- >
- {getStatusIcon(isStale, host.stats.outdated_packages > 0)}
- {getStatusText(isStale, host.stats.outdated_packages > 0)}
</div>
</div>
<div className="flex items-center gap-2">
- <button
- type="button"
- onClick={() => refetch()}
- disabled={isFetching}
- className="btn-outline flex items-center gap-2 text-sm"
- title="Refresh host data"
- >
- <RefreshCw
- className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
- />
- {isFetching ? "Refreshing..." : "Refresh"}
- </button>
<button
type="button"
onClick={() => setShowCredentialsModal(true)}
@@ -289,13 +407,24 @@ const HostDetail = () => {
<Key className="h-4 w-4" />
Deploy Agent
</button>
+ <button
+ type="button"
+ onClick={() => refetch()}
+ disabled={isFetching}
+ className="btn-outline flex items-center justify-center p-2 text-sm"
+ title="Refresh host data"
+ >
+ <RefreshCw
+ className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
+ />
+ </button>
<button
type="button"
onClick={() => setShowDeleteModal(true)}
- className="btn-danger flex items-center gap-2 text-sm"
+ className="btn-danger flex items-center justify-center p-2 text-sm"
+ title="Delete host"
>
<Trash2 className="h-4 w-4" />
- Delete
</button>
</div>
</div>
@@ -426,7 +555,18 @@ const HostDetail = () => {
: "text-secondary-500 dark:text-secondary-400 hover:text-secondary-700 dark:hover:text-secondary-300"
}`}
>
- Agent History
+ Package Reports
+ </button>
+ <button
+ type="button"
+ onClick={() => handleTabChange("queue")}
+ className={`px-4 py-2 text-sm font-medium ${
+ activeTab === "queue"
+ ? "text-primary-600 dark:text-primary-400 border-b-2 border-primary-500"
+ : "text-secondary-500 dark:text-secondary-400 hover:text-secondary-700 dark:hover:text-secondary-300"
+ }`}
+ >
+ Agent Queue
</button>
<button
type="button"
@@ -493,20 +633,30 @@ const HostDetail = () => {

<div>
<p className="text-xs text-secondary-500 dark:text-secondary-300 mb-1.5">
- Host Group
+ Host Groups
</p>
- {host.host_groups ? (
- <span
- className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium text-white"
- style={{ backgroundColor: host.host_groups.color }}
- >
- {host.host_groups.name}
- </span>
- ) : (
- <span className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium bg-secondary-100 dark:bg-secondary-700 text-secondary-800 dark:text-secondary-200">
- Ungrouped
- </span>
- )}
+ {/* Extract group IDs from the new many-to-many structure */}
+ {(() => {
+ const groupIds =
+ host.host_group_memberships?.map(
+ (membership) => membership.host_groups.id,
+ ) || [];
+ return (
+ <InlineMultiGroupEdit
+ key={`${host.id}-${groupIds.join(",")}`}
+ value={groupIds}
+ onSave={(newGroupIds) =>
+ updateHostGroupsMutation.mutate({
+ hostId: host.id,
+ groupIds: newGroupIds,
+ })
+ }
+ options={hostGroups || []}
+ placeholder="Select groups..."
+ className="w-full"
+ />
+ );
+ })()}
</div>

<div>
@@ -1095,14 +1245,41 @@ const HostDetail = () => {
Host Notes
</h3>
</div>
+
+ {/* Success/Error Message */}
+ {notesMessage.text && (
+ <div
+ className={`rounded-md p-4 ${
+ notesMessage.type === "success"
+ ? "bg-green-50 dark:bg-green-900 border border-green-200 dark:border-green-700"
+ : "bg-red-50 dark:bg-red-900 border border-red-200 dark:border-red-700"
+ }`}
+ >
+ <div className="flex">
+ {notesMessage.type === "success" ? (
+ <CheckCircle className="h-5 w-5 text-green-400 dark:text-green-300" />
+ ) : (
+ <AlertCircle className="h-5 w-5 text-red-400 dark:text-red-300" />
+ )}
+ <div className="ml-3">
+ <p
+ className={`text-sm font-medium ${
+ notesMessage.type === "success"
+ ? "text-green-800 dark:text-green-200"
+ : "text-red-800 dark:text-red-200"
+ }`}
+ >
+ {notesMessage.text}
+ </p>
+ </div>
+ </div>
+ </div>
+ )}
+
<div className="bg-secondary-50 dark:bg-secondary-700 rounded-lg p-4">
<textarea
- value={host.notes || ""}
- onChange={(e) => {
- // Update local state immediately for better UX
- const updatedHost = { ...host, notes: e.target.value };
- queryClient.setQueryData(["host", hostId], updatedHost);
- }}
+ value={notes}
+ onChange={(e) => setNotes(e.target.value)}
placeholder="Add notes about this host... (e.g., purpose, special configurations, maintenance notes)"
className="w-full h-32 p-3 border border-secondary-200 dark:border-secondary-600 rounded-lg bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white placeholder-secondary-500 dark:placeholder-secondary-400 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 resize-none"
maxLength={1000}
@@ -1114,14 +1291,14 @@ const HostDetail = () => {
</p>
<div className="flex items-center gap-2">
<span className="text-xs text-secondary-400 dark:text-secondary-500">
- {(host.notes || "").length}/1000
+ {notes.length}/1000
</span>
<button
type="button"
onClick={() => {
updateNotesMutation.mutate({
hostId: host.id,
- notes: host.notes || "",
+ notes: notes,
});
}}
disabled={updateNotesMutation.isPending}
@@ -1136,6 +1313,9 @@ const HostDetail = () => {
</div>
</div>
)}
+
+ {/* Agent Queue */}
+ {activeTab === "queue" && <AgentQueueTab hostId={hostId} />}
</div>
</div>
</div>
@@ -1168,8 +1348,10 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
const [showApiKey, setShowApiKey] = useState(false);
const [activeTab, setActiveTab] = useState("quick-install");
const [forceInstall, setForceInstall] = useState(false);
+ const [architecture, setArchitecture] = useState("amd64");
const apiIdInputId = useId();
const apiKeyInputId = useId();
+ const architectureSelectId = useId();

const { data: serverUrlData } = useQuery({
queryKey: ["serverUrl"],
@@ -1189,10 +1371,13 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
};

- // Helper function to build installation URL with optional force flag
+ // Helper function to build installation URL with optional force flag and architecture
const getInstallUrl = () => {
const baseUrl = `${serverUrl}/api/v1/hosts/install`;
- return forceInstall ? `${baseUrl}?force=true` : baseUrl;
+ const params = new URLSearchParams();
+ if (forceInstall) params.append("force", "true");
+ params.append("arch", architecture);
+ return `${baseUrl}?${params.toString()}`;
};

const copyToClipboard = async (text) => {
@@ -1308,6 +1493,29 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
</p>
</div>
+
+ {/* Architecture Selection */}
+ <div className="mb-3">
+ <label
+ htmlFor={architectureSelectId}
+ className="block text-sm font-medium text-primary-800 dark:text-primary-200 mb-2"
+ >
+ Target Architecture
+ </label>
+ <select
+ id={architectureSelectId}
+ value={architecture}
+ onChange={(e) => setArchitecture(e.target.value)}
+ className="px-3 py-2 border border-primary-300 dark:border-primary-600 rounded-md bg-white dark:bg-secondary-800 text-sm text-secondary-900 dark:text-white focus:ring-primary-500 focus:border-primary-500"
+ >
+ <option value="amd64">AMD64 (x86_64) - Default</option>
+ <option value="386">386 (i386) - 32-bit</option>
+ <option value="arm64">ARM64 (aarch64) - ARM</option>
+ </select>
+ <p className="text-xs text-primary-600 dark:text-primary-400 mt-1">
+ Select the architecture of the target host
+ </p>
+ </div>
+
<div className="flex items-center gap-2">
<input
type="text"
@@ -1364,12 +1572,12 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {

<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
- 2. Download and Install Agent Script
+ 2. Download and Install Agent Binary
</h5>
<div className="flex items-center gap-2">
<input
type="text"
- value={`curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent.sh ${serverUrl}/api/v1/hosts/agent/download -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent.sh`}
+ value={`curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent ${serverUrl}/api/v1/hosts/agent/download?arch=${architecture} -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent`}
readOnly
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
/>
@@ -1377,7 +1585,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
type="button"
onClick={() =>
copyToClipboard(
- `curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent.sh ${serverUrl}/api/v1/hosts/agent/download -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent.sh`,
+ `curl ${getCurlFlags()} -o /usr/local/bin/patchmon-agent ${serverUrl}/api/v1/hosts/agent/download?arch=${architecture} -H "X-API-ID: ${host.api_id}" -H "X-API-KEY: ${host.api_key}" && sudo chmod +x /usr/local/bin/patchmon-agent`,
)
}
className="btn-secondary flex items-center gap-1"
@@ -1395,7 +1603,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
<div className="flex items-center gap-2">
<input
type="text"
- value={`sudo /usr/local/bin/patchmon-agent.sh configure "${host.api_id}" "${host.api_key}" "${serverUrl}"`}
+ value={`sudo /usr/local/bin/patchmon-agent config set-api "${host.api_id}" "${host.api_key}" "${serverUrl}"`}
readOnly
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
/>
@@ -1403,7 +1611,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
type="button"
onClick={() =>
copyToClipboard(
- `sudo /usr/local/bin/patchmon-agent.sh configure "${host.api_id}" "${host.api_key}" "${serverUrl}"`,
+ `sudo /usr/local/bin/patchmon-agent config set-api "${host.api_id}" "${host.api_key}" "${serverUrl}"`,
)
}
className="btn-secondary flex items-center gap-1"
@@ -1421,7 +1629,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
<div className="flex items-center gap-2">
<input
type="text"
- value="sudo /usr/local/bin/patchmon-agent.sh test"
+ value="sudo /usr/local/bin/patchmon-agent ping"
readOnly
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
/>
@@ -1429,7 +1637,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
type="button"
onClick={() =>
copyToClipboard(
- "sudo /usr/local/bin/patchmon-agent.sh test",
+ "sudo /usr/local/bin/patchmon-agent ping",
)
}
className="btn-secondary flex items-center gap-1"
@@ -1447,7 +1655,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
<div className="flex items-center gap-2">
<input
type="text"
- value="sudo /usr/local/bin/patchmon-agent.sh update"
+ value="sudo /usr/local/bin/patchmon-agent report"
readOnly
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
/>
@@ -1455,7 +1663,7 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
type="button"
onClick={() =>
copyToClipboard(
- "sudo /usr/local/bin/patchmon-agent.sh update",
+ "sudo /usr/local/bin/patchmon-agent report",
)
}
className="btn-secondary flex items-center gap-1"
@@ -1468,12 +1676,33 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {

<div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
<h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
- 6. Setup Crontab (Optional)
+ 6. Create Systemd Service File
</h5>
<div className="flex items-center gap-2">
<input
type="text"
- value={`(sudo crontab -l 2>/dev/null | grep -v "patchmon-agent.sh update"; echo "${new Date().getMinutes()} * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1") | sudo crontab -`}
+ value={`sudo tee /etc/systemd/system/patchmon-agent.service > /dev/null << 'EOF'
+ [Unit]
+ Description=PatchMon Agent Service
+ After=network.target
+ Wants=network.target
+
+ [Service]
+ Type=simple
+ User=root
+ ExecStart=/usr/local/bin/patchmon-agent serve
+ Restart=always
+ RestartSec=10
+ WorkingDirectory=/etc/patchmon
+
+ # Logging
+ StandardOutput=journal
+ StandardError=journal
+ SyslogIdentifier=patchmon-agent
+
+ [Install]
+ WantedBy=multi-user.target
+ EOF`}
readOnly
className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
/>
@@ -1481,7 +1710,28 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
type="button"
onClick={() =>
copyToClipboard(
- `(sudo crontab -l 2>/dev/null | grep -v "patchmon-agent.sh update"; echo "${new Date().getMinutes()} * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1") | sudo crontab -`,
+ `sudo tee /etc/systemd/system/patchmon-agent.service > /dev/null << 'EOF'
+ [Unit]
+ Description=PatchMon Agent Service
+ After=network.target
+ Wants=network.target
+
+ [Service]
+ Type=simple
+ User=root
+ ExecStart=/usr/local/bin/patchmon-agent serve
+ Restart=always
+ RestartSec=10
+ WorkingDirectory=/etc/patchmon
+
+ # Logging
+ StandardOutput=journal
+ StandardError=journal
+ SyslogIdentifier=patchmon-agent
+
+ [Install]
+ WantedBy=multi-user.target
+ EOF`,
)
}
className="btn-secondary flex items-center gap-1"
@@ -1491,6 +1741,64 @@ const CredentialsModal = ({ host, isOpen, onClose }) => {
</button>
</div>
</div>
+
+ <div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
+ <h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
+ 7. Enable and Start Service
+ </h5>
+ <div className="flex items-center gap-2">
+ <input
+ type="text"
+ value="sudo systemctl daemon-reload && sudo systemctl enable patchmon-agent && sudo systemctl start patchmon-agent"
+ readOnly
+ className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
+ />
+ <button
+ type="button"
+ onClick={() =>
+ copyToClipboard(
+ "sudo systemctl daemon-reload && sudo systemctl enable patchmon-agent && sudo systemctl start patchmon-agent",
+ )
+ }
+ className="btn-secondary flex items-center gap-1"
+ >
+ <Copy className="h-4 w-4" />
+ Copy
+ </button>
+ </div>
+ <p className="text-xs text-secondary-600 dark:text-secondary-400 mt-2">
+ This will start the agent service and establish WebSocket
+ connection for real-time communication
+ </p>
+ </div>
+
+ <div className="bg-white dark:bg-secondary-800 rounded-md p-3 border border-secondary-200 dark:border-secondary-600">
+ <h5 className="text-sm font-medium text-secondary-900 dark:text-white mb-2">
+ 8. Verify Service Status
+ </h5>
+ <div className="flex items-center gap-2">
+ <input
+ type="text"
+ value="sudo systemctl status patchmon-agent"
+ readOnly
+ className="flex-1 px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-sm font-mono text-secondary-900 dark:text-white"
+ />
+ <button
+ type="button"
+ onClick={() =>
+ copyToClipboard("sudo systemctl status patchmon-agent")
+ }
+ className="btn-secondary flex items-center gap-1"
+ >
+ <Copy className="h-4 w-4" />
+ Copy
+ </button>
+ </div>
+ <p className="text-xs text-secondary-600 dark:text-secondary-400 mt-2">
+ Check that the service is running and WebSocket connection
+ is established
+ </p>
+ </div>
</div>
</div>
</div>
@@ -1659,4 +1967,249 @@ const DeleteConfirmationModal = ({
);
};
+
+ // Agent Queue Tab Component
+ const AgentQueueTab = ({ hostId }) => {
+ const {
+ data: queueData,
+ isLoading,
+ error,
+ refetch,
+ } = useQuery({
+ queryKey: ["host-queue", hostId],
+ queryFn: () => dashboardAPI.getHostQueue(hostId).then((res) => res.data),
+ staleTime: 30 * 1000, // 30 seconds
+ refetchInterval: 30 * 1000, // Auto-refresh every 30 seconds
+ });
+
+ if (isLoading) {
+ return (
+ <div className="flex items-center justify-center h-32">
+ <RefreshCw className="h-6 w-6 animate-spin text-primary-600" />
+ </div>
+ );
+ }
+
+ if (error) {
+ return (
+ <div className="text-center py-8">
+ <AlertCircle className="h-12 w-12 text-red-500 mx-auto mb-4" />
+ <p className="text-red-600 dark:text-red-400">
+ Failed to load queue data
+ </p>
+ <button
+ type="button"
+ onClick={() => refetch()}
+ className="mt-2 px-4 py-2 text-sm bg-primary-600 text-white rounded-md hover:bg-primary-700"
+ >
+ Retry
+ </button>
+ </div>
+ );
+ }
+
+ const { waiting, active, delayed, failed, jobHistory } = queueData.data;
+
+ const getStatusIcon = (status) => {
+ switch (status) {
+ case "completed":
+ return <CheckCircle2 className="h-4 w-4 text-green-500" />;
+ case "failed":
+ return <AlertCircle className="h-4 w-4 text-red-500" />;
+ case "active":
+ return <Clock3 className="h-4 w-4 text-blue-500" />;
+ default:
+ return <Clock className="h-4 w-4 text-gray-500" />;
+ }
+ };
+
+ const getStatusColor = (status) => {
+ switch (status) {
+ case "completed":
+ return "text-green-600 dark:text-green-400";
+ case "failed":
+ return "text-red-600 dark:text-red-400";
+ case "active":
+ return "text-blue-600 dark:text-blue-400";
+ default:
+ return "text-gray-600 dark:text-gray-400";
+ }
+ };
+
+ const formatJobType = (type) => {
+ switch (type) {
+ case "settings_update":
+ return "Settings Update";
+ case "report_now":
+ return "Report Now";
+ case "update_agent":
+ return "Agent Update";
+ default:
+ return type;
+ }
+ };
+
+ return (
+ <div className="space-y-6">
+ <div className="flex items-center justify-between">
+ <h3 className="text-lg font-medium text-secondary-900 dark:text-white">
+ Live Agent Queue Status
+ </h3>
+ <button
+ type="button"
+ onClick={() => refetch()}
+ className="btn-outline flex items-center gap-2"
+ title="Refresh queue data"
+ >
+ <RefreshCw className="h-4 w-4" />
+ </button>
+ </div>
+
+ {/* Queue Summary */}
+ <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4">
+ <div className="card p-4">
+ <div className="flex items-center">
+ <Server className="h-5 w-5 text-blue-600 mr-2" />
+ <div>
+ <p className="text-sm text-secondary-500 dark:text-white">
+ Waiting
+ </p>
+ <p className="text-xl font-semibold text-secondary-900 dark:text-white">
+ {waiting}
+ </p>
+ </div>
+ </div>
+ </div>
+
+ <div className="card p-4">
+ <div className="flex items-center">
+ <Clock3 className="h-5 w-5 text-warning-600 mr-2" />
+ <div>
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Active
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{active}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Clock className="h-5 w-5 text-primary-600 mr-2" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Delayed
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{delayed}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<AlertCircle className="h-5 w-5 text-danger-600 mr-2" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Failed
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{failed}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Job History */}
|
||||||
|
<div>
|
||||||
|
{jobHistory.length === 0 ? (
|
||||||
|
<div className="text-center py-8">
|
||||||
|
<Server className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
||||||
|
<p className="text-gray-500 dark:text-gray-400">
|
||||||
|
No job history found
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-700">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Job ID
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Job Name
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Status
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Attempt
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Date/Time
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
|
||||||
|
Error/Output
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
|
||||||
|
{jobHistory.map((job) => (
|
||||||
|
<tr
|
||||||
|
key={job.id}
|
||||||
|
className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
||||||
|
>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs font-mono text-secondary-900 dark:text-white">
|
||||||
|
{job.job_id}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs text-secondary-900 dark:text-white">
|
||||||
|
{formatJobType(job.job_name)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
{getStatusIcon(job.status)}
|
||||||
|
<span
|
||||||
|
className={`text-xs font-medium ${getStatusColor(job.status)}`}
|
||||||
|
>
|
||||||
|
{job.status.charAt(0).toUpperCase() +
|
||||||
|
job.status.slice(1)}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs text-secondary-900 dark:text-white">
|
||||||
|
{job.attempt_number}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 whitespace-nowrap text-xs text-secondary-900 dark:text-white">
|
||||||
|
{new Date(job.created_at).toLocaleString()}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-2 text-xs">
|
||||||
|
{job.error_message ? (
|
||||||
|
<span className="text-red-600 dark:text-red-400">
|
||||||
|
{job.error_message}
|
||||||
|
</span>
|
||||||
|
) : job.output ? (
|
||||||
|
<span className="text-green-600 dark:text-green-400">
|
||||||
|
{JSON.stringify(job.output)}
|
||||||
|
</span>
|
||||||
|
) : (
|
||||||
|
<span className="text-secondary-500 dark:text-secondary-400">
|
||||||
|
-
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
export default HostDetail;
|
export default HostDetail;
|
||||||
|
|||||||
@@ -21,12 +21,13 @@ import {
 Square,
 Trash2,
 Users,
+Wifi,
 X,
 } from "lucide-react";
 import { useEffect, useId, useMemo, useState } from "react";
 import { Link, useNavigate, useSearchParams } from "react-router-dom";
 import InlineEdit from "../components/InlineEdit";
-import InlineGroupEdit from "../components/InlineGroupEdit";
+import InlineMultiGroupEdit from "../components/InlineMultiGroupEdit";
 import InlineToggle from "../components/InlineToggle";
 import {
 adminHostsAPI,
@@ -34,14 +35,14 @@ import {
 formatRelativeTime,
 hostGroupsAPI,
 } from "../utils/api";
-import { OSIcon } from "../utils/osIcons.jsx";
+import { getOSDisplayName, OSIcon } from "../utils/osIcons.jsx";

 // Add Host Modal Component
 const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
 const friendlyNameId = useId();
 const [formData, setFormData] = useState({
 friendly_name: "",
-hostGroupId: "",
+hostGroupIds: [], // Changed to array for multiple selection
 });
 const [isSubmitting, setIsSubmitting] = useState(false);
 const [error, setError] = useState("");
@@ -64,7 +65,7 @@ const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
 const response = await adminHostsAPI.create(formData);
 console.log("Host created successfully:", formData.friendly_name);
 onSuccess(response.data);
-setFormData({ friendly_name: "", hostGroupId: "" });
+setFormData({ friendly_name: "", hostGroupIds: [] });
 onClose();
 } catch (err) {
 console.error("Full error object:", err);
@@ -134,68 +135,56 @@ const AddHostModal = ({ isOpen, onClose, onSuccess }) => {
|
|||||||
|
|
||||||
<div>
|
<div>
|
||||||
<span className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-3">
|
<span className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-3">
|
||||||
Host Group
|
Host Groups
|
||||||
</span>
|
</span>
|
||||||
<div className="grid grid-cols-3 gap-2">
|
<div className="space-y-2 max-h-48 overflow-y-auto">
|
||||||
{/* No Group Option */}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
onClick={() => setFormData({ ...formData, hostGroupId: "" })}
|
|
||||||
className={`flex flex-col items-center justify-center px-2 py-3 text-center border-2 rounded-lg transition-all duration-200 relative min-h-[80px] ${
|
|
||||||
formData.hostGroupId === ""
|
|
||||||
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30 text-primary-700 dark:text-primary-300"
|
|
||||||
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 text-secondary-700 dark:text-secondary-200 hover:border-secondary-400 dark:hover:border-secondary-500"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
<div className="text-xs font-medium">No Group</div>
|
|
||||||
<div className="text-xs text-secondary-500 dark:text-secondary-400 mt-1">
|
|
||||||
Ungrouped
|
|
||||||
</div>
|
|
||||||
{formData.hostGroupId === "" && (
|
|
||||||
<div className="absolute top-2 right-2 w-3 h-3 rounded-full bg-primary-500 flex items-center justify-center">
|
|
||||||
<div className="w-1.5 h-1.5 rounded-full bg-white"></div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
|
|
||||||
{/* Host Group Options */}
|
{/* Host Group Options */}
|
||||||
{hostGroups?.map((group) => (
|
{hostGroups?.map((group) => (
|
||||||
<button
|
<label
|
||||||
key={group.id}
|
key={group.id}
|
||||||
type="button"
|
className={`flex items-center gap-3 p-3 border-2 rounded-lg transition-all duration-200 cursor-pointer ${
|
||||||
onClick={() =>
|
formData.hostGroupIds.includes(group.id)
|
||||||
setFormData({ ...formData, hostGroupId: group.id })
|
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30"
|
||||||
}
|
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 hover:border-secondary-400 dark:hover:border-secondary-500"
|
||||||
className={`flex flex-col items-center justify-center px-2 py-3 text-center border-2 rounded-lg transition-all duration-200 relative min-h-[80px] ${
|
|
||||||
formData.hostGroupId === group.id
|
|
||||||
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30 text-primary-700 dark:text-primary-300"
|
|
||||||
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 text-secondary-700 dark:text-secondary-200 hover:border-secondary-400 dark:hover:border-secondary-500"
|
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
<div className="flex items-center gap-1 mb-1 w-full justify-center">
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={formData.hostGroupIds.includes(group.id)}
|
||||||
|
onChange={(e) => {
|
||||||
|
if (e.target.checked) {
|
||||||
|
setFormData({
|
||||||
|
...formData,
|
||||||
|
hostGroupIds: [...formData.hostGroupIds, group.id],
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
setFormData({
|
||||||
|
...formData,
|
||||||
|
hostGroupIds: formData.hostGroupIds.filter(
|
||||||
|
(id) => id !== group.id,
|
||||||
|
),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className="w-4 h-4 text-primary-600 bg-gray-100 border-gray-300 rounded focus:ring-primary-500 dark:focus:ring-primary-600 dark:ring-offset-gray-800 focus:ring-2 dark:bg-gray-700 dark:border-gray-600"
|
||||||
|
/>
|
||||||
|
<div className="flex items-center gap-2 flex-1">
|
||||||
{group.color && (
|
{group.color && (
|
||||||
<div
|
<div
|
||||||
className="w-3 h-3 rounded-full border border-secondary-300 dark:border-secondary-500 flex-shrink-0"
|
className="w-3 h-3 rounded-full border border-secondary-300 dark:border-secondary-500 flex-shrink-0"
|
||||||
style={{ backgroundColor: group.color }}
|
style={{ backgroundColor: group.color }}
|
||||||
></div>
|
></div>
|
||||||
)}
|
)}
|
||||||
<div className="text-xs font-medium truncate max-w-full">
|
<div className="text-sm font-medium text-secondary-700 dark:text-secondary-200">
|
||||||
{group.name}
|
{group.name}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="text-xs text-secondary-500 dark:text-secondary-400">
|
</label>
|
||||||
Group
|
|
||||||
</div>
|
|
||||||
{formData.hostGroupId === group.id && (
|
|
||||||
<div className="absolute top-2 right-2 w-3 h-3 rounded-full bg-primary-500 flex items-center justify-center">
|
|
||||||
<div className="w-1.5 h-1.5 rounded-full bg-white"></div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</button>
|
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
<p className="mt-2 text-sm text-secondary-500 dark:text-secondary-400">
|
<p className="mt-2 text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
Optional: Assign this host to a group for better organization.
|
Optional: Select one or more groups to assign this host to for
|
||||||
|
better organization.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -328,22 +317,24 @@ const Hosts = () => {
 const defaultConfig = [
 { id: "select", label: "Select", visible: true, order: 0 },
 { id: "host", label: "Friendly Name", visible: true, order: 1 },
-{ id: "ip", label: "IP Address", visible: false, order: 2 },
-{ id: "group", label: "Group", visible: true, order: 3 },
-{ id: "os", label: "OS", visible: true, order: 4 },
-{ id: "os_version", label: "OS Version", visible: false, order: 5 },
-{ id: "agent_version", label: "Agent Version", visible: true, order: 6 },
+{ id: "hostname", label: "System Hostname", visible: true, order: 2 },
+{ id: "ip", label: "IP Address", visible: false, order: 3 },
+{ id: "group", label: "Group", visible: true, order: 4 },
+{ id: "os", label: "OS", visible: true, order: 5 },
+{ id: "os_version", label: "OS Version", visible: false, order: 6 },
+{ id: "agent_version", label: "Agent Version", visible: true, order: 7 },
 {
 id: "auto_update",
 label: "Agent Auto-Update",
 visible: true,
-order: 7,
+order: 8,
 },
-{ id: "status", label: "Status", visible: true, order: 8 },
-{ id: "updates", label: "Updates", visible: true, order: 9 },
-{ id: "notes", label: "Notes", visible: false, order: 10 },
-{ id: "last_update", label: "Last Update", visible: true, order: 11 },
-{ id: "actions", label: "Actions", visible: true, order: 12 },
+{ id: "ws_status", label: "Connection", visible: true, order: 9 },
+{ id: "status", label: "Status", visible: true, order: 10 },
+{ id: "updates", label: "Updates", visible: true, order: 11 },
+{ id: "notes", label: "Notes", visible: false, order: 12 },
+{ id: "last_update", label: "Last Update", visible: true, order: 13 },
+{ id: "actions", label: "Actions", visible: true, order: 14 },
 ];

 const saved = localStorage.getItem("hosts-column-config");
@@ -365,8 +356,11 @@ const Hosts = () => {
 localStorage.removeItem("hosts-column-config");
 return defaultConfig;
 } else {
-// Use the existing configuration
-return savedConfig;
+// Ensure ws_status column is visible in saved config
+const updatedConfig = savedConfig.map((col) =>
+col.id === "ws_status" ? { ...col, visible: true } : col,
+);
+return updatedConfig;
 }
 } catch {
 // If there's an error parsing the config, clear it and use default
@@ -398,9 +392,121 @@ const Hosts = () => {
 queryFn: () => hostGroupsAPI.list().then((res) => res.data),
 });
+
+// Track WebSocket status for all hosts
+const [wsStatusMap, setWsStatusMap] = useState({});
+
+// Fetch initial WebSocket status for all hosts
+useEffect(() => {
+if (!hosts || hosts.length === 0) return;
+
+const token = localStorage.getItem("token");
+if (!token) return;
+
+// Fetch initial WebSocket status for all hosts
+const fetchInitialStatus = async () => {
+const statusPromises = hosts
+.filter((host) => host.api_id)
+.map(async (host) => {
+try {
+const response = await fetch(`/api/v1/ws/status/${host.api_id}`, {
+headers: {
+Authorization: `Bearer ${token}`,
+},
+});
+if (response.ok) {
+const data = await response.json();
+return { apiId: host.api_id, status: data.data };
+}
+} catch (_error) {
+// Silently handle errors
+}
+return {
+apiId: host.api_id,
+status: { connected: false, secure: false },
+};
+});
+
+const results = await Promise.all(statusPromises);
+const initialStatusMap = {};
+results.forEach(({ apiId, status }) => {
+initialStatusMap[apiId] = status;
+});
+
+setWsStatusMap(initialStatusMap);
+};
+
+fetchInitialStatus();
+}, [hosts]);
+
+// Subscribe to WebSocket status changes for all hosts via SSE
+useEffect(() => {
+if (!hosts || hosts.length === 0) return;
+
+const token = localStorage.getItem("token");
+if (!token) return;
+
+const eventSources = new Map();
+let isMounted = true;
+
+const connectHost = (apiId) => {
+if (!isMounted || eventSources.has(apiId)) return;
+
+try {
+const es = new EventSource(
+`/api/v1/ws/status/${apiId}/stream?token=${encodeURIComponent(token)}`,
+);
+
+es.onmessage = (event) => {
+try {
+const data = JSON.parse(event.data);
+if (isMounted) {
+setWsStatusMap((prev) => {
+const newMap = { ...prev, [apiId]: data };
+return newMap;
+});
+}
+} catch (_err) {
+// Silently handle parse errors
+}
+};
+
+es.onerror = (_error) => {
+console.log(`[SSE] Connection error for ${apiId}, retrying...`);
+es?.close();
+eventSources.delete(apiId);
+if (isMounted) {
+// Retry connection after 5 seconds with exponential backoff
+setTimeout(() => connectHost(apiId), 5000);
+}
+};
+
+eventSources.set(apiId, es);
+} catch (_err) {
+// Silently handle connection errors
+}
+};
+
+// Connect to all hosts
+for (const host of hosts) {
+if (host.api_id) {
+connectHost(host.api_id);
+} else {
+}
+}
+
+// Cleanup function
+return () => {
+isMounted = false;
+for (const es of eventSources.values()) {
+es.close();
+}
+eventSources.clear();
+};
+}, [hosts]);
+
 const bulkUpdateGroupMutation = useMutation({
-mutationFn: ({ hostIds, hostGroupId }) =>
-adminHostsAPI.bulkUpdateGroup(hostIds, hostGroupId),
+mutationFn: ({ hostIds, groupIds }) =>
+adminHostsAPI.bulkUpdateGroups(hostIds, groupIds),
 onSuccess: (data) => {
 console.log("bulkUpdateGroupMutation success:", data);

@@ -411,11 +517,7 @@ const Hosts = () => {
 return oldData.map((host) => {
 const updatedHost = data.hosts.find((h) => h.id === host.id);
 if (updatedHost) {
-// Ensure hostGroupId is set correctly
-return {
-...updatedHost,
-hostGroupId: updatedHost.host_groups?.id || null,
-};
+return updatedHost;
 }
 return host;
 });
@@ -439,7 +541,7 @@ const Hosts = () => {
 },
 });

-const updateHostGroupMutation = useMutation({
+const _updateHostGroupMutation = useMutation({
 mutationFn: ({ hostId, hostGroupId }) => {
 console.log("updateHostGroupMutation called with:", {
 hostId,
@@ -485,6 +587,46 @@ const Hosts = () => {
 },
 });
+
+const updateHostGroupsMutation = useMutation({
+mutationFn: ({ hostId, groupIds }) => {
+console.log("updateHostGroupsMutation called with:", {
+hostId,
+groupIds,
+});
+return adminHostsAPI.updateGroups(hostId, groupIds).then((res) => {
+console.log("updateGroups API response:", res);
+return res.data;
+});
+},
+onSuccess: (data) => {
+// Update the cache with the new host data
+queryClient.setQueryData(["hosts"], (oldData) => {
+console.log("Old cache data before update:", oldData);
+if (!oldData) return oldData;
+const updatedData = oldData.map((host) => {
+if (host.id === data.host.id) {
+console.log(
+"Updating host in cache:",
+host.id,
+"with new data:",
+data.host,
+);
+return data.host;
+}
+return host;
+});
+console.log("New cache data after update:", updatedData);
+return updatedData;
+});
+
+// Also invalidate to ensure consistency
+queryClient.invalidateQueries(["hosts"]);
+},
+onError: (error) => {
+console.error("updateHostGroupsMutation error:", error);
+},
+});
+
 const toggleAutoUpdateMutation = useMutation({
 mutationFn: ({ hostId, autoUpdate }) =>
 adminHostsAPI
@@ -525,8 +667,8 @@ const Hosts = () => {
 }
 };

-const handleBulkAssign = (hostGroupId) => {
-bulkUpdateGroupMutation.mutate({ hostIds: selectedHosts, hostGroupId });
+const handleBulkAssign = (groupIds) => {
+bulkUpdateGroupMutation.mutate({ hostIds: selectedHosts, groupIds });
 };

 const handleBulkDelete = () => {
@@ -562,7 +704,7 @@ const Hosts = () => {
 osFilter === "all" ||
 host.os_type?.toLowerCase() === osFilter.toLowerCase();

-// URL filter for hosts needing updates, inactive hosts, up-to-date hosts, or stale hosts
+// URL filter for hosts needing updates, inactive hosts, up-to-date hosts, stale hosts, or offline hosts
 const filter = searchParams.get("filter");
 const matchesUrlFilter =
 (filter !== "needsUpdates" ||
@@ -570,7 +712,8 @@ const Hosts = () => {
 (filter !== "inactive" ||
 (host.effectiveStatus || host.status) === "inactive") &&
 (filter !== "upToDate" || (!host.isStale && host.updatesCount === 0)) &&
-(filter !== "stale" || host.isStale);
+(filter !== "stale" || host.isStale) &&
+(filter !== "offline" || wsStatusMap[host.api_id]?.connected !== true);

 // Hide stale filter
 const matchesHideStale = !hideStale || !host.isStale;
@@ -655,6 +798,7 @@ const Hosts = () => {
 sortDirection,
 searchParams,
 hideStale,
+wsStatusMap,
 ]);

 // Get unique OS types from hosts for dynamic dropdown
@@ -756,10 +900,19 @@ const Hosts = () => {
 { id: "group", label: "Group", visible: true, order: 4 },
 { id: "os", label: "OS", visible: true, order: 5 },
 { id: "os_version", label: "OS Version", visible: false, order: 6 },
-{ id: "status", label: "Status", visible: true, order: 7 },
-{ id: "updates", label: "Updates", visible: true, order: 8 },
-{ id: "last_update", label: "Last Update", visible: true, order: 9 },
-{ id: "actions", label: "Actions", visible: true, order: 10 },
+{ id: "agent_version", label: "Agent Version", visible: true, order: 7 },
+{
+id: "auto_update",
+label: "Agent Auto-Update",
+visible: true,
+order: 8,
+},
+{ id: "ws_status", label: "Connection", visible: true, order: 9 },
+{ id: "status", label: "Status", visible: true, order: 10 },
+{ id: "updates", label: "Updates", visible: true, order: 11 },
+{ id: "notes", label: "Notes", visible: false, order: 12 },
+{ id: "last_update", label: "Last Update", visible: true, order: 13 },
+{ id: "actions", label: "Actions", visible: true, order: 14 },
 ];
 updateColumnConfig(defaultConfig);
 };
@@ -822,27 +975,33 @@ const Hosts = () => {
|
|||||||
{host.ip || "N/A"}
|
{host.ip || "N/A"}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
case "group":
|
case "group": {
|
||||||
|
// Extract group IDs from the new many-to-many structure
|
||||||
|
const groupIds =
|
||||||
|
host.host_group_memberships?.map(
|
||||||
|
(membership) => membership.host_groups.id,
|
||||||
|
) || [];
|
||||||
return (
|
return (
|
||||||
<InlineGroupEdit
|
<InlineMultiGroupEdit
|
||||||
key={`${host.id}-${host.host_groups?.id || "ungrouped"}-${host.host_groups?.name || "ungrouped"}`}
|
key={`${host.id}-${groupIds.join(",")}`}
|
||||||
value={host.host_groups?.id}
|
value={groupIds}
|
||||||
onSave={(newGroupId) =>
|
onSave={(newGroupIds) =>
|
||||||
updateHostGroupMutation.mutate({
|
updateHostGroupsMutation.mutate({
|
||||||
hostId: host.id,
|
hostId: host.id,
|
||||||
hostGroupId: newGroupId,
|
groupIds: newGroupIds,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
options={hostGroups || []}
|
options={hostGroups || []}
|
||||||
placeholder="Select group..."
|
placeholder="Select groups..."
|
||||||
className="w-full"
|
className="w-full"
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
}
|
||||||
case "os":
|
case "os":
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center gap-2 text-sm text-secondary-900 dark:text-white">
|
<div className="flex items-center gap-2 text-sm text-secondary-900 dark:text-white">
|
||||||
<OSIcon osType={host.os_type} className="h-4 w-4" />
|
<OSIcon osType={host.os_type} className="h-4 w-4" />
|
||||||
<span>{host.os_type}</span>
|
<span>{getOSDisplayName(host.os_type)}</span>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
case "os_version":
|
case "os_version":
|
||||||
@@ -871,6 +1030,38 @@ const Hosts = () => {
|
|||||||
falseLabel="No"
|
falseLabel="No"
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
case "ws_status": {
|
||||||
|
const wsStatus = wsStatusMap[host.api_id];
|
||||||
|
if (!wsStatus) {
|
||||||
|
return (
|
||||||
|
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-medium bg-gray-100 text-gray-600 dark:bg-gray-700 dark:text-gray-400">
|
||||||
|
<div className="w-2 h-2 bg-gray-400 rounded-full mr-1.5"></div>
|
||||||
|
Unknown
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${
|
||||||
|
wsStatus.connected
|
||||||
|
? "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200"
|
||||||
|
: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200"
|
||||||
|
}`}
|
||||||
|
title={
|
||||||
|
wsStatus.connected
|
||||||
|
? `Agent connected via ${wsStatus.secure ? "WSS (secure)" : "WS (insecure)"}`
|
||||||
|
: "Agent not connected"
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<div
|
||||||
|
className={`w-2 h-2 rounded-full mr-1.5 ${
|
||||||
|
wsStatus.connected ? "bg-green-500 animate-pulse" : "bg-red-500"
|
||||||
|
}`}
|
||||||
|
></div>
|
||||||
|
{wsStatus.connected ? (wsStatus.secure ? "WSS" : "WS") : "Offline"}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
}
|
||||||
case "status":
|
case "status":
|
||||||
return (
|
return (
|
||||||
<div className="text-sm text-secondary-900 dark:text-white">
|
<div className="text-sm text-secondary-900 dark:text-white">
|
||||||
@@ -966,13 +1157,13 @@ const Hosts = () => {
|
|||||||
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleStaleClick = () => {
|
const handleConnectionStatusClick = () => {
|
||||||
// Filter to show stale/inactive hosts
|
// Filter to show offline hosts (not connected via WebSocket)
|
||||||
setStatusFilter("inactive");
|
setStatusFilter("all");
|
||||||
setShowFilters(true);
|
setShowFilters(true);
|
||||||
// We'll use the existing inactive URL filter logic
|
// Use a new URL filter for connection status
|
||||||
const newSearchParams = new URLSearchParams(window.location.search);
|
const newSearchParams = new URLSearchParams(window.location.search);
|
||||||
newSearchParams.set("filter", "inactive");
|
newSearchParams.set("filter", "offline");
|
||||||
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
navigate(`/hosts?${newSearchParams.toString()}`, { replace: true });
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1026,13 +1217,12 @@ const Hosts = () => {
|
|||||||
type="button"
|
type="button"
|
||||||
onClick={() => refetch()}
|
onClick={() => refetch()}
|
||||||
disabled={isFetching}
|
disabled={isFetching}
|
||||||
className="btn-outline flex items-center gap-2"
|
className="btn-outline flex items-center justify-center p-2"
|
||||||
title="Refresh hosts data"
|
title="Refresh hosts data"
|
||||||
>
|
>
|
||||||
<RefreshCw
|
<RefreshCw
|
||||||
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
className={`h-4 w-4 ${isFetching ? "animate-spin" : ""}`}
|
||||||
/>
|
/>
|
||||||
{isFetching ? "Refreshing..." : "Refresh"}
|
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -1102,17 +1292,46 @@ const Hosts = () => {
|
|||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
|
className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 text-left w-full"
|
||||||
onClick={handleStaleClick}
|
onClick={handleConnectionStatusClick}
|
||||||
>
|
>
|
||||||
<div className="flex items-center">
|
<div className="flex items-center">
|
||||||
<AlertTriangle className="h-5 w-5 text-danger-600 mr-2" />
|
<Wifi className="h-5 w-5 text-primary-600 mr-2" />
|
||||||
<div>
|
<div className="flex-1">
|
||||||
<p className="text-sm text-secondary-500 dark:text-white">
|
<p className="text-sm text-secondary-500 dark:text-white mb-1">
|
||||||
Stale
|
Connection Status
|
||||||
</p>
|
|
||||||
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
|
||||||
{hosts?.filter((h) => h.isStale).length || 0}
|
|
||||||
</p>
|
</p>
|
||||||
|
{(() => {
|
||||||
|
const connectedCount =
|
||||||
|
hosts?.filter(
|
||||||
|
(h) => wsStatusMap[h.api_id]?.connected === true,
|
||||||
|
).length || 0;
|
||||||
|
const offlineCount =
|
||||||
|
hosts?.filter(
|
||||||
|
(h) => wsStatusMap[h.api_id]?.connected !== true,
|
||||||
|
).length || 0;
|
||||||
|
return (
|
||||||
|
<div className="flex gap-4">
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
<div className="w-2 h-2 bg-green-500 rounded-full"></div>
|
||||||
|
<span className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{connectedCount}
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
Connected
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
<div className="w-2 h-2 bg-red-500 rounded-full"></div>
|
||||||
|
<span className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{offlineCount}
|
||||||
|
</span>
|
||||||
|
<span className="text-xs text-secondary-500 dark:text-secondary-400">
|
||||||
|
Offline
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})()}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</button>
|
</button>
|
||||||
@@ -1437,6 +1656,11 @@ const Hosts = () => {
|
|||||||
<div className="flex items-center gap-2 font-normal text-xs text-secondary-500 dark:text-secondary-300 normal-case tracking-wider">
|
<div className="flex items-center gap-2 font-normal text-xs text-secondary-500 dark:text-secondary-300 normal-case tracking-wider">
|
||||||
{column.label}
|
{column.label}
|
||||||
</div>
|
</div>
|
||||||
|
) : column.id === "ws_status" ? (
|
||||||
|
<div className="flex items-center gap-2 font-normal text-xs text-secondary-500 dark:text-secondary-300 normal-case tracking-wider">
|
||||||
|
<Wifi className="h-3 w-3" />
|
||||||
|
{column.label}
|
||||||
|
</div>
|
||||||
) : column.id === "status" ? (
|
) : column.id === "status" ? (
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -1569,8 +1793,7 @@ const BulkAssignModal = ({
|
|||||||
onAssign,
|
onAssign,
|
||||||
isLoading,
|
isLoading,
|
||||||
}) => {
|
}) => {
|
||||||
const [selectedGroupId, setSelectedGroupId] = useState("");
|
const [selectedGroupIds, setSelectedGroupIds] = useState([]);
|
||||||
const bulkHostGroupId = useId();
|
|
||||||
|
|
||||||
// Fetch host groups for selection
|
// Fetch host groups for selection
|
||||||
const { data: hostGroups } = useQuery({
|
const { data: hostGroups } = useQuery({
|
||||||
@@ -1584,7 +1807,17 @@ const BulkAssignModal = ({
|
|||||||
|
|
||||||
const handleSubmit = (e) => {
|
const handleSubmit = (e) => {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
onAssign(selectedGroupId || null);
|
onAssign(selectedGroupIds);
|
||||||
|
};
|
||||||
|
|
||||||
|
const toggleGroup = (groupId) => {
|
||||||
|
setSelectedGroupIds((prev) => {
|
||||||
|
if (prev.includes(groupId)) {
|
||||||
|
return prev.filter((id) => id !== groupId);
|
||||||
|
} else {
|
||||||
|
return [...prev, groupId];
|
||||||
|
}
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -1592,7 +1825,7 @@ const BulkAssignModal = ({
|
|||||||
<div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
|
<div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
|
||||||
<div className="flex justify-between items-center mb-4">
|
<div className="flex justify-between items-center mb-4">
|
||||||
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white">
|
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white">
|
||||||
Assign to Host Group
|
Assign to Host Groups
|
||||||
</h3>
|
</h3>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
@@ -1622,27 +1855,43 @@ const BulkAssignModal = ({
|
|||||||
|
|
||||||
<form onSubmit={handleSubmit} className="space-y-4">
|
<form onSubmit={handleSubmit} className="space-y-4">
|
||||||
<div>
|
<div>
|
||||||
<label
|
<span className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-3">
|
||||||
htmlFor={bulkHostGroupId}
|
Host Groups
|
||||||
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-1"
|
</span>
|
||||||
>
|
<div className="space-y-2 max-h-48 overflow-y-auto">
|
||||||
Host Group
|
{/* Host Group Options */}
|
||||||
</label>
|
|
||||||
<select
|
|
||||||
id={bulkHostGroupId}
|
|
||||||
value={selectedGroupId}
|
|
||||||
onChange={(e) => setSelectedGroupId(e.target.value)}
|
|
||||||
className="w-full px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-700 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500"
|
|
||||||
>
|
|
||||||
<option value="">No group (ungrouped)</option>
|
|
||||||
{hostGroups?.map((group) => (
|
{hostGroups?.map((group) => (
|
||||||
<option key={group.id} value={group.id}>
|
<label
|
||||||
{group.name}
|
key={group.id}
|
||||||
</option>
|
className={`flex items-center gap-3 p-3 border-2 rounded-lg transition-all duration-200 cursor-pointer ${
|
||||||
|
selectedGroupIds.includes(group.id)
|
||||||
|
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30"
|
||||||
|
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 hover:border-secondary-400 dark:hover:border-secondary-500"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={selectedGroupIds.includes(group.id)}
|
||||||
|
onChange={() => toggleGroup(group.id)}
|
||||||
|
className="w-4 h-4 text-primary-600 bg-gray-100 border-gray-300 rounded focus:ring-primary-500 dark:focus:ring-primary-600 dark:ring-offset-gray-800 focus:ring-2 dark:bg-gray-700 dark:border-gray-600"
|
||||||
|
/>
|
||||||
|
<div className="flex items-center gap-2 flex-1">
|
||||||
|
{group.color && (
|
||||||
|
<div
|
||||||
|
className="w-3 h-3 rounded-full border border-secondary-300 dark:border-secondary-500 flex-shrink-0"
|
||||||
|
style={{ backgroundColor: group.color }}
|
||||||
|
></div>
|
||||||
|
)}
|
||||||
|
<div className="text-sm font-medium text-secondary-700 dark:text-secondary-200">
|
||||||
|
{group.name}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
))}
|
))}
|
||||||
</select>
|
</div>
|
||||||
<p className="mt-1 text-sm text-secondary-500 dark:text-secondary-400">
|
<p className="mt-2 text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
Select a group to assign these hosts to, or leave ungrouped.
|
Select one or more groups to assign these hosts to, or leave
|
||||||
|
ungrouped.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -1656,7 +1905,7 @@ const BulkAssignModal = ({
|
|||||||
Cancel
|
Cancel
|
||||||
</button>
|
</button>
|
||||||
<button type="submit" className="btn-primary" disabled={isLoading}>
|
<button type="submit" className="btn-primary" disabled={isLoading}>
|
||||||
{isLoading ? "Assigning..." : "Assign to Group"}
|
{isLoading ? "Assigning..." : "Assign to Groups"}
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
@@ -1785,9 +2034,10 @@ const ColumnSettingsModal = ({
|
|||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
|
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50 p-4">
|
||||||
<div className="bg-white dark:bg-secondary-800 rounded-lg shadow-xl max-w-md w-full mx-4">
|
<div className="bg-white dark:bg-secondary-800 rounded-lg shadow-xl max-w-lg w-full max-h-[85vh] flex flex-col">
|
||||||
<div className="px-6 py-4 border-b border-secondary-200 dark:border-secondary-600">
|
{/* Header */}
|
||||||
|
<div className="px-6 py-4 border-b border-secondary-200 dark:border-secondary-600 flex-shrink-0">
|
||||||
<div className="flex items-center justify-between">
|
<div className="flex items-center justify-between">
|
||||||
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
|
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
|
||||||
Column Settings
|
Column Settings
|
||||||
@@ -1800,14 +2050,14 @@ const ColumnSettingsModal = ({
|
|||||||
<X className="h-5 w-5" />
|
<X className="h-5 w-5" />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
<p className="text-sm text-secondary-600 dark:text-secondary-300 mt-2">
|
||||||
|
|
||||||
<div className="px-6 py-4">
|
|
||||||
<p className="text-sm text-secondary-600 dark:text-secondary-300 mb-4">
|
|
||||||
Drag to reorder columns or toggle visibility
|
Drag to reorder columns or toggle visibility
|
||||||
</p>
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="space-y-2">
|
{/* Scrollable content */}
|
||||||
|
<div className="px-6 py-4 flex-1 overflow-y-auto">
|
||||||
|
<div className="space-y-1">
|
||||||
{columnConfig.map((column, index) => (
|
{columnConfig.map((column, index) => (
|
||||||
<button
|
<button
|
||||||
key={column.id}
|
key={column.id}
|
||||||
@@ -1824,22 +2074,22 @@ const ColumnSettingsModal = ({
|
|||||||
// Focus handling for keyboard users
|
// Focus handling for keyboard users
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={`flex items-center justify-between p-3 border rounded-lg cursor-move w-full text-left ${
|
className={`flex items-center justify-between p-2.5 border rounded-lg cursor-move w-full text-left transition-colors ${
|
||||||
draggedIndex === index
|
draggedIndex === index
|
||||||
? "opacity-50"
|
? "opacity-50"
|
||||||
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
|
||||||
} border-secondary-200 dark:border-secondary-600`}
|
} border-secondary-200 dark:border-secondary-600`}
|
||||||
>
|
>
|
||||||
<div className="flex items-center gap-3">
|
<div className="flex items-center gap-2.5">
|
||||||
<GripVertical className="h-4 w-4 text-secondary-400 dark:text-secondary-500" />
|
<GripVertical className="h-4 w-4 text-secondary-400 dark:text-secondary-500 flex-shrink-0" />
|
||||||
<span className="text-sm font-medium text-secondary-900 dark:text-white">
|
<span className="text-sm font-medium text-secondary-900 dark:text-white truncate">
|
||||||
{column.label}
|
{column.label}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={() => onToggleVisibility(column.id)}
|
onClick={() => onToggleVisibility(column.id)}
|
||||||
className={`p-1 rounded ${
|
className={`p-1 rounded transition-colors flex-shrink-0 ${
|
||||||
column.visible
|
column.visible
|
||||||
? "text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300"
|
? "text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
: "text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300"
|
: "text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300"
|
||||||
@@ -1854,8 +2104,11 @@ const ColumnSettingsModal = ({
|
|||||||
</button>
|
</button>
|
||||||
))}
|
))}
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="flex justify-between mt-6">
|
{/* Footer */}
|
||||||
|
<div className="px-6 py-4 border-t border-secondary-200 dark:border-secondary-600 flex-shrink-0">
|
||||||
|
<div className="flex justify-between">
|
||||||
<button type="button" onClick={onReset} className="btn-outline">
|
<button type="button" onClick={onReset} className="btn-outline">
|
||||||
Reset to Default
|
Reset to Default
|
||||||
</button>
|
</button>
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import { useEffect, useId, useState } from "react";

 import { useNavigate } from "react-router-dom";
 import { useAuth } from "../contexts/AuthContext";
-import { authAPI } from "../utils/api";
+import { authAPI, isCorsError } from "../utils/api";

 const Login = () => {
 const usernameId = useId();
@@ -82,7 +82,21 @@ const Login = () => {
 setError(result.error || "Login failed");
 }
 } catch (err) {
-setError(err.response?.data?.error || "Login failed");
+// Check for CORS/network errors first
+if (isCorsError(err)) {
+setError(
+"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+);
+} else if (
+err.name === "TypeError" &&
+err.message?.includes("Failed to fetch")
+) {
+setError(
+"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+);
+} else {
+setError(err.response?.data?.error || "Login failed");
+}
 } finally {
 setIsLoading(false);
 }
@@ -112,12 +126,25 @@ const Login = () => {
 }
 } catch (err) {
 console.error("Signup error:", err);
-const errorMessage =
-err.response?.data?.error ||
-(err.response?.data?.errors && err.response.data.errors.length > 0
-? err.response.data.errors.map((e) => e.msg).join(", ")
-: err.message || "Signup failed");
-setError(errorMessage);
+if (isCorsError(err)) {
+setError(
+"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+);
+} else if (
+err.name === "TypeError" &&
+err.message?.includes("Failed to fetch")
+) {
+setError(
+"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+);
+} else {
+const errorMessage =
+err.response?.data?.error ||
+(err.response?.data?.errors && err.response.data.errors.length > 0
+? err.response.data.errors.map((e) => e.msg).join(", ")
+: err.message || "Signup failed");
+setError(errorMessage);
+}
 } finally {
 setIsLoading(false);
 }
@@ -146,9 +173,22 @@ const Login = () => {
 }
 } catch (err) {
 console.error("TFA verification error:", err);
-const errorMessage =
-err.response?.data?.error || err.message || "TFA verification failed";
-setError(errorMessage);
+if (isCorsError(err)) {
+setError(
+"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+);
+} else if (
+err.name === "TypeError" &&
+err.message?.includes("Failed to fetch")
+) {
+setError(
+"CORS_ORIGIN mismatch - please set your URL in your environment variable",
+);
+} else {
+const errorMessage =
+err.response?.data?.error || err.message || "TFA verification failed";
+setError(errorMessage);
+}
 // Clear the token input for security
 setTfaData({ token: "" });
 } finally {
@@ -153,6 +153,14 @@ const Packages = () => {
|
|||||||
}));
|
}));
|
||||||
}, [packagesResponse]);
|
}, [packagesResponse]);
|
||||||
|
|
||||||
|
// Fetch dashboard stats for card counts (consistent with homepage)
|
||||||
|
const { data: dashboardStats } = useQuery({
|
||||||
|
queryKey: ["dashboardStats"],
|
||||||
|
queryFn: () => dashboardAPI.getStats().then((res) => res.data),
|
||||||
|
staleTime: 5 * 60 * 1000, // Data stays fresh for 5 minutes
|
||||||
|
refetchOnWindowFocus: false, // Don't refetch when window regains focus
|
||||||
|
});
|
||||||
|
|
||||||
// Fetch hosts data to get total packages count
|
// Fetch hosts data to get total packages count
|
||||||
const { data: hosts } = useQuery({
|
const { data: hosts } = useQuery({
|
||||||
queryKey: ["hosts"],
|
queryKey: ["hosts"],
|
||||||
@@ -446,25 +454,21 @@ const Packages = () => {
|
|||||||
const uniquePackageHostsCount = uniquePackageHosts.size;
|
const uniquePackageHostsCount = uniquePackageHosts.size;
|
||||||
|
|
||||||
// Calculate total packages installed
|
// Calculate total packages installed
|
||||||
// When filtering by host, count each package once (since it can only be installed once per host)
|
// Show unique package count (same as table) for consistency
-  // When not filtering, sum up all installations across all hosts
-  const totalPackagesCount =
-    hostFilter && hostFilter !== "all"
-      ? packages?.length || 0
-      : packages?.reduce(
-          (sum, pkg) => sum + (pkg.stats?.totalInstalls || 0),
-          0,
-        ) || 0;
+  const totalPackagesCount = packages?.length || 0;

-  // Calculate outdated packages
-  const outdatedPackagesCount =
-    packages?.filter((pkg) => (pkg.stats?.updatesNeeded || 0) > 0).length || 0;
-
-  // Calculate security updates
-  const securityUpdatesCount =
-    packages?.filter((pkg) => (pkg.stats?.securityUpdates || 0) > 0).length ||
-    0;
+  // Calculate total installations across all hosts
+  const totalInstallationsCount =
+    packages?.reduce((sum, pkg) => sum + (pkg.stats?.totalInstalls || 0), 0) ||
+    0;
+
+  // Use dashboard stats for outdated packages count (consistent with homepage)
+  const outdatedPackagesCount =
+    dashboardStats?.cards?.totalOutdatedPackages || 0;
+
+  // Use dashboard stats for security updates count (consistent with homepage)
+  const securityUpdatesCount = dashboardStats?.cards?.securityUpdates || 0;

   if (isLoading) {
     return (
       <div className="flex items-center justify-center h-64">
@@ -529,13 +533,13 @@ const Packages = () => {
       </div>

       {/* Summary Stats */}
-      <div className="grid grid-cols-1 sm:grid-cols-4 gap-4 mb-6 flex-shrink-0">
+      <div className="grid grid-cols-1 sm:grid-cols-5 gap-4 mb-6 flex-shrink-0">
         <div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
           <div className="flex items-center">
             <Package className="h-5 w-5 text-primary-600 mr-2" />
             <div>
               <p className="text-sm text-secondary-500 dark:text-white">
-                Total Installed
+                Total Packages
               </p>
               <p className="text-xl font-semibold text-secondary-900 dark:text-white">
                 {totalPackagesCount}
@@ -544,6 +548,20 @@ const Packages = () => {
             </div>
           </div>
         </div>

+        <div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
+          <div className="flex items-center">
+            <Package className="h-5 w-5 text-blue-600 mr-2" />
+            <div>
+              <p className="text-sm text-secondary-500 dark:text-white">
+                Total Installations
+              </p>
+              <p className="text-xl font-semibold text-secondary-900 dark:text-white">
+                {totalInstallationsCount}
+              </p>
+            </div>
+          </div>
+        </div>
+
         <div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
           <div className="flex items-center">
             <Package className="h-5 w-5 text-warning-600 mr-2" />
@@ -26,7 +26,7 @@ import { useEffect, useId, useState } from "react";

 import { useAuth } from "../contexts/AuthContext";
 import { useTheme } from "../contexts/ThemeContext";
-import { tfaAPI } from "../utils/api";
+import { isCorsError, tfaAPI } from "../utils/api";

 const Profile = () => {
   const usernameId = useId();
@@ -88,8 +88,15 @@ const Profile = () => {
           text: result.error || "Failed to update profile",
         });
       }
-    } catch {
-      setMessage({ type: "error", text: "Network error occurred" });
+    } catch (error) {
+      if (isCorsError(error)) {
+        setMessage({
+          type: "error",
+          text: "CORS_ORIGIN mismatch - please set your URL in your environment variable",
+        });
+      } else {
+        setMessage({ type: "error", text: "Network error occurred" });
+      }
     } finally {
       setIsLoading(false);
     }
@@ -133,8 +140,15 @@ const Profile = () => {
           text: result.error || "Failed to change password",
         });
       }
-    } catch {
-      setMessage({ type: "error", text: "Network error occurred" });
+    } catch (error) {
+      if (isCorsError(error)) {
+        setMessage({
+          type: "error",
+          text: "CORS_ORIGIN mismatch - please set your URL in your environment variable",
+        });
+      } else {
+        setMessage({ type: "error", text: "Network error occurred" });
+      }
     } finally {
       setIsLoading(false);
     }
@@ -1,699 +0,0 @@
|
|||||||
import {
|
|
||||||
Activity,
|
|
||||||
AlertCircle,
|
|
||||||
CheckCircle,
|
|
||||||
Clock,
|
|
||||||
Download,
|
|
||||||
Eye,
|
|
||||||
Filter,
|
|
||||||
Package,
|
|
||||||
Pause,
|
|
||||||
Play,
|
|
||||||
RefreshCw,
|
|
||||||
Search,
|
|
||||||
Server,
|
|
||||||
XCircle,
|
|
||||||
} from "lucide-react";
|
|
||||||
import { useState } from "react";
|
|
||||||
|
|
||||||
const Queue = () => {
|
|
||||||
const [activeTab, setActiveTab] = useState("server");
|
|
||||||
const [filterStatus, setFilterStatus] = useState("all");
|
|
||||||
const [searchQuery, setSearchQuery] = useState("");
|
|
||||||
|
|
||||||
// Mock data for demonstration
|
|
||||||
const serverQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
type: "Server Update Check",
|
|
||||||
description: "Check for server updates from GitHub",
|
|
||||||
status: "running",
|
|
||||||
priority: "high",
|
|
||||||
createdAt: "2024-01-15 10:30:00",
|
|
||||||
estimatedCompletion: "2024-01-15 10:35:00",
|
|
||||||
progress: 75,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
type: "Session Cleanup",
|
|
||||||
description: "Clear expired login sessions",
|
|
||||||
status: "pending",
|
|
||||||
priority: "medium",
|
|
||||||
createdAt: "2024-01-15 10:25:00",
|
|
||||||
estimatedCompletion: "2024-01-15 10:40:00",
|
|
||||||
progress: 0,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
type: "Database Optimization",
|
|
||||||
description: "Optimize database indexes and cleanup old records",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
createdAt: "2024-01-15 09:00:00",
|
|
||||||
completedAt: "2024-01-15 09:45:00",
|
|
||||||
progress: 100,
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 1,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
type: "Backup Creation",
|
|
||||||
description: "Create system backup",
|
|
||||||
status: "failed",
|
|
||||||
priority: "high",
|
|
||||||
createdAt: "2024-01-15 08:00:00",
|
|
||||||
errorMessage: "Insufficient disk space",
|
|
||||||
progress: 45,
|
|
||||||
retryCount: 2,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const agentQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
hostname: "web-server-01",
|
|
||||||
ip: "192.168.1.100",
|
|
||||||
type: "Agent Update Collection",
|
|
||||||
description: "Agent v1.2.7 → v1.2.8",
|
|
||||||
status: "pending",
|
|
||||||
priority: "medium",
|
|
||||||
lastCommunication: "2024-01-15 10:00:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 11:00:00",
|
|
||||||
currentVersion: "1.2.7",
|
|
||||||
targetVersion: "1.2.8",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 5,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
hostname: "db-server-02",
|
|
||||||
ip: "192.168.1.101",
|
|
||||||
type: "Data Collection",
|
|
||||||
description: "Collect package and system information",
|
|
||||||
status: "running",
|
|
||||||
priority: "high",
|
|
||||||
lastCommunication: "2024-01-15 10:15:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 11:15:00",
|
|
||||||
currentVersion: "1.2.8",
|
|
||||||
targetVersion: "1.2.8",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
hostname: "app-server-03",
|
|
||||||
ip: "192.168.1.102",
|
|
||||||
type: "Agent Update Collection",
|
|
||||||
description: "Agent v1.2.6 → v1.2.8",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
lastCommunication: "2024-01-15 09:30:00",
|
|
||||||
completedAt: "2024-01-15 09:45:00",
|
|
||||||
currentVersion: "1.2.8",
|
|
||||||
targetVersion: "1.2.8",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 5,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
hostname: "test-server-04",
|
|
||||||
ip: "192.168.1.103",
|
|
||||||
type: "Data Collection",
|
|
||||||
description: "Collect package and system information",
|
|
||||||
status: "failed",
|
|
||||||
priority: "medium",
|
|
||||||
lastCommunication: "2024-01-15 08:00:00",
|
|
||||||
errorMessage: "Connection timeout",
|
|
||||||
retryCount: 3,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const patchQueueData = [
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
hostname: "web-server-01",
|
|
||||||
ip: "192.168.1.100",
|
|
||||||
packages: ["nginx", "openssl", "curl"],
|
|
||||||
type: "Security Updates",
|
|
||||||
description: "Apply critical security patches",
|
|
||||||
status: "pending",
|
|
||||||
priority: "high",
|
|
||||||
scheduledFor: "2024-01-15 19:00:00",
|
|
||||||
lastCommunication: "2024-01-15 18:00:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 19:00:00",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 2,
|
|
||||||
hostname: "db-server-02",
|
|
||||||
ip: "192.168.1.101",
|
|
||||||
packages: ["postgresql", "python3"],
|
|
||||||
type: "Feature Updates",
|
|
||||||
description: "Update database and Python packages",
|
|
||||||
status: "running",
|
|
||||||
priority: "medium",
|
|
||||||
scheduledFor: "2024-01-15 20:00:00",
|
|
||||||
lastCommunication: "2024-01-15 19:15:00",
|
|
||||||
nextExpectedCommunication: "2024-01-15 20:15:00",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 3,
|
|
||||||
hostname: "app-server-03",
|
|
||||||
ip: "192.168.1.102",
|
|
||||||
packages: ["nodejs", "npm"],
|
|
||||||
type: "Maintenance Updates",
|
|
||||||
description: "Update Node.js and npm packages",
|
|
||||||
status: "completed",
|
|
||||||
priority: "low",
|
|
||||||
scheduledFor: "2024-01-15 18:30:00",
|
|
||||||
completedAt: "2024-01-15 18:45:00",
|
|
||||||
retryCount: 0,
|
|
||||||
maxRetries: 2,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 4,
|
|
||||||
hostname: "test-server-04",
|
|
||||||
ip: "192.168.1.103",
|
|
||||||
packages: ["docker", "docker-compose"],
|
|
||||||
type: "Security Updates",
|
|
||||||
description: "Update Docker components",
|
|
||||||
status: "failed",
|
|
||||||
priority: "high",
|
|
||||||
scheduledFor: "2024-01-15 17:00:00",
|
|
||||||
errorMessage: "Package conflicts detected",
|
|
||||||
retryCount: 2,
|
|
||||||
maxRetries: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const getStatusIcon = (status) => {
|
|
||||||
switch (status) {
|
|
||||||
case "running":
|
|
||||||
return <RefreshCw className="h-4 w-4 text-blue-500 animate-spin" />;
|
|
||||||
case "completed":
|
|
||||||
return <CheckCircle className="h-4 w-4 text-green-500" />;
|
|
||||||
case "failed":
|
|
||||||
return <XCircle className="h-4 w-4 text-red-500" />;
|
|
||||||
case "pending":
|
|
||||||
return <Clock className="h-4 w-4 text-yellow-500" />;
|
|
||||||
case "paused":
|
|
||||||
return <Pause className="h-4 w-4 text-gray-500" />;
|
|
||||||
default:
|
|
||||||
return <AlertCircle className="h-4 w-4 text-gray-500" />;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getStatusColor = (status) => {
|
|
||||||
switch (status) {
|
|
||||||
case "running":
|
|
||||||
return "bg-blue-100 text-blue-800";
|
|
||||||
case "completed":
|
|
||||||
return "bg-green-100 text-green-800";
|
|
||||||
case "failed":
|
|
||||||
return "bg-red-100 text-red-800";
|
|
||||||
case "pending":
|
|
||||||
return "bg-yellow-100 text-yellow-800";
|
|
||||||
case "paused":
|
|
||||||
return "bg-gray-100 text-gray-800";
|
|
||||||
default:
|
|
||||||
return "bg-gray-100 text-gray-800";
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getPriorityColor = (priority) => {
|
|
||||||
switch (priority) {
|
|
||||||
case "high":
|
|
||||||
return "bg-red-100 text-red-800";
|
|
||||||
case "medium":
|
|
||||||
return "bg-yellow-100 text-yellow-800";
|
|
||||||
case "low":
|
|
||||||
return "bg-green-100 text-green-800";
|
|
||||||
default:
|
|
||||||
return "bg-gray-100 text-gray-800";
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const filteredData = (data) => {
|
|
||||||
let filtered = data;
|
|
||||||
|
|
||||||
if (filterStatus !== "all") {
|
|
||||||
filtered = filtered.filter((item) => item.status === filterStatus);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (searchQuery) {
|
|
||||||
filtered = filtered.filter(
|
|
||||||
(item) =>
|
|
||||||
item.hostname?.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
|
||||||
item.type?.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
|
||||||
item.description?.toLowerCase().includes(searchQuery.toLowerCase()),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return filtered;
|
|
||||||
};
|
|
||||||
|
|
||||||
const tabs = [
|
|
||||||
{
|
|
||||||
id: "server",
|
|
||||||
name: "Server Queue",
|
|
||||||
icon: Server,
|
|
||||||
data: serverQueueData,
|
|
||||||
count: serverQueueData.length,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: "agent",
|
|
||||||
name: "Agent Queue",
|
|
||||||
icon: Download,
|
|
||||||
data: agentQueueData,
|
|
||||||
count: agentQueueData.length,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: "patch",
|
|
||||||
name: "Patch Management",
|
|
||||||
icon: Package,
|
|
||||||
data: patchQueueData,
|
|
||||||
count: patchQueueData.length,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
const renderServerQueueItem = (item) => (
|
|
||||||
<div
|
|
||||||
key={item.id}
|
|
||||||
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
|
|
||||||
>
|
|
||||||
<div className="flex items-start justify-between">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="flex items-center gap-3 mb-2">
|
|
||||||
{getStatusIcon(item.status)}
|
|
||||||
<h3 className="font-medium text-gray-900 dark:text-white">
|
|
||||||
{item.type}
|
|
||||||
</h3>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
|
|
||||||
>
|
|
||||||
{item.status}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
|
|
||||||
>
|
|
||||||
{item.priority}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-sm text-gray-600 dark:text-gray-400 mb-3">
|
|
||||||
{item.description}
|
|
||||||
</p>
|
|
||||||
|
|
||||||
{item.status === "running" && (
|
|
||||||
<div className="mb-3">
|
|
||||||
<div className="flex justify-between text-xs text-gray-500 mb-1">
|
|
||||||
<span>Progress</span>
|
|
||||||
<span>{item.progress}%</span>
|
|
||||||
</div>
|
|
||||||
<div className="w-full bg-gray-200 rounded-full h-2">
|
|
||||||
<div
|
|
||||||
className="bg-blue-600 h-2 rounded-full transition-all duration-300"
|
|
||||||
style={{ width: `${item.progress}%` }}
|
|
||||||
></div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Created:</span> {item.createdAt}
|
|
||||||
</div>
|
|
||||||
{item.status === "running" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">ETA:</span>{" "}
|
|
||||||
{item.estimatedCompletion}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "completed" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Completed:</span>{" "}
|
|
||||||
{item.completedAt}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<div className="col-span-2">
|
|
||||||
<span className="font-medium">Error:</span> {item.errorMessage}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{item.retryCount > 0 && (
|
|
||||||
<div className="mt-2 text-xs text-orange-600">
|
|
||||||
Retries: {item.retryCount}/{item.maxRetries}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex gap-2 ml-4">
|
|
||||||
{item.status === "running" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Pause className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
{item.status === "paused" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Play className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<RefreshCw className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Eye className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
const renderAgentQueueItem = (item) => (
|
|
||||||
<div
|
|
||||||
key={item.id}
|
|
||||||
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
|
|
||||||
>
|
|
||||||
<div className="flex items-start justify-between">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="flex items-center gap-3 mb-2">
|
|
||||||
{getStatusIcon(item.status)}
|
|
||||||
<h3 className="font-medium text-gray-900 dark:text-white">
|
|
||||||
{item.hostname}
|
|
||||||
</h3>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
|
|
||||||
>
|
|
||||||
{item.status}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
|
|
||||||
>
|
|
||||||
{item.priority}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
|
|
||||||
{item.type}
|
|
||||||
</p>
|
|
||||||
<p className="text-sm text-gray-500 mb-3">{item.description}</p>
|
|
||||||
|
|
||||||
{item.type === "Agent Update Collection" && (
|
|
||||||
<div className="mb-3 p-2 bg-gray-50 dark:bg-gray-700 rounded">
|
|
||||||
<div className="text-xs text-gray-600 dark:text-gray-400">
|
|
||||||
<span className="font-medium">Version:</span>{" "}
|
|
||||||
{item.currentVersion} → {item.targetVersion}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">IP:</span> {item.ip}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Last Comm:</span>{" "}
|
|
||||||
{item.lastCommunication}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Next Expected:</span>{" "}
|
|
||||||
{item.nextExpectedCommunication}
|
|
||||||
</div>
|
|
||||||
{item.status === "completed" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Completed:</span>{" "}
|
|
||||||
{item.completedAt}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<div className="col-span-2">
|
|
||||||
<span className="font-medium">Error:</span> {item.errorMessage}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{item.retryCount > 0 && (
|
|
||||||
<div className="mt-2 text-xs text-orange-600">
|
|
||||||
Retries: {item.retryCount}/{item.maxRetries}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex gap-2 ml-4">
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<RefreshCw className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Eye className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
const renderPatchQueueItem = (item) => (
|
|
||||||
<div
|
|
||||||
key={item.id}
|
|
||||||
className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 p-4 hover:shadow-md transition-shadow"
|
|
||||||
>
|
|
||||||
<div className="flex items-start justify-between">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="flex items-center gap-3 mb-2">
|
|
||||||
{getStatusIcon(item.status)}
|
|
||||||
<h3 className="font-medium text-gray-900 dark:text-white">
|
|
||||||
{item.hostname}
|
|
||||||
</h3>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getStatusColor(item.status)}`}
|
|
||||||
>
|
|
||||||
{item.status}
|
|
||||||
</span>
|
|
||||||
<span
|
|
||||||
className={`px-2 py-1 text-xs font-medium rounded-full ${getPriorityColor(item.priority)}`}
|
|
||||||
>
|
|
||||||
{item.priority}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<p className="text-sm text-gray-600 dark:text-gray-400 mb-2">
|
|
||||||
{item.type}
|
|
||||||
</p>
|
|
||||||
<p className="text-sm text-gray-500 mb-3">{item.description}</p>
|
|
||||||
|
|
||||||
<div className="mb-3">
|
|
||||||
<div className="text-xs text-gray-600 dark:text-gray-400 mb-1">
|
|
||||||
<span className="font-medium">Packages:</span>
|
|
||||||
</div>
|
|
||||||
<div className="flex flex-wrap gap-1">
|
|
||||||
{item.packages.map((pkg) => (
|
|
||||||
<span
|
|
||||||
key={pkg}
|
|
||||||
className="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded"
|
|
||||||
>
|
|
||||||
{pkg}
|
|
||||||
</span>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="grid grid-cols-2 gap-4 text-xs text-gray-500">
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">IP:</span> {item.ip}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Scheduled:</span>{" "}
|
|
||||||
{item.scheduledFor}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Last Comm:</span>{" "}
|
|
||||||
{item.lastCommunication}
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Next Expected:</span>{" "}
|
|
||||||
{item.nextExpectedCommunication}
|
|
||||||
</div>
|
|
||||||
{item.status === "completed" && (
|
|
||||||
<div>
|
|
||||||
<span className="font-medium">Completed:</span>{" "}
|
|
||||||
{item.completedAt}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<div className="col-span-2">
|
|
||||||
<span className="font-medium">Error:</span> {item.errorMessage}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{item.retryCount > 0 && (
|
|
||||||
<div className="mt-2 text-xs text-orange-600">
|
|
||||||
Retries: {item.retryCount}/{item.maxRetries}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="flex gap-2 ml-4">
|
|
||||||
{item.status === "failed" && (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<RefreshCw className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="p-2 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300"
|
|
||||||
>
|
|
||||||
<Eye className="h-4 w-4" />
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
|
|
||||||
const currentTab = tabs.find((tab) => tab.id === activeTab);
|
|
||||||
const filteredItems = filteredData(currentTab?.data || []);
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="min-h-screen bg-gray-50 dark:bg-gray-900">
|
|
||||||
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
|
||||||
{/* Header */}
|
|
||||||
<div className="mb-8">
|
|
||||||
<h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">
|
|
||||||
Queue Management
|
|
||||||
</h1>
|
|
||||||
<p className="text-gray-600 dark:text-gray-400">
|
|
||||||
Monitor and manage server operations, agent communications, and
|
|
||||||
patch deployments
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Tabs */}
|
|
||||||
<div className="mb-6">
|
|
||||||
<div className="border-b border-gray-200 dark:border-gray-700">
|
|
||||||
<nav className="-mb-px flex space-x-8">
|
|
||||||
{tabs.map((tab) => (
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
key={tab.id}
|
|
||||||
onClick={() => setActiveTab(tab.id)}
|
|
||||||
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
|
|
||||||
activeTab === tab.id
|
|
||||||
? "border-blue-500 text-blue-600 dark:text-blue-400"
|
|
||||||
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
|
|
||||||
}`}
|
|
||||||
>
|
|
||||||
<tab.icon className="h-4 w-4" />
|
|
||||||
{tab.name}
|
|
||||||
<span className="bg-gray-100 dark:bg-gray-700 text-gray-600 dark:text-gray-300 px-2 py-0.5 rounded-full text-xs">
|
|
||||||
{tab.count}
|
|
||||||
</span>
|
|
||||||
</button>
|
|
||||||
))}
|
|
||||||
</nav>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Filters and Search */}
|
|
||||||
<div className="mb-6 flex flex-col sm:flex-row gap-4">
|
|
||||||
<div className="flex-1">
|
|
||||||
<div className="relative">
|
|
||||||
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 text-gray-400 h-4 w-4" />
|
|
||||||
<input
|
|
||||||
type="text"
|
|
||||||
placeholder="Search queues..."
|
|
||||||
value={searchQuery}
|
|
||||||
onChange={(e) => setSearchQuery(e.target.value)}
|
|
||||||
className="w-full pl-10 pr-4 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="flex gap-2">
|
|
||||||
<select
|
|
||||||
value={filterStatus}
|
|
||||||
onChange={(e) => setFilterStatus(e.target.value)}
|
|
||||||
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
|
||||||
>
|
|
||||||
<option value="all">All Status</option>
|
|
||||||
<option value="pending">Pending</option>
|
|
||||||
<option value="running">Running</option>
|
|
||||||
<option value="completed">Completed</option>
|
|
||||||
<option value="failed">Failed</option>
|
|
||||||
<option value="paused">Paused</option>
|
|
||||||
</select>
|
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
className="px-3 py-2 border border-gray-300 dark:border-gray-600 rounded-md bg-white dark:bg-gray-800 text-gray-900 dark:text-white hover:bg-gray-50 dark:hover:bg-gray-700 flex items-center gap-2"
|
|
||||||
>
|
|
||||||
<Filter className="h-4 w-4" />
|
|
||||||
More Filters
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Queue Items */}
|
|
||||||
<div className="space-y-4">
|
|
||||||
{filteredItems.length === 0 ? (
|
|
||||||
<div className="text-center py-12">
|
|
||||||
<Activity className="mx-auto h-12 w-12 text-gray-400" />
|
|
||||||
<h3 className="mt-2 text-sm font-medium text-gray-900 dark:text-white">
|
|
||||||
No queue items found
|
|
||||||
</h3>
|
|
||||||
<p className="mt-1 text-sm text-gray-500 dark:text-gray-400">
|
|
||||||
{searchQuery
|
|
||||||
? "Try adjusting your search criteria"
|
|
||||||
: "No items match the current filters"}
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
filteredItems.map((item) => {
|
|
||||||
switch (activeTab) {
|
|
||||||
case "server":
|
|
||||||
return renderServerQueueItem(item);
|
|
||||||
case "agent":
|
|
||||||
return renderAgentQueueItem(item);
|
|
||||||
case "patch":
|
|
||||||
return renderPatchQueueItem(item);
|
|
||||||
default:
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default Queue;
|
|
||||||
@@ -120,7 +120,7 @@ const Settings = () => {
   });

   // Helper function to get curl flags based on settings
-  const getCurlFlags = () => {
+  const _getCurlFlags = () => {
     return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
   };

@@ -144,7 +144,7 @@ const Settings = () => {
       defaultUserRole: settings.default_user_role || "user",
       githubRepoUrl:
         settings.github_repo_url ||
-        "git@github.com:9technologygroup/patchmon.net.git",
+        "https://github.com/PatchMon/PatchMon.git",
       repositoryType: settings.repository_type || "public",
       sshKeyPath: settings.ssh_key_path || "",
       useCustomSshKey: !!settings.ssh_key_path,
@@ -1155,28 +1155,39 @@ const Settings = () => {
             Agent Uninstall Command
           </h3>
           <div className="mt-2 text-sm text-red-700 dark:text-red-300">
-            <p className="mb-2">
+            <p className="mb-3">
               To completely remove PatchMon from a host:
             </p>
-            <div className="flex items-center gap-2">
-              <div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
-                curl {getCurlFlags()} {window.location.origin}
-                /api/v1/hosts/remove | sudo bash
+            {/* Go Agent Uninstall */}
+            <div className="mb-3">
+              <div className="space-y-2">
+                <div className="flex items-center gap-2">
+                  <div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
+                    sudo patchmon-agent uninstall
+                  </div>
+                  <button
+                    type="button"
+                    onClick={() => {
+                      navigator.clipboard.writeText(
+                        "sudo patchmon-agent uninstall",
+                      );
+                    }}
+                    className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
+                  >
+                    Copy
+                  </button>
+                </div>
+                <div className="text-xs text-red-600 dark:text-red-400">
+                  Options: <code>--remove-config</code>,{" "}
+                  <code>--remove-logs</code>,{" "}
+                  <code>--remove-all</code>, <code>--force</code>
+                </div>
               </div>
-              <button
-                type="button"
-                onClick={() => {
-                  const command = `curl ${getCurlFlags()} ${window.location.origin}/api/v1/hosts/remove | sudo bash`;
-                  navigator.clipboard.writeText(command);
-                  // You could add a toast notification here
-                }}
-                className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
-              >
-                Copy
-              </button>
             </div>

             <p className="mt-2 text-xs">
-              ⚠️ This will remove all PatchMon files,
+              ⚠️ This command will remove all PatchMon files,
               configuration, and crontab entries
             </p>
           </div>
389
frontend/src/pages/docker/ContainerDetail.jsx
Normal file
@@ -0,0 +1,389 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
CheckCircle,
|
||||||
|
Container,
|
||||||
|
ExternalLink,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api, { formatRelativeTime } from "../../utils/api";
|
||||||
|
|
||||||
|
const ContainerDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "container", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/containers/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const container = data?.container;
|
||||||
|
const similarContainers = data?.similarContainers || [];
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !container) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Container not found
|
||||||
|
</h3>
|
||||||
|
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
|
||||||
|
The container you're looking for doesn't exist or has been
|
||||||
|
removed.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const getStatusBadge = (status) => {
|
||||||
|
const statusClasses = {
|
||||||
|
running:
|
||||||
|
"bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
|
||||||
|
exited: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
|
||||||
|
paused:
|
||||||
|
"bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200",
|
||||||
|
restarting:
|
||||||
|
"bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200",
|
||||||
|
};
|
||||||
|
return (
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-3 py-1 rounded-full text-sm font-medium ${
|
||||||
|
statusClasses[status] ||
|
||||||
|
"bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{status}
|
||||||
|
</span>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Header */}
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Container className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{container.name}
|
||||||
|
</h1>
|
||||||
|
{getStatusBadge(container.status)}
|
||||||
|
</div>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
Container ID: {container.container_id.substring(0, 12)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
{/* Update Status Card */}
|
||||||
|
{container.docker_images?.docker_image_updates &&
|
||||||
|
container.docker_images.docker_image_updates.length > 0 ? (
|
||||||
|
<div className="card p-4 bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-yellow-600 dark:text-yellow-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-yellow-200">
|
||||||
|
Update Available
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-yellow-100 truncate">
|
||||||
|
{
|
||||||
|
container.docker_images.docker_image_updates[0]
|
||||||
|
.available_tag
|
||||||
|
}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="card p-4 bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-green-200">
|
||||||
|
Update Status
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-green-100">
|
||||||
|
Up to date
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Server className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">Host</p>
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${container.host?.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 truncate block"
|
||||||
|
>
|
||||||
|
{container.host?.friendly_name || container.host?.hostname}
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
State
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{container.state || container.status}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Last Checked
|
||||||
|
</p>
|
||||||
|
<p className="text-sm font-medium text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.last_checked)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Container and Image Information - Side by Side */}
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
|
{/* Container Details */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Container Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Container ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{container.container_id}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.image_tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{container.started_at && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Started
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.started_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{container.ports && Object.keys(container.ports).length > 0 && (
|
||||||
|
<div className="sm:col-span-2">
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Port Mappings
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{Object.entries(container.ports).map(([key, value]) => (
|
||||||
|
<span
|
||||||
|
key={key}
|
||||||
|
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200"
|
||||||
|
>
|
||||||
|
{key} → {value}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Image Information */}
|
||||||
|
{container.docker_images && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Image Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Repository
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
<Link
|
||||||
|
to={`/docker/images/${container.docker_images.id}`}
|
||||||
|
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
|
||||||
|
>
|
||||||
|
{container.docker_images.repository}
|
||||||
|
<ExternalLink className="ml-1 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Tag
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.docker_images.tag}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Source
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{container.docker_images.source}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
{container.docker_images.size_bytes && (
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Size
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{(
|
||||||
|
Number(container.docker_images.size_bytes) /
|
||||||
|
1024 /
|
||||||
|
1024
|
||||||
|
).toFixed(2)}{" "}
|
||||||
|
MB
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Image ID
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-xs text-secondary-900 dark:text-white font-mono break-all">
|
||||||
|
{container.docker_images.image_id?.substring(0, 12)}...
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Created
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{formatRelativeTime(container.docker_images.created_at)}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Similar Containers */}
|
||||||
|
{similarContainers.length > 0 && (
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Similar Containers (Same Image)
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5">
|
||||||
|
<ul className="divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
{similarContainers.map((similar) => (
|
||||||
|
<li
|
||||||
|
key={similar.id}
|
||||||
|
className="py-4 flex items-center justify-between"
|
||||||
|
>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Container className="h-5 w-5 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to={`/docker/containers/${similar.id}`}
|
||||||
|
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
{similar.name}
|
||||||
|
</Link>
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-secondary-400">
|
||||||
|
{similar.status}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default ContainerDetail;
|
||||||
354
frontend/src/pages/docker/HostDetail.jsx
Normal file
@@ -0,0 +1,354 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
AlertTriangle,
|
||||||
|
ArrowLeft,
|
||||||
|
Container,
|
||||||
|
ExternalLink,
|
||||||
|
Package,
|
||||||
|
RefreshCw,
|
||||||
|
Server,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { Link, useParams } from "react-router-dom";
|
||||||
|
import api from "../../utils/api";
|
||||||
|
|
||||||
|
const HostDetail = () => {
|
||||||
|
const { id } = useParams();
|
||||||
|
|
||||||
|
const { data, isLoading, error } = useQuery({
|
||||||
|
queryKey: ["docker", "host", id],
|
||||||
|
queryFn: async () => {
|
||||||
|
const response = await api.get(`/docker/hosts/${id}`);
|
||||||
|
return response.data;
|
||||||
|
},
|
||||||
|
refetchInterval: 30000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const host = data?.host;
|
||||||
|
const containers = data?.containers || [];
|
||||||
|
const images = data?.images || [];
|
||||||
|
const stats = data?.stats;
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center min-h-screen">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error || !host) {
|
||||||
|
return (
|
||||||
|
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||||
|
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||||
|
<div className="flex">
|
||||||
|
<AlertTriangle className="h-5 w-5 text-red-400" />
|
||||||
|
<div className="ml-3">
|
||||||
|
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
|
||||||
|
Host not found
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<Link
|
||||||
|
to="/docker"
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
|
||||||
|
>
|
||||||
|
<ArrowLeft className="h-4 w-4 mr-2" />
|
||||||
|
Back to Docker
|
||||||
|
</Link>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Server className="h-8 w-8 text-secondary-400 mr-3" />
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
|
||||||
|
{host.friendly_name || host.hostname}
|
||||||
|
</h1>
|
||||||
|
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
|
||||||
|
{host.ip}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<Link
|
||||||
|
to={`/hosts/${id}`}
|
||||||
|
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
|
||||||
|
>
|
||||||
|
View Full Host Details
|
||||||
|
<ExternalLink className="ml-2 h-4 w-4" />
|
||||||
|
</Link>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overview Cards */}
|
||||||
|
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-blue-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Total Containers
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.totalContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-green-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Running
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.runningContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Container className="h-5 w-5 text-red-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Stopped
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.stoppedContainers || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="card p-4">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<div className="flex-shrink-0">
|
||||||
|
<Package className="h-5 w-5 text-purple-600 mr-2" />
|
||||||
|
</div>
|
||||||
|
<div className="w-0 flex-1">
|
||||||
|
<p className="text-sm text-secondary-500 dark:text-white">
|
||||||
|
Images
|
||||||
|
</p>
|
||||||
|
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
|
||||||
|
{stats?.totalImages || 0}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Host Information */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Host Information
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="px-6 py-5 space-y-6">
|
||||||
|
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Friendly Name
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.friendly_name}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
Hostname
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.hostname}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
IP Address
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.ip}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
|
||||||
|
OS
|
||||||
|
</dt>
|
||||||
|
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
|
||||||
|
{host.os_type} {host.os_version}
|
||||||
|
</dd>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Containers */}
|
||||||
|
<div className="card">
|
||||||
|
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
|
||||||
|
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
|
||||||
|
Containers ({containers.length})
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
|
||||||
|
<thead className="bg-secondary-50 dark:bg-secondary-900">
|
||||||
|
<tr>
|
||||||
|
<th
|
||||||
|
scope="col"
|
||||||
|
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
|
||||||
|
>
|
||||||
|
Container Name
|
||||||
|
</th>
|
||||||
|
<th
|
||||||
|
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Image
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Status
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Actions
								</th>
							</tr>
						</thead>
						<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
							{containers.map((container) => (
								<tr key={container.id}>
									<td className="px-6 py-4 whitespace-nowrap">
										<Link
											to={`/docker/containers/${container.id}`}
											className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
										>
											{container.name}
										</Link>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
										{container.image_name}:{container.image_tag}
									</td>
									<td className="px-6 py-4 whitespace-nowrap">
										<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
											{container.status}
										</span>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
										<Link
											to={`/docker/containers/${container.id}`}
											className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
										>
											View
											<ExternalLink className="ml-1 h-4 w-4" />
										</Link>
									</td>
								</tr>
							))}
						</tbody>
					</table>
				</div>
			</div>

			{/* Images */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Images ({images.length})
					</h3>
				</div>
				<div className="overflow-x-auto">
					<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
						<thead className="bg-secondary-50 dark:bg-secondary-900">
							<tr>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Repository
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Tag
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Source
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Actions
								</th>
							</tr>
						</thead>
						<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
							{images.map((image) => (
								<tr key={image.id}>
									<td className="px-6 py-4 whitespace-nowrap">
										<Link
											to={`/docker/images/${image.id}`}
											className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
										>
											{image.repository}
										</Link>
									</td>
									<td className="px-6 py-4 whitespace-nowrap">
										<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
											{image.tag}
										</span>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
										{image.source}
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
										<Link
											to={`/docker/images/${image.id}`}
											className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
										>
											View
											<ExternalLink className="ml-1 h-4 w-4" />
										</Link>
									</td>
								</tr>
							))}
						</tbody>
					</table>
				</div>
			</div>
		</div>
	);
};

export default HostDetail;
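For reference, the two tables above only read a handful of fields from each record. A minimal sketch of the shapes this markup assumes (field names are taken from the JSX above; the values are invented placeholders):

// Shapes consumed by the Containers and Images tables in HostDetail.
// Only the fields referenced in the markup are listed; anything else the
// API returns is simply ignored by this view.
const exampleContainer = {
	id: "3f9c2d1e", // row key, also used in /docker/containers/:id links
	name: "patchmon-backend",
	image_name: "patchmon/backend",
	image_tag: "1.3.0",
	status: "running",
};

const exampleImage = {
	id: "a7b31c9d", // used in /docker/images/:id links
	repository: "patchmon/backend",
	tag: "1.3.0",
	source: "docker-hub",
};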
439 frontend/src/pages/docker/ImageDetail.jsx Normal file
@@ -0,0 +1,439 @@
import { useQuery } from "@tanstack/react-query";
import {
	AlertTriangle,
	ArrowLeft,
	Container,
	ExternalLink,
	Package,
	RefreshCw,
	Server,
	Shield,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";

const ImageDetail = () => {
	const { id } = useParams();

	const { data, isLoading, error } = useQuery({
		queryKey: ["docker", "image", id],
		queryFn: async () => {
			const response = await api.get(`/docker/images/${id}`);
			return response.data;
		},
		refetchInterval: 30000,
	});

	const image = data?.image;
	const hosts = data?.hosts || [];
	const containers = image?.docker_containers || [];
	const updates = image?.docker_image_updates || [];

	if (isLoading) {
		return (
			<div className="flex items-center justify-center min-h-screen">
				<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
			</div>
		);
	}

	if (error || !image) {
		return (
			<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
				<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
					<div className="flex">
						<AlertTriangle className="h-5 w-5 text-red-400" />
						<div className="ml-3">
							<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
								Image not found
							</h3>
						</div>
					</div>
				</div>
				<Link
					to="/docker"
					className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
				>
					<ArrowLeft className="h-4 w-4 mr-2" />
					Back to Docker
				</Link>
			</div>
		);
	}

	return (
		<div className="space-y-6">
			<div>
				<Link
					to="/docker"
					className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
				>
					<ArrowLeft className="h-4 w-4 mr-2" />
					Back to Docker
				</Link>
				<div className="flex items-start justify-between">
					<div className="flex items-center">
						<Package className="h-8 w-8 text-secondary-400 mr-3" />
						<div>
							<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
								{image.repository}:{image.tag}
							</h1>
							<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
								Image ID: {image.image_id.substring(0, 12)}
							</p>
						</div>
					</div>
				</div>
			</div>

			{/* Overview Cards */}
			<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<Container className="h-5 w-5 text-green-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Containers
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{containers.length}
							</p>
						</div>
					</div>
				</div>

				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<Server className="h-5 w-5 text-purple-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Hosts
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{hosts.length}
							</p>
						</div>
					</div>
				</div>

				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<Package className="h-5 w-5 text-blue-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">Size</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{image.size_bytes ? (
									<>{(Number(image.size_bytes) / 1024 / 1024).toFixed(0)} MB</>
								) : (
									"N/A"
								)}
							</p>
						</div>
					</div>
				</div>

				<div className="card p-4">
					<div className="flex items-center">
						<div className="flex-shrink-0">
							<AlertTriangle className="h-5 w-5 text-warning-600 mr-2" />
						</div>
						<div className="w-0 flex-1">
							<p className="text-sm text-secondary-500 dark:text-white">
								Updates
							</p>
							<p className="text-xl font-semibold text-secondary-900 dark:text-white">
								{updates.length}
							</p>
						</div>
					</div>
				</div>
			</div>

			{/* Available Updates with Digest Comparison */}
			{updates.length > 0 && (
				<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
					<div className="flex">
						<AlertTriangle className="h-5 w-5 text-yellow-400" />
						<div className="ml-3 flex-1">
							<h3 className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
								Updates Available
							</h3>
							<div className="mt-2 space-y-3">
								{updates.map((update) => {
									let digestInfo = null;
									try {
										if (update.changelog_url) {
											digestInfo = JSON.parse(update.changelog_url);
										}
									} catch (_e) {
										// Ignore parse errors
									}

									return (
										<div
											key={update.id}
											className="bg-white dark:bg-secondary-800 rounded-lg p-3 border border-yellow-200 dark:border-yellow-700"
										>
											<div className="flex items-center justify-between mb-2">
												<div className="flex items-center gap-2">
													{update.is_security_update && (
														<Shield className="h-4 w-4 text-red-500" />
													)}
													<span className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
														New version available:{" "}
														<span className="font-semibold">
															{update.available_tag}
														</span>
													</span>
												</div>
												{update.is_security_update && (
													<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200">
														Security
													</span>
												)}
											</div>
											{digestInfo &&
												digestInfo.method === "digest_comparison" && (
													<div className="mt-2 pt-2 border-t border-yellow-200 dark:border-yellow-700">
														<p className="text-xs text-secondary-600 dark:text-secondary-400 mb-1">
															Detected via digest comparison:
														</p>
														<div className="font-mono text-xs space-y-1">
															<div className="text-red-600 dark:text-red-400">
																<span className="font-bold">- Current: </span>
																{digestInfo.current_digest}
															</div>
															<div className="text-green-600 dark:text-green-400">
																<span className="font-bold">+ Available: </span>
																{digestInfo.available_digest}
															</div>
														</div>
													</div>
												)}
										</div>
									);
								})}
							</div>
						</div>
					</div>
				</div>
			)}

			{/* Image Information */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Image Information
					</h3>
				</div>
				<div className="px-6 py-5 space-y-6">
					<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Repository
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.repository}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Tag
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.tag}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Source
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.source}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Created
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.created_at
									? formatRelativeTime(image.created_at)
									: "Unknown"}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Image ID
							</dt>
							<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white">
								{image.image_id}
							</dd>
						</div>
						<div>
							<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
								Last Checked
							</dt>
							<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
								{image.last_checked
									? formatRelativeTime(image.last_checked)
									: "Never"}
							</dd>
						</div>
						{image.digest && (
							<div className="sm:col-span-2">
								<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
									Digest
								</dt>
								<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white break-all">
									{image.digest}
								</dd>
							</div>
						)}
					</div>
				</div>
			</div>

			{/* Containers using this image */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Containers ({containers.length})
					</h3>
				</div>
				<div className="overflow-x-auto">
					<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
						<thead className="bg-secondary-50 dark:bg-secondary-900">
							<tr>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Container Name
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Status
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Host
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Actions
								</th>
							</tr>
						</thead>
						<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
							{containers.map((container) => (
								<tr key={container.id}>
									<td className="px-6 py-4 whitespace-nowrap">
										<Link
											to={`/docker/containers/${container.id}`}
											className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
										>
											{container.name}
										</Link>
									</td>
									<td className="px-6 py-4 whitespace-nowrap">
										<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
											{container.status}
										</span>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
										{container.host_id}
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
										<Link
											to={`/docker/containers/${container.id}`}
											className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
										>
											View
											<ExternalLink className="ml-1 h-4 w-4" />
										</Link>
									</td>
								</tr>
							))}
						</tbody>
					</table>
				</div>
			</div>

			{/* Hosts using this image */}
			<div className="card">
				<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
					<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
						Hosts ({hosts.length})
					</h3>
				</div>
				<div className="overflow-x-auto">
					<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
						<thead className="bg-secondary-50 dark:bg-secondary-900">
							<tr>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Host Name
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									IP Address
								</th>
								<th
									scope="col"
									className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
								>
									Actions
								</th>
							</tr>
						</thead>
						<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
							{hosts.map((host) => (
								<tr key={host.id}>
									<td className="px-6 py-4 whitespace-nowrap">
										<Link
											to={`/hosts/${host.id}`}
											className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
										>
											{host.friendly_name || host.hostname}
										</Link>
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
										{host.ip}
									</td>
									<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
										<Link
											to={`/hosts/${host.id}`}
											className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
										>
											View
											<ExternalLink className="ml-1 h-4 w-4" />
										</Link>
									</td>
								</tr>
							))}
						</tbody>
					</table>
				</div>
			</div>
		</div>
	);
};

export default ImageDetail;
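ImageDetail treats update.changelog_url as an optional JSON payload when rendering the digest-comparison block above. A hedged sketch of the object shape that implies (field names come from the JSX; the values and digests are placeholders, not real data):

// What the digest-comparison section expects to find serialized in
// update.changelog_url; JSON.parse failures are swallowed, and the digest
// section is simply not rendered in that case.
const exampleUpdate = {
	id: "update-1",
	available_tag: "1.3.1",
	is_security_update: false,
	changelog_url: JSON.stringify({
		method: "digest_comparison",
		current_digest: "sha256:1111111111111111",
		available_digest: "sha256:2222222222222222",
	}),
};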
File diff suppressed because it is too large
@@ -56,11 +56,23 @@ export const dashboardAPI = {
 		const url = `/dashboard/hosts/${hostId}${queryString ? `?${queryString}` : ""}`;
 		return api.get(url);
 	},
+	getHostQueue: (hostId, params = {}) => {
+		const queryString = new URLSearchParams(params).toString();
+		const url = `/dashboard/hosts/${hostId}/queue${queryString ? `?${queryString}` : ""}`;
+		return api.get(url);
+	},
+	getHostWsStatus: (hostId) => api.get(`/dashboard/hosts/${hostId}/ws-status`),
+	getWsStatusByApiId: (apiId) => api.get(`/ws/status/${apiId}`),
 	getPackageTrends: (params = {}) => {
 		const queryString = new URLSearchParams(params).toString();
 		const url = `/dashboard/package-trends${queryString ? `?${queryString}` : ""}`;
 		return api.get(url);
 	},
+	getPackageSpikeAnalysis: (params = {}) => {
+		const queryString = new URLSearchParams(params).toString();
+		const url = `/dashboard/package-spike-analysis${queryString ? `?${queryString}` : ""}`;
+		return api.get(url);
+	},
 	getRecentUsers: () => api.get("/dashboard/recent-users"),
 	getRecentCollection: () => api.get("/dashboard/recent-collection"),
 };
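A sketch of how the new getHostQueue helper might be consumed with @tanstack/react-query, which the frontend already uses elsewhere; the import path, query key and the limit parameter are illustrative, not taken from this diff:

import { useQuery } from "@tanstack/react-query";
import { dashboardAPI } from "../utils/api"; // path assumed for illustration

// Poll a host's job queue; any params object is URL-encoded by getHostQueue.
export const useHostQueue = (hostId) =>
	useQuery({
		queryKey: ["host-queue", hostId],
		queryFn: async () => {
			const response = await dashboardAPI.getHostQueue(hostId, { limit: 20 });
			return response.data;
		},
		refetchInterval: 30000,
		enabled: Boolean(hostId),
	});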
@@ -75,8 +87,12 @@ export const adminHostsAPI = {
 		api.post(`/hosts/${hostId}/regenerate-credentials`),
 	updateGroup: (hostId, hostGroupId) =>
 		api.put(`/hosts/${hostId}/group`, { hostGroupId }),
+	updateGroups: (hostId, groupIds) =>
+		api.put(`/hosts/${hostId}/groups`, { groupIds }),
 	bulkUpdateGroup: (hostIds, hostGroupId) =>
 		api.put("/hosts/bulk/group", { hostIds, hostGroupId }),
+	bulkUpdateGroups: (hostIds, groupIds) =>
+		api.put("/hosts/bulk/groups", { hostIds, groupIds }),
 	toggleAutoUpdate: (hostId, autoUpdate) =>
 		api.patch(`/hosts/${hostId}/auto-update`, { auto_update: autoUpdate }),
 	updateFriendlyName: (hostId, friendlyName) =>
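The new plural helpers mirror the existing single-group ones; a minimal call-site sketch (the import path and all IDs are placeholders):

import { adminHostsAPI } from "../utils/api"; // path assumed for illustration

async function assignGroups() {
	// One host, several groups.
	await adminHostsAPI.updateGroups("host-uuid-1", ["group-a", "group-b"]);
	// Several hosts, the same set of groups.
	await adminHostsAPI.bulkUpdateGroups(["host-uuid-1", "host-uuid-2"], ["group-a"]);
}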
@@ -205,7 +221,82 @@ export const packagesAPI = {
 };
 
 // Utility functions
+export const isCorsError = (error) => {
+	// Check for browser-level CORS errors (when request is blocked before reaching server)
+	if (error.message?.includes("Failed to fetch") && !error.response) {
+		return true;
+	}
+
+	// Check for TypeError with Failed to fetch (common CORS error pattern)
+	if (
+		error.name === "TypeError" &&
+		error.message?.includes("Failed to fetch")
+	) {
+		return true;
+	}
+
+	// Check for backend CORS errors that get converted to 500 by proxy
+	if (error.response?.status === 500) {
+		// Check if the error message contains CORS-related text
+		if (
+			error.message?.includes("Not allowed by CORS") ||
+			error.message?.includes("CORS") ||
+			error.message?.includes("cors")
+		) {
+			return true;
+		}
+
+		// Check if the response data contains CORS error information
+		if (
+			error.response?.data?.error?.includes("CORS") ||
+			error.response?.data?.error?.includes("cors") ||
+			error.response?.data?.message?.includes("CORS") ||
+			error.response?.data?.message?.includes("cors") ||
+			error.response?.data?.message?.includes("Not allowed by CORS")
+		) {
+			return true;
+		}
+
+		// Check for specific CORS error patterns from backend logs
+		if (
+			error.message?.includes("origin") &&
+			error.message?.includes("callback")
+		) {
+			return true;
+		}
+
+		// Check if this is likely a CORS error based on context
+		// If we're accessing from localhost but CORS_ORIGIN is set to fabio, this is likely CORS
+		const currentOrigin = window.location.origin;
+		if (
+			currentOrigin === "http://localhost:3000" &&
+			error.config?.url?.includes("/api/")
+		) {
+			// This is likely a CORS error when accessing from localhost
+			return true;
+		}
+	}
+
+	// Check for CORS-related errors
+	return (
+		error.message?.includes("CORS") ||
+		error.message?.includes("cors") ||
+		error.message?.includes("Access to fetch") ||
+		error.message?.includes("blocked by CORS policy") ||
+		error.message?.includes("Cross-Origin Request Blocked") ||
+		error.message?.includes("NetworkError when attempting to fetch resource") ||
+		error.message?.includes("ERR_BLOCKED_BY_CLIENT") ||
+		error.message?.includes("ERR_NETWORK") ||
+		error.message?.includes("ERR_CONNECTION_REFUSED")
+	);
+};
+
 export const formatError = (error) => {
+	// Check for CORS-related errors
+	if (isCorsError(error)) {
+		return "CORS_ORIGIN mismatch - please set your URL in your environment variable";
+	}
+
 	if (error.response?.data?.message) {
 		return error.response.data.message;
 	}
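isCorsError and formatError are meant for catch blocks and query error handlers; a short usage sketch (the import path and the surrounding wiring are illustrative):

import { dashboardAPI, formatError, isCorsError } from "../utils/api"; // path assumed

export async function loadRecentCollection(onError) {
	try {
		const { data } = await dashboardAPI.getRecentCollection();
		return data;
	} catch (err) {
		// The CORS heuristic is what turns a proxied 500 into the
		// "CORS_ORIGIN mismatch" hint instead of a generic error message.
		if (isCorsError(err)) console.warn("Likely CORS misconfiguration");
		onError(formatError(err));
		return null;
	}
}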
@@ -1,43 +1,104 @@
 import { Monitor, Server } from "lucide-react";
 import { DiWindows } from "react-icons/di";
-// Import OS icons from react-icons
+// Import OS icons from react-icons Simple Icons - using only confirmed available icons
 import {
+	SiAlmalinux,
 	SiAlpinelinux,
 	SiArchlinux,
 	SiCentos,
 	SiDebian,
+	SiDeepin,
+	SiElementary,
 	SiFedora,
+	SiGentoo,
+	SiKalilinux,
 	SiLinux,
+	SiLinuxmint,
 	SiMacos,
+	SiManjaro,
+	SiOpensuse,
+	SiOracle,
+	SiParrotsecurity,
+	SiPopos,
+	SiRedhat,
+	SiRockylinux,
+	SiSlackware,
+	SiSolus,
+	SiSuse,
+	SiTails,
 	SiUbuntu,
+	SiZorin,
 } from "react-icons/si";
 
 /**
  * OS Icon mapping utility
  * Maps operating system types to appropriate react-icons components
+ * Now uses specific icons based on actual OS names from /etc/os-release
  */
 export const getOSIcon = (osType) => {
 	if (!osType) return Monitor;
 
 	const os = osType.toLowerCase();
 
-	// Linux distributions with authentic react-icons
-	if (os.includes("ubuntu")) return SiUbuntu;
+	// Ubuntu and Ubuntu variants
+	if (os.includes("ubuntu")) {
+		// For Ubuntu variants, use generic Ubuntu icon as fallback
+		return SiUbuntu;
+	}
+
+	// Pop!_OS
+	if (os.includes("pop") || os.includes("pop!_os")) return SiPopos;
+
+	// Linux Mint
+	if (os.includes("mint") || os.includes("linuxmint")) return SiLinuxmint;
+
+	// Elementary OS
+	if (os.includes("elementary")) return SiElementary;
+
+	// Debian
 	if (os.includes("debian")) return SiDebian;
-	if (
-		os.includes("centos") ||
-		os.includes("rhel") ||
-		os.includes("red hat") ||
-		os.includes("almalinux") ||
-		os.includes("rocky")
-	)
-		return SiCentos;
-	if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
-		return SiLinux; // Use generic Linux icon for Oracle Linux
+
+	// Rocky Linux
+	if (os.includes("rocky")) return SiRockylinux;
+
+	// AlmaLinux
+	if (os.includes("alma") || os.includes("almalinux")) return SiAlmalinux;
+
+	// CentOS
+	if (os.includes("centos")) return SiCentos;
+
+	// Red Hat Enterprise Linux
+	if (os.includes("rhel") || os.includes("red hat")) return SiRedhat;
+
+	// Fedora
 	if (os.includes("fedora")) return SiFedora;
+
+	// Oracle Linux
+	if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
+		return SiOracle;
+
+	// SUSE distributions
+	if (os.includes("opensuse")) return SiOpensuse;
+	if (os.includes("suse")) return SiSuse;
+
+	// Arch-based distributions
 	if (os.includes("arch")) return SiArchlinux;
+	if (os.includes("manjaro")) return SiManjaro;
+	if (os.includes("endeavour") || os.includes("endeavouros"))
+		return SiArchlinux; // Fallback to Arch
+	if (os.includes("garuda")) return SiArchlinux; // Fallback to Arch
+	if (os.includes("blackarch")) return SiArchlinux; // Fallback to Arch
+
+	// Other distributions
 	if (os.includes("alpine")) return SiAlpinelinux;
-	if (os.includes("suse") || os.includes("opensuse")) return SiLinux; // SUSE uses generic Linux icon
+	if (os.includes("gentoo")) return SiGentoo;
+	if (os.includes("slackware")) return SiSlackware;
+	if (os.includes("zorin")) return SiZorin;
+	if (os.includes("deepin")) return SiDeepin;
+	if (os.includes("solus")) return SiSolus;
+	if (os.includes("tails")) return SiTails;
+	if (os.includes("parrot")) return SiParrotsecurity;
+	if (os.includes("kali")) return SiKalilinux;
 
 	// Generic Linux
 	if (os.includes("linux")) return SiLinux;
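getOSIcon returns a component, so call sites render the result directly; a small sketch (the ../utils/osIcons import path and the osType prop wiring are assumptions for illustration, not shown in this hunk):

import { getOSIcon } from "../utils/osIcons";

// "ubuntu" -> SiUbuntu, "rocky" -> SiRockylinux; unrecognised strings fall
// back to the generic icons returned at the bottom of getOSIcon.
const OsBadge = ({ osType }) => {
	const Icon = getOSIcon(osType);
	return <Icon className="h-4 w-4" />;
};

export default OsBadge;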
@@ -70,27 +131,83 @@ export const getOSColor = (osType) => {
 /**
  * OS Display name utility
  * Provides clean, formatted OS names for display
+ * Updated to handle more distributions from /etc/os-release
  */
 export const getOSDisplayName = (osType) => {
 	if (!osType) return "Unknown";
 
 	const os = osType.toLowerCase();
 
-	// Linux distributions
-	if (os.includes("ubuntu")) return "Ubuntu";
+	// Ubuntu and variants
+	if (os.includes("ubuntu")) {
+		if (os.includes("kubuntu")) return "Kubuntu";
+		if (os.includes("lubuntu")) return "Lubuntu";
+		if (os.includes("xubuntu")) return "Xubuntu";
+		if (os.includes("ubuntu mate") || os.includes("ubuntumate"))
+			return "Ubuntu MATE";
+		if (os.includes("ubuntu budgie") || os.includes("ubuntubudgie"))
+			return "Ubuntu Budgie";
+		if (os.includes("ubuntu studio") || os.includes("ubuntustudio"))
+			return "Ubuntu Studio";
+		if (os.includes("ubuntu kylin") || os.includes("ubuntukylin"))
+			return "Ubuntu Kylin";
+		return "Ubuntu";
+	}
+
+	// Pop!_OS
+	if (os.includes("pop") || os.includes("pop!_os")) return "Pop!_OS";
+
+	// Linux Mint
+	if (os.includes("mint") || os.includes("linuxmint")) return "Linux Mint";
+
+	// Elementary OS
+	if (os.includes("elementary")) return "Elementary OS";
+
+	// Debian
 	if (os.includes("debian")) return "Debian";
-	if (os.includes("centos")) return "CentOS";
-	if (os.includes("almalinux")) return "AlmaLinux";
+
+	// Rocky Linux
 	if (os.includes("rocky")) return "Rocky Linux";
-	if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
-		return "Oracle Linux";
+
+	// AlmaLinux
+	if (os.includes("alma") || os.includes("almalinux")) return "AlmaLinux";
+
+	// CentOS
+	if (os.includes("centos")) return "CentOS";
+
+	// Red Hat Enterprise Linux
 	if (os.includes("rhel") || os.includes("red hat"))
 		return "Red Hat Enterprise Linux";
+
+	// Fedora
 	if (os.includes("fedora")) return "Fedora";
-	if (os.includes("arch")) return "Arch Linux";
-	if (os.includes("suse")) return "SUSE Linux";
+
+	// Oracle Linux
+	if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
+		return "Oracle Linux";
+
+	// SUSE distributions
 	if (os.includes("opensuse")) return "openSUSE";
+	if (os.includes("suse")) return "SUSE Linux";
+
+	// Arch-based distributions
+	if (os.includes("arch")) return "Arch Linux";
+	if (os.includes("manjaro")) return "Manjaro";
+	if (os.includes("endeavour") || os.includes("endeavouros"))
+		return "EndeavourOS";
+	if (os.includes("garuda")) return "Garuda Linux";
+	if (os.includes("blackarch")) return "BlackArch Linux";
+
+	// Other distributions
 	if (os.includes("alpine")) return "Alpine Linux";
+	if (os.includes("gentoo")) return "Gentoo";
+	if (os.includes("slackware")) return "Slackware";
+	if (os.includes("zorin")) return "Zorin OS";
+	if (os.includes("deepin")) return "Deepin";
+	if (os.includes("solus")) return "Solus";
+	if (os.includes("tails")) return "Tails";
+	if (os.includes("parrot")) return "Parrot Security";
+	if (os.includes("kali")) return "Kali Linux";
 
 	// Generic Linux
 	if (os.includes("linux")) return "Linux";
@@ -37,6 +37,11 @@ export default defineConfig({
 					}
 					: undefined,
 			},
+			"/admin": {
+				target: `http://${process.env.BACKEND_HOST || "localhost"}:${process.env.BACKEND_PORT || "3001"}`,
+				changeOrigin: true,
+				secure: false,
+			},
 		},
 	},
 	build: {
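The added /admin proxy just forwards that path to the backend during development, the same way the existing /api entry does; which routes the backend actually serves under /admin is not shown in this diff. A hedged sketch of the resulting request mapping:

// From the Vite dev server (default http://localhost:3000), a relative
// request to /admin/... is proxied to the backend, so no CORS setup is
// needed for that path. BACKEND_HOST / BACKEND_PORT override the defaults.
export async function pingAdminPath() {
	const response = await fetch("/admin/", { credentials: "include" });
	return response.status; // e.g. 200 once the backend route exists
}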
441 package-lock.json generated
@@ -1,12 +1,12 @@
 {
 	"name": "patchmon",
-	"version": "1.2.7",
+	"version": "1.3.0",
 	"lockfileVersion": 3,
 	"requires": true,
 	"packages": {
 		"": {
 			"name": "patchmon",
-			"version": "1.2.7",
+			"version": "1.3.0",
 			"license": "AGPL-3.0",
 			"workspaces": [
 				"backend",
@@ -23,23 +23,30 @@
 		},
 		"backend": {
 			"name": "patchmon-backend",
-			"version": "1.2.7",
+			"version": "1.3.0",
 			"license": "AGPL-3.0",
 			"dependencies": {
+				"@bull-board/api": "^6.13.1",
+				"@bull-board/express": "^6.13.1",
 				"@prisma/client": "^6.1.0",
+				"axios": "^1.7.9",
 				"bcryptjs": "^2.4.3",
+				"bullmq": "^5.61.0",
+				"cookie-parser": "^1.4.7",
 				"cors": "^2.8.5",
 				"dotenv": "^16.4.7",
 				"express": "^4.21.2",
 				"express-rate-limit": "^7.5.0",
 				"express-validator": "^7.2.0",
 				"helmet": "^8.0.0",
+				"ioredis": "^5.8.1",
 				"jsonwebtoken": "^9.0.2",
 				"moment": "^2.30.1",
 				"qrcode": "^1.5.4",
 				"speakeasy": "^2.0.0",
 				"uuid": "^11.0.3",
-				"winston": "^3.17.0"
+				"winston": "^3.17.0",
+				"ws": "^8.18.0"
 			},
 			"devDependencies": {
 				"@types/bcryptjs": "^2.4.6",
@@ -52,7 +59,7 @@
 		},
 		"frontend": {
 			"name": "patchmon-frontend",
-			"version": "1.2.7",
+			"version": "1.3.0",
 			"license": "AGPL-3.0",
 			"dependencies": {
 				"@dnd-kit/core": "^6.3.1",
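The backend's new dependencies (bullmq, ioredis, @bull-board/*) point at a Redis-backed job queue. A minimal, self-contained sketch of how such a queue is typically wired with these libraries; the queue name, Redis URL and job payload are illustrative and not taken from PatchMon's backend, which this hunk does not show:

const { Queue, Worker } = require("bullmq");
const IORedis = require("ioredis");

// BullMQ requires maxRetriesPerRequest: null on its blocking connections.
const connection = new IORedis(process.env.REDIS_URL || "redis://localhost:6379", {
	maxRetriesPerRequest: null,
});

const queue = new Queue("example-jobs", { connection });

// A worker picks jobs off the same queue name and processes them.
new Worker(
	"example-jobs",
	async (job) => {
		console.log("processing", job.name, job.data);
	},
	{ connection },
);

// Enqueue a job; repeatable/cron jobs rely on cron-parser under the hood.
queue.add("ping", { requestedAt: Date.now() });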
@@ -559,6 +566,39 @@
|
|||||||
"node": ">=14.21.3"
|
"node": ">=14.21.3"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@bull-board/api": {
|
||||||
|
"version": "6.13.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@bull-board/api/-/api-6.13.1.tgz",
|
||||||
|
"integrity": "sha512-L9Ukfd/gxg8VIUb+vXRcU31yJsAaLLKG2qU/OMXQJ5EoXm2JhWBat+26YgrH/oKIb9zbZsg8xwHyqxa7sHEkVg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"redis-info": "^3.1.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@bull-board/ui": "6.13.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@bull-board/express": {
|
||||||
|
"version": "6.13.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@bull-board/express/-/express-6.13.1.tgz",
|
||||||
|
"integrity": "sha512-wipvCsdeMdcgWVc77qrs858OjyGo7IAjJxuuWd4q5dvciFmTU1fmfZddWuZ1jDWpq5P7KdcpGxjzF1vnd2GaUw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@bull-board/api": "6.13.1",
|
||||||
|
"@bull-board/ui": "6.13.1",
|
||||||
|
"ejs": "^3.1.10",
|
||||||
|
"express": "^4.21.1 || ^5.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@bull-board/ui": {
|
||||||
|
"version": "6.13.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@bull-board/ui/-/ui-6.13.1.tgz",
|
||||||
|
"integrity": "sha512-DzPjCFzjEbDukhfSd7nLdTLVKIv5waARQuAXETSRqiKTN4vSA1KNdaJ8p72YwHujKO19yFW1zWjNKrzsa8DCIg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@bull-board/api": "6.13.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@colors/colors": {
|
"node_modules/@colors/colors": {
|
||||||
"version": "1.6.0",
|
"version": "1.6.0",
|
||||||
"resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz",
|
"resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz",
|
||||||
@@ -1074,6 +1114,12 @@
|
|||||||
"node": ">=18"
|
"node": ">=18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@ioredis/commands": {
|
||||||
|
"version": "1.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.4.0.tgz",
|
||||||
|
"integrity": "sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@isaacs/cliui": {
|
"node_modules/@isaacs/cliui": {
|
||||||
"version": "8.0.2",
|
"version": "8.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
|
||||||
@@ -1233,6 +1279,84 @@
|
|||||||
"integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
|
"integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"darwin"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==",
|
||||||
|
"cpu": [
|
||||||
|
"arm"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==",
|
||||||
|
"cpu": [
|
||||||
|
"arm64"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"linux"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==",
|
||||||
|
"cpu": [
|
||||||
|
"x64"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"os": [
|
||||||
|
"win32"
|
||||||
|
]
|
||||||
|
},
|
||||||
"node_modules/@nodelib/fs.scandir": {
|
"node_modules/@nodelib/fs.scandir": {
|
||||||
"version": "2.1.5",
|
"version": "2.1.5",
|
||||||
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
|
||||||
@@ -1992,7 +2116,6 @@
|
|||||||
"version": "1.0.2",
|
"version": "1.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/base32.js": {
|
"node_modules/base32.js": {
|
||||||
@@ -2132,6 +2255,33 @@
|
|||||||
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==",
|
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==",
|
||||||
"license": "BSD-3-Clause"
|
"license": "BSD-3-Clause"
|
||||||
},
|
},
|
||||||
|
"node_modules/bullmq": {
|
||||||
|
"version": "5.61.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.61.0.tgz",
|
||||||
|
"integrity": "sha512-khaTjc1JnzaYFl4FrUtsSsqugAW/urRrcZ9Q0ZE+REAw8W+gkHFqxbGlutOu6q7j7n91wibVaaNlOUMdiEvoSQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"cron-parser": "^4.9.0",
|
||||||
|
"ioredis": "^5.4.1",
|
||||||
|
"msgpackr": "^1.11.2",
|
||||||
|
"node-abort-controller": "^3.1.1",
|
||||||
|
"semver": "^7.5.4",
|
||||||
|
"tslib": "^2.0.0",
|
||||||
|
"uuid": "^11.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/bullmq/node_modules/semver": {
|
||||||
|
"version": "7.7.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
|
||||||
|
"integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
|
||||||
|
"license": "ISC",
|
||||||
|
"bin": {
|
||||||
|
"semver": "bin/semver.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/bytes": {
|
"node_modules/bytes": {
|
||||||
"version": "3.1.2",
|
"version": "3.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||||
@@ -2370,6 +2520,15 @@
|
|||||||
"node": ">=6"
|
"node": ">=6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/cluster-key-slot": {
|
||||||
|
"version": "1.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
|
||||||
|
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/color": {
|
"node_modules/color": {
|
||||||
"version": "3.2.1",
|
"version": "3.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
|
||||||
@@ -2544,6 +2703,28 @@
|
|||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/cookie-parser": {
|
||||||
|
"version": "1.4.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.7.tgz",
|
||||||
|
"integrity": "sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"cookie": "0.7.2",
|
||||||
|
"cookie-signature": "1.0.6"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/cookie-parser/node_modules/cookie": {
|
||||||
|
"version": "0.7.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
|
||||||
|
"integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/cookie-signature": {
|
"node_modules/cookie-signature": {
|
||||||
"version": "1.0.6",
|
"version": "1.0.6",
|
||||||
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
|
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
|
||||||
@@ -2563,6 +2744,18 @@
|
|||||||
"node": ">= 0.10"
|
"node": ">= 0.10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/cron-parser": {
|
||||||
|
"version": "4.9.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
|
||||||
|
"integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"luxon": "^3.2.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/cross-spawn": {
|
"node_modules/cross-spawn": {
|
||||||
"version": "7.0.6",
|
"version": "7.0.6",
|
||||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||||
@@ -2667,6 +2860,15 @@
|
|||||||
"node": ">=0.4.0"
|
"node": ">=0.4.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/denque": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/depd": {
|
"node_modules/depd": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
|
||||||
@@ -2693,6 +2895,16 @@
|
|||||||
"npm": "1.2.8000 || >= 1.4.16"
|
"npm": "1.2.8000 || >= 1.4.16"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/detect-libc": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"optional": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/didyoumean": {
|
"node_modules/didyoumean": {
|
||||||
"version": "1.2.2",
|
"version": "1.2.2",
|
||||||
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
|
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
|
||||||
@@ -2772,6 +2984,21 @@
|
|||||||
"fast-check": "^3.23.1"
|
"fast-check": "^3.23.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/ejs": {
|
||||||
|
"version": "3.1.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz",
|
||||||
|
"integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"jake": "^10.8.5"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"ejs": "bin/cli.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/electron-to-chromium": {
|
"node_modules/electron-to-chromium": {
|
||||||
"version": "1.5.227",
|
"version": "1.5.227",
|
||||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.227.tgz",
|
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.227.tgz",
|
||||||
@@ -3080,6 +3307,36 @@
|
|||||||
"integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==",
|
"integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/filelist": {
|
||||||
|
"version": "1.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz",
|
||||||
|
"integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"minimatch": "^5.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/filelist/node_modules/brace-expansion": {
|
||||||
|
"version": "2.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
|
||||||
|
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"balanced-match": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/filelist/node_modules/minimatch": {
|
||||||
|
"version": "5.1.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
|
||||||
|
"integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"brace-expansion": "^2.0.1"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/fill-range": {
|
"node_modules/fill-range": {
|
||||||
"version": "7.1.1",
|
"version": "7.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
||||||
@@ -3529,6 +3786,30 @@
|
|||||||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
|
"node_modules/ioredis": {
|
||||||
|
"version": "5.8.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.8.1.tgz",
|
||||||
|
"integrity": "sha512-Qho8TgIamqEPdgiMadJwzRMW3TudIg6vpg4YONokGDudy4eqRIJtDbVX72pfLBcWxvbn3qm/40TyGUObdW4tLQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@ioredis/commands": "1.4.0",
|
||||||
|
"cluster-key-slot": "^1.1.0",
|
||||||
|
"debug": "^4.3.4",
|
||||||
|
"denque": "^2.1.0",
|
||||||
|
"lodash.defaults": "^4.2.0",
|
||||||
|
"lodash.isarguments": "^3.1.0",
|
||||||
|
"redis-errors": "^1.2.0",
|
||||||
|
"redis-parser": "^3.0.0",
|
||||||
|
"standard-as-callback": "^2.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12.22.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/ioredis"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/ipaddr.js": {
|
"node_modules/ipaddr.js": {
|
||||||
"version": "1.9.1",
|
"version": "1.9.1",
|
||||||
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
|
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
|
||||||
@@ -3656,6 +3937,23 @@
|
|||||||
"@pkgjs/parseargs": "^0.11.0"
|
"@pkgjs/parseargs": "^0.11.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/jake": {
|
||||||
|
"version": "10.9.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/jake/-/jake-10.9.4.tgz",
|
||||||
|
"integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"dependencies": {
|
||||||
|
"async": "^3.2.6",
|
||||||
|
"filelist": "^1.0.4",
|
||||||
|
"picocolors": "^1.1.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"jake": "bin/cli.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/jiti": {
|
"node_modules/jiti": {
|
||||||
"version": "2.6.0",
|
"version": "2.6.0",
|
||||||
"resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.0.tgz",
|
"resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.0.tgz",
|
||||||
@@ -3960,12 +4258,24 @@
|
|||||||
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
|
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/lodash.defaults": {
|
||||||
|
"version": "4.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
|
||||||
|
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/lodash.includes": {
|
"node_modules/lodash.includes": {
|
||||||
"version": "4.3.0",
|
"version": "4.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
|
||||||
"integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==",
|
"integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/lodash.isarguments": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/lodash.isboolean": {
|
"node_modules/lodash.isboolean": {
|
||||||
"version": "3.0.3",
|
"version": "3.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
|
||||||
@@ -4050,6 +4360,15 @@
|
|||||||
"react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc"
|
"react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/luxon": {
|
||||||
|
"version": "3.7.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz",
|
||||||
|
"integrity": "sha512-vtEhXh/gNjI9Yg1u4jX/0YVPMvxzHuGgCm6tC5kZyb08yjGWGnqAjGJvcXbqQR2P3MyMEFnRbpcdFS6PBcLqew==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/math-intrinsics": {
|
"node_modules/math-intrinsics": {
|
||||||
"version": "1.1.0",
|
"version": "1.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
|
||||||
@@ -4180,6 +4499,37 @@
|
|||||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/msgpackr": {
|
||||||
|
"version": "1.11.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz",
|
||||||
|
"integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"optionalDependencies": {
|
||||||
|
"msgpackr-extract": "^3.0.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/msgpackr-extract": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
|
||||||
|
"hasInstallScript": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"node-gyp-build-optional-packages": "5.2.2"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"download-msgpackr-prebuilds": "bin/download-prebuilds.js"
|
||||||
|
},
|
||||||
|
"optionalDependencies": {
|
||||||
|
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
|
||||||
|
"@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
|
||||||
|
"@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
|
||||||
|
"@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
|
||||||
|
"@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
|
||||||
|
"@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/mz": {
|
"node_modules/mz": {
|
||||||
"version": "2.7.0",
|
"version": "2.7.0",
|
||||||
"resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
|
"resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
|
||||||
@@ -4220,6 +4570,12 @@
|
|||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/node-abort-controller": {
|
||||||
|
"version": "3.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz",
|
||||||
|
"integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/node-fetch-native": {
|
"node_modules/node-fetch-native": {
|
||||||
"version": "1.6.7",
|
"version": "1.6.7",
|
||||||
"resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz",
|
"resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz",
|
||||||
@@ -4227,6 +4583,21 @@
|
|||||||
"devOptional": true,
|
"devOptional": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/node-gyp-build-optional-packages": {
|
||||||
|
"version": "5.2.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
|
||||||
|
"integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"optional": true,
|
||||||
|
"dependencies": {
|
||||||
|
"detect-libc": "^2.0.1"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"node-gyp-build-optional-packages": "bin.js",
|
||||||
|
"node-gyp-build-optional-packages-optional": "optional.js",
|
||||||
|
"node-gyp-build-optional-packages-test": "build-test.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/node-releases": {
|
"node_modules/node-releases": {
|
||||||
"version": "2.0.21",
|
"version": "2.0.21",
|
||||||
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz",
|
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz",
|
||||||
@@ -4532,7 +4903,6 @@
|
|||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
||||||
"dev": true,
|
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
"node_modules/picomatch": {
|
"node_modules/picomatch": {
|
||||||
@@ -5090,6 +5460,36 @@
|
|||||||
"node": ">=8.10.0"
|
"node": ">=8.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/redis-errors": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
|
||||||
|
"integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/redis-info": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/redis-info/-/redis-info-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-ER4L9Sh/vm63DkIE0bkSjxluQlioBiBgf5w1UuldaW/3vPcecdljVDisZhmnCMvsxHNiARTTDDHGg9cGwTfrKg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"lodash": "^4.17.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/redis-parser": {
|
||||||
|
"version": "3.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
|
||||||
|
"integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"redis-errors": "^1.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=4"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/require-directory": {
|
"node_modules/require-directory": {
|
||||||
"version": "2.1.1",
|
"version": "2.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
||||||
@@ -5541,6 +5941,12 @@
|
|||||||
"node": "*"
|
"node": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/standard-as-callback": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/statuses": {
|
"node_modules/statuses": {
|
||||||
"version": "2.0.1",
|
"version": "2.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||||
@@ -6188,6 +6594,27 @@
|
|||||||
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/ws": {
|
||||||
|
"version": "8.18.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
|
||||||
|
"integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"bufferutil": "^4.0.1",
|
||||||
|
"utf-8-validate": ">=5.0.2"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"bufferutil": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"utf-8-validate": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/y18n": {
|
"node_modules/y18n": {
|
||||||
"version": "5.0.8",
|
"version": "5.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
|
||||||
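To confirm the new packages resolve after an `npm install` against this lock file, a quick check can be run from the directory where the install was done — a minimal sketch, with an illustrative selection of package names taken from the list above:

```bash
# Spot-check a few of the newly added runtime dependencies
npm ls ioredis ws luxon msgpackr
# Expected: each package reported at the versions pinned above
# (ioredis@5.8.1, ws@8.18.3, luxon@3.7.2, msgpackr@1.11.5)
```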
@@ -1,6 +1,6 @@
 {
   "name": "patchmon",
-  "version": "1.2.7",
+  "version": "1.3.0",
   "description": "Linux Patch Monitoring System",
   "license": "AGPL-3.0",
   "private": true,
722 setup.sh
@@ -34,7 +34,7 @@ BLUE='\033[0;34m'
 NC='\033[0m' # No Color

 # Global variables
-SCRIPT_VERSION="self-hosting-install.sh v1.2.7-selfhost-2025-01-20-1"
+SCRIPT_VERSION="self-hosting-install.sh v1.3.0-selfhost-2025-10-19-1"
 DEFAULT_GITHUB_REPO="https://github.com/PatchMon/PatchMon.git"
 FQDN=""
 CUSTOM_FQDN=""
@@ -60,6 +60,9 @@ SERVICE_USE_LETSENCRYPT="true" # Will be set based on user input
 SERVER_PROTOCOL_SEL="https"
 SERVER_PORT_SEL="" # Will be set to BACKEND_PORT in init_instance_vars
 SETUP_NGINX="true"
+UPDATE_MODE="false"
+SELECTED_INSTANCE=""
+SELECTED_SERVICE_NAME=""

 # Functions
 print_status() {
@@ -433,6 +436,57 @@ generate_jwt_secret() {
 	openssl rand -base64 64 | tr -d "=+/" | cut -c1-50
 }

+# Generate Redis password
+generate_redis_password() {
+	openssl rand -base64 32 | tr -d "=+/" | cut -c1-25
+}
+
+# Find next available Redis database
+find_next_redis_db() {
+	print_info "Finding next available Redis database..."
+
+	# Start from database 0 and keep checking until we find an empty one
+	local db_num=0
+	local max_attempts=16 # Redis default is 16 databases
+
+	while [ $db_num -lt $max_attempts ]; do
+		# Test if database is empty
+		local key_count
+		local redis_output
+
+		# Try to get database size
+		redis_output=$(redis-cli -h localhost -p 6379 -n "$db_num" DBSIZE 2>&1)
+
+		# Check for errors
+		if echo "$redis_output" | grep -q "ERR"; then
+			if echo "$redis_output" | grep -q "invalid DB index"; then
+				print_warning "Reached maximum database limit at database $db_num"
+				break
+			else
+				print_error "Error checking database $db_num: $redis_output"
+				return 1
+			fi
+		fi
+
+		key_count="$redis_output"
+
+		# If database is empty, use it
+		if [ "$key_count" = "0" ]; then
+			print_status "Found available Redis database: $db_num (empty)"
+			echo "$db_num"
+			return 0
+		fi
+
+		print_info "Database $db_num has $key_count keys, checking next..."
+		db_num=$((db_num + 1))
+	done
+
+	print_warning "No available Redis databases found (checked 0-$max_attempts)"
+	print_info "Using database 0 (may have existing data)"
+	echo "0"
+	return 0
+}
+
 # Initialize instance variables
 init_instance_vars() {
 	echo "[$(date '+%Y-%m-%d %H:%M:%S')] init_instance_vars function started" >> "$DEBUG_LOG"
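For reference, the same "which logical database is free?" question can be answered by hand before the installer picks one — a minimal sketch assuming a local Redis that has not had ACL authentication applied yet:

```bash
# List only the non-empty logical databases on a local Redis
redis-cli -h localhost -p 6379 INFO keyspace
# Output lines look like "db0:keys=42,expires=0,avg_ttl=0";
# any database index that does not appear is empty and is a candidate for allocation.
```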
@@ -464,6 +518,12 @@ init_instance_vars() {
 	echo "[$(date '+%Y-%m-%d %H:%M:%S')] Generating JWT secret..." >> "$DEBUG_LOG"
 	JWT_SECRET=$(generate_jwt_secret)

+	echo "[$(date '+%Y-%m-%d %H:%M:%S')] Generating Redis password..." >> "$DEBUG_LOG"
+	REDIS_PASSWORD=$(generate_redis_password)
+
+	echo "[$(date '+%Y-%m-%d %H:%M:%S')] Finding next available Redis database..." >> "$DEBUG_LOG"
+	REDIS_DB=$(find_next_redis_db)
+
 	echo "[$(date '+%Y-%m-%d %H:%M:%S')] Generating random backend port..." >> "$DEBUG_LOG"

 	# Generate random backend port (3001-3999)
@@ -567,6 +627,150 @@ install_postgresql() {
 	fi
 }

+# Install Redis
+install_redis() {
+	print_info "Installing Redis..."
+
+	if systemctl is-active --quiet redis-server; then
+		print_status "Redis already running"
+	else
+		$PKG_INSTALL redis-server
+		systemctl start redis-server
+		systemctl enable redis-server
+		print_status "Redis installed and started"
+	fi
+}
+
+# Configure Redis with user authentication
+configure_redis() {
+	print_info "Configuring Redis with user authentication..."
+
+	# Check if Redis is running
+	if ! systemctl is-active --quiet redis-server; then
+		print_error "Redis is not running. Please start Redis first."
+		return 1
+	fi
+
+	# Generate Redis username based on instance
+	REDIS_USER="patchmon_${DB_SAFE_NAME}"
+
+	# Generate separate user password (more secure than reusing admin password)
+	REDIS_USER_PASSWORD=$(openssl rand -base64 32 | tr -d "=+/" | cut -c1-32)
+
+	print_info "Creating Redis user: $REDIS_USER for database $REDIS_DB"
+
+	# Create Redis configuration backup
+	if [ -f /etc/redis/redis.conf ]; then
+		cp /etc/redis/redis.conf /etc/redis/redis.conf.backup.$(date +%Y%m%d_%H%M%S)
+		print_info "Created Redis configuration backup"
+	fi
+
+	# Configure Redis with ACL authentication
+	print_info "Configuring Redis with ACL authentication"
+
+	# Ensure ACL file exists and is configured
+	if [ ! -f /etc/redis/users.acl ]; then
+		touch /etc/redis/users.acl
+		chown redis:redis /etc/redis/users.acl
+		chmod 640 /etc/redis/users.acl
+		print_status "Created Redis ACL file"
+	fi
+
+	# Configure ACL file in redis.conf
+	if ! grep -q "^aclfile" /etc/redis/redis.conf; then
+		echo "aclfile /etc/redis/users.acl" >> /etc/redis/redis.conf
+		print_status "Added ACL file configuration to Redis"
+	fi
+
+	# Remove any requirepass configuration (incompatible with ACL)
+	if grep -q "^requirepass" /etc/redis/redis.conf; then
+		sed -i 's/^requirepass.*/# &/' /etc/redis/redis.conf
+		print_status "Disabled requirepass (incompatible with ACL)"
+	fi
+
+	# Remove any user definitions from redis.conf (should be in ACL file)
+	if grep -q "^user " /etc/redis/redis.conf; then
+		sed -i '/^user /d' /etc/redis/redis.conf
+		print_status "Removed user definitions from redis.conf"
+	fi
+
+	# Create admin user in ACL file if it doesn't exist
+	if ! grep -q "^user admin" /etc/redis/users.acl; then
+		echo "user admin on sanitize-payload >$REDIS_PASSWORD ~* &* +@all" >> /etc/redis/users.acl
+		print_status "Added admin user to ACL file"
+	fi
+
+	# Restart Redis to apply ACL configuration
+	print_info "Restarting Redis to apply ACL configuration..."
+	systemctl restart redis-server
+
+	# Wait for Redis to start
+	sleep 3
+
+	# Test admin connection
+	if ! redis-cli -h 127.0.0.1 -p 6379 --user admin --pass "$REDIS_PASSWORD" --no-auth-warning ping > /dev/null 2>&1; then
+		print_error "Failed to configure Redis ACL authentication"
+		return 1
+	fi
+
+	print_status "Redis ACL authentication configuration successful"
+
+	# Create Redis user with ACL
+	print_info "Creating Redis ACL user: $REDIS_USER"
+
+	# Create user with password and permissions - capture output for error handling
+	local acl_result
+	acl_result=$(redis-cli -h 127.0.0.1 -p 6379 --user admin --pass "$REDIS_PASSWORD" --no-auth-warning ACL SETUSER "$REDIS_USER" on ">${REDIS_USER_PASSWORD}" ~* +@all 2>&1)
+
+	if [ "$acl_result" = "OK" ]; then
+		print_status "Redis user '$REDIS_USER' created successfully"
+
+		# Save ACL users to file to persist across restarts
+		local save_result
+		save_result=$(redis-cli -h 127.0.0.1 -p 6379 --user admin --pass "$REDIS_PASSWORD" --no-auth-warning ACL SAVE 2>&1)
+
+		if [ "$save_result" = "OK" ]; then
+			print_status "Redis ACL users saved to file"
+		else
+			print_warning "Failed to save ACL users to file: $save_result"
+		fi
+
+		# Verify user was actually created
+		local verify_result
+		verify_result=$(redis-cli -h 127.0.0.1 -p 6379 --user admin --pass "$REDIS_PASSWORD" --no-auth-warning ACL GETUSER "$REDIS_USER" 2>&1)
+
+		if [ "$verify_result" = "(nil)" ]; then
+			print_error "User creation reported OK but user does not exist"
+			return 1
+		fi
+	else
+		print_error "Failed to create Redis user: $acl_result"
+		return 1
+	fi
+
+	# Test user connection
+	print_info "Testing Redis user connection..."
+	if redis-cli -h 127.0.0.1 -p 6379 --user "$REDIS_USER" --pass "$REDIS_USER_PASSWORD" --no-auth-warning -n "$REDIS_DB" ping > /dev/null 2>&1; then
+		print_status "Redis user connection test successful"
+	else
+		print_error "Redis user connection test failed"
+		return 1
+	fi
+
+	# Mark the selected database as in-use
+	redis-cli -h 127.0.0.1 -p 6379 --user "$REDIS_USER" --pass "$REDIS_USER_PASSWORD" --no-auth-warning -n "$REDIS_DB" SET "patchmon:initialized" "$(date -u +%Y-%m-%dT%H:%M:%SZ)" > /dev/null
+	print_status "Marked Redis database $REDIS_DB as in-use"
+
+	# Update .env with the USER PASSWORD, not admin password
+	echo "REDIS_USER=$REDIS_USER" >> .env
+	echo "REDIS_PASSWORD=$REDIS_USER_PASSWORD" >> .env
+	echo "REDIS_DB=$REDIS_DB" >> .env
+
+	print_status "Redis user password: $REDIS_USER_PASSWORD"
+
+	return 0
+}
+
 # Install nginx
 install_nginx() {
 	print_info "Installing nginx..."
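After configure_redis has run, the ACL user it created can be checked by hand. A minimal sketch — the instance name, database index, and password below are placeholders standing in for the values the installer prints and appends to the instance's backend/.env:

```bash
# Ping the assigned database as the per-instance ACL user (expect: PONG)
redis-cli -h 127.0.0.1 -p 6379 \
  --user "patchmon_myinstance" --pass "GENERATED_USER_PASSWORD" \
  --no-auth-warning -n 3 ping

# The installer also sets this marker key when it claims the database
redis-cli -h 127.0.0.1 -p 6379 \
  --user "patchmon_myinstance" --pass "GENERATED_USER_PASSWORD" \
  --no-auth-warning -n 3 GET patchmon:initialized
```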
@@ -642,31 +846,61 @@ EOF

 # Setup database for instance
 setup_database() {
-	print_info "Creating database: $DB_NAME"
+	print_info "Setting up database: $DB_NAME"

 	# Check if sudo is available for user switching
 	if command -v sudo >/dev/null 2>&1; then
-		# Drop and recreate database and user for clean state
-		sudo -u postgres psql -c "DROP DATABASE IF EXISTS $DB_NAME;" || true
-		sudo -u postgres psql -c "DROP USER IF EXISTS $DB_USER;" || true
-
-		# Create database and user
-		sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
-		sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
+		# Check if user exists
+		user_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'" || echo "0")
+
+		if [ "$user_exists" = "1" ]; then
+			print_info "Database user $DB_USER already exists, skipping creation"
+		else
+			print_info "Creating database user $DB_USER"
+			sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
+		fi
+
+		# Check if database exists
+		db_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" || echo "0")
+
+		if [ "$db_exists" = "1" ]; then
+			print_info "Database $DB_NAME already exists, skipping creation"
+		else
+			print_info "Creating database $DB_NAME"
+			sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
+		fi
+
+		# Always grant privileges (in case they were revoked)
 		sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;"
 	else
 		# Alternative method for systems without sudo (run as postgres user directly)
 		print_warning "sudo not available, using alternative method for PostgreSQL setup"

-		# Switch to postgres user using su
-		su - postgres -c "psql -c \"DROP DATABASE IF EXISTS $DB_NAME;\"" || true
-		su - postgres -c "psql -c \"DROP USER IF EXISTS $DB_USER;\"" || true
-		su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
-		su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
	[the su fallback branch gains the same user/database existence checks as the sudo
	 branch above, run via `su - postgres -c "psql -tAc ..."`, before conditionally
	 creating the user and database]
+		# Always grant privileges (in case they were revoked)
 		su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;\""
 	fi

-	print_status "Database $DB_NAME created with user $DB_USER"
+	print_status "Database setup complete for $DB_NAME"
 }

 # Clone application repository
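The existence checks the reworked setup_database relies on can also be run manually to see why re-running the installer is now safe. A minimal sketch, with illustrative role and database names in place of the generated $DB_USER / $DB_NAME:

```bash
# Prints 1 if the role already exists, nothing otherwise
sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='patchmon_example_user'"

# Prints 1 if the database already exists, nothing otherwise
sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='patchmon_example_db'"
```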
@@ -825,6 +1059,13 @@ AUTH_RATE_LIMIT_MAX=500
 AGENT_RATE_LIMIT_WINDOW_MS=60000
 AGENT_RATE_LIMIT_MAX=1000

+# Redis Configuration
+REDIS_HOST=localhost
+REDIS_PORT=6379
+REDIS_USER=$REDIS_USER
+REDIS_PASSWORD=$REDIS_PASSWORD
+REDIS_DB=$REDIS_DB
+
 # Logging
 LOG_LEVEL=info
 ENABLE_LOGGING=true
@@ -834,7 +1075,7 @@ EOF
 	cat > frontend/.env << EOF
 VITE_API_URL=$SERVER_PROTOCOL_SEL://$FQDN/api/v1
 VITE_APP_NAME=PatchMon
-VITE_APP_VERSION=1.2.7
+VITE_APP_VERSION=1.3.0
 EOF

 	print_status "Environment files created"
@@ -928,7 +1169,47 @@
 		add_header X-XSS-Protection "1; mode=block";
 	}

+	# Bull Board proxy
+	location /bullboard {
+		proxy_pass http://127.0.0.1:$BACKEND_PORT;
+		proxy_set_header Host \$host;
+		proxy_set_header X-Real-IP \$remote_addr;
+		proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
+		proxy_set_header X-Forwarded-Proto \$scheme;
+		proxy_set_header X-Forwarded-Host \$host;
+
+		# CORS headers for Bull Board
+		add_header Access-Control-Allow-Origin * always;
+		add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
+		add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
+
+		# Handle preflight requests
+		if (\$request_method = 'OPTIONS') {
+			return 204;
+		}
+	}
+
 	# API routes
	[the identical /bullboard location block above is added a second time here, between
	 the "# API routes" comment and the location /api/ block]
+
 	location /api/ {
 		proxy_pass http://127.0.0.1:$BACKEND_PORT;
 		proxy_http_version 1.1;
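Once nginx has reloaded with this configuration, the new proxy and its CORS preflight handling can be exercised with a couple of illustrative curl calls (the hostname is a placeholder for the instance FQDN):

```bash
# Preflight: nginx should answer directly with 204 and the Access-Control-* headers
curl -i -X OPTIONS https://patchmon.example.com/bullboard

# Normal request: should be proxied through to the backend's Bull Board endpoint
curl -I https://patchmon.example.com/bullboard
```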
@@ -1009,7 +1290,47 @@
 		add_header X-XSS-Protection "1; mode=block";
 	}

	[the same pair of /bullboard location blocks as in the previous hunk is inserted in
	 this server block as well, once before and once after the "# API proxy" comment]

 	# API proxy
 	location /api/ {
 		proxy_pass http://127.0.0.1:$BACKEND_PORT;
 		proxy_http_version 1.1;
@@ -1078,7 +1399,47 @@
 		add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
 	}

	[the same pair of /bullboard location blocks is inserted in this server block as
	 well, once before and once after the "# API routes" comment]

 	# API routes
 	location /api/ {
 		proxy_pass http://127.0.0.1:$BACKEND_PORT;
 		proxy_http_version 1.1;
@@ -1206,7 +1567,7 @@ create_agent_version() {

 	# Priority 2: Use fallback version if not found
 	if [ "$current_version" = "N/A" ] || [ -z "$current_version" ]; then
-		current_version="1.2.7"
+		current_version="1.3.0"
 		print_warning "Could not determine version, using fallback: $current_version"
 	fi

@@ -1323,6 +1684,13 @@ Database Information:
 - Host: localhost
 - Port: 5432

+Redis Information:
+- Host: localhost
+- Port: 6379
+- User: $REDIS_USER
+- Password: $REDIS_PASSWORD
+- Database: $REDIS_DB
+
 Networking:
 - Backend Port: $BACKEND_PORT
 - Nginx Config: /etc/nginx/sites-available/$FQDN
@@ -1474,6 +1842,9 @@ deploy_instance() {
 	echo -e "${YELLOW}Database Name: $DB_NAME${NC}"
 	echo -e "${YELLOW}Database User: $DB_USER${NC}"
 	echo -e "${YELLOW}Database Password: $DB_PASS${NC}"
+	echo -e "${YELLOW}Redis User: $REDIS_USER${NC}"
+	echo -e "${YELLOW}Redis Password: $REDIS_PASSWORD${NC}"
+	echo -e "${YELLOW}Redis Database: $REDIS_DB${NC}"
 	echo -e "${YELLOW}JWT Secret: $JWT_SECRET${NC}"
 	echo -e "${YELLOW}Backend Port: $BACKEND_PORT${NC}"
 	echo -e "${YELLOW}Instance User: $INSTANCE_USER${NC}"
@@ -1483,6 +1854,8 @@ deploy_instance() {
 	# System setup (prerequisites already installed in interactive_setup)
 	install_nodejs
 	install_postgresql
+	install_redis
+	configure_redis
 	install_nginx

 	# Only install certbot if SSL is enabled
@@ -1550,11 +1923,295 @@
 	:
 }

	[this hunk adds three new functions for the update workflow, summarised here:]

	detect_installations() — scans /opt/*/ for directories whose backend/package.json
	mentions "patchmon", skipping *.backup.* directories, and echoes the list of
	installation names.

	select_installation_to_update() — builds a numbered menu from detect_installations(),
	showing each installation's version (read from backend/package.json) and its systemd
	status. Service units are probed under three naming conventions: "<name>",
	"patchmon.<name>", and "patchmon-<name with dots replaced by underscores>". The chosen
	entry is stored in SELECTED_INSTANCE and its unit name in SELECTED_SERVICE_NAME; the
	prompt repeats until a valid number is entered.

	update_installation() — updates /opt/$SELECTED_INSTANCE in place:
	  - requires the directory to be a git repository and adds it to git safe.directory
	  - loads backend/.env and parses DATABASE_URL into DB_USER, DB_PASS, DB_HOST,
	    DB_PORT and DB_NAME
	  - runs select_branch, asks for confirmation, then stops $SELECTED_SERVICE_NAME
	  - backs up the database with `pg_dump -F c` and copies the code tree into
	    /opt/<instance>.backup.<timestamp>/
	  - runs git clean -fd, git reset --hard HEAD, git fetch origin, then checks out and
	    pulls $DEPLOYMENT_BRANCH
	  - reinstalls dependencies (backend: npm install --production --ignore-scripts;
	    frontend: npm install --ignore-scripts && npm run build), then runs
	    npx prisma generate and npx prisma migrate deploy in the backend
	  - restarts the service; on success it prints the new version and the backup
	    locations plus the pg_restore command to roll the database back, on failure it
	    prints step-by-step rollback instructions (restore the code backup, pg_restore
	    the dump, restart the service) and exits 1

 # Main script execution
 main() {
-	# Log script entry
-	echo "[$(date '+%Y-%m-%d %H:%M:%S')] Interactive installation started" >> "$DEBUG_LOG"
+	# Parse command-line arguments
+	if [ "$1" = "--update" ]; then
+		UPDATE_MODE="true"
+	fi
+
+	# Log script entry
+	echo "[$(date '+%Y-%m-%d %H:%M:%S')] Script started - Update mode: $UPDATE_MODE" >> "$DEBUG_LOG"
+
+	# Handle update mode
+	if [ "$UPDATE_MODE" = "true" ]; then
+		print_banner
+		print_info "🔄 PatchMon Update Mode"
+		echo ""
+
+		# Select installation to update
+		select_installation_to_update
+
+		# Perform update
+		update_installation
+
+		exit 0
+	fi
+
+	# Normal installation mode
 	# Run interactive setup
 	interactive_setup
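Before trusting the automatic backup that --update creates, the dump and the restarted service can be checked by hand. A minimal sketch — instance path, timestamp, and unit name are placeholders for the values the script prints:

```bash
# The update routine writes a custom-format dump (pg_dump -F c); list its contents
pg_restore --list /opt/myinstance.backup.20251019_120000/database_backup_20251019_120000.sql | head

# Confirm the service the script restarted is healthy and inspect recent logs
systemctl status patchmon.myinstance --no-pager
journalctl -u patchmon.myinstance -n 50 --no-pager
```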
@@ -1588,5 +2245,30 @@ main() {
 	echo "[$(date '+%Y-%m-%d %H:%M:%S')] deploy_instance function completed" >> "$DEBUG_LOG"
 }

-# Run main function (no arguments needed for interactive mode)
-main
+# Show usage/help
+show_usage() {
+	echo "PatchMon Self-Hosting Installation & Update Script"
+	echo "Version: $SCRIPT_VERSION"
+	echo ""
+	echo "Usage:"
+	echo "  $0           # Interactive installation (default)"
+	echo "  $0 --update  # Update existing installation"
+	echo "  $0 --help    # Show this help message"
+	echo ""
+	echo "Examples:"
+	echo "  # New installation:"
+	echo "  sudo bash $0"
+	echo ""
+	echo "  # Update existing installation:"
+	echo "  sudo bash $0 --update"
+	echo ""
+}
+
+# Check for help flag
+if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
+	show_usage
+	exit 0
+fi
+
+# Run main function
+main "$@"
263 tools/setup-redis.sh (new executable file)
@@ -0,0 +1,263 @@
+#!/bin/bash
+
+# redis-setup.sh - Redis Database and User Setup for PatchMon
+# This script creates a dedicated Redis database and user for a PatchMon instance
+
+set -e

	[the remaining ~250 lines of this new helper mirror the Redis logic added to setup.sh
	above, packaged as a standalone script for existing installations:]

	- Connection settings come from the environment, with defaults: REDIS_HOST
	  (localhost), REDIS_PORT (6379), and REDIS_ADMIN_PASSWORD (placeholder
	  "YOURREDISPASSHERE").
	- check_redis_connection() pings Redis, using `--user admin --pass
	  "$REDIS_ADMIN_PASSWORD" --no-auth-warning` when an admin password is set, and
	  aborts with guidance if the connection fails.
	- find_next_db() walks database indexes 0..99 with DBSIZE until it finds an empty
	  one; it exits with hints on NOAUTH ("export REDIS_ADMIN_PASSWORD=...") or
	  "invalid DB index" (increase the `databases` setting in redis.conf) errors. Its
	  status messages go to stderr so the selected index is the only thing on stdout.
	- create_redis_user() runs `ACL SETUSER "$username" on ">${password}" ~* +@all`,
	  persists users with ACL SAVE, and verifies the result with ACL GETUSER.
	- test_user_connection() pings the selected database as the new user.
	- mark_database_in_use() sets the key "patchmon:initialized" to the current UTC
	  timestamp so the database is not handed out again on the next run.
	- main() generates a random "patchmon_<8 chars>" username and a 32-character
	  password, runs the steps above, and finishes by printing the block to append to
	  the instance's .env file:

+echo "REDIS_HOST=$REDIS_HOST"
+echo "REDIS_PORT=$REDIS_PORT"
+echo "REDIS_USER=$USERNAME"
+echo "REDIS_PASSWORD=$PASSWORD"
+echo "REDIS_DB=$DB_NUM"
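A typical invocation of the standalone helper, passing the admin password through the environment as the script itself suggests (path and password are illustrative):

```bash
# Run against a local Redis that already has the ACL "admin" user configured
REDIS_ADMIN_PASSWORD='your_admin_password' ./tools/setup-redis.sh

# Then paste the printed REDIS_* block into the instance's backend/.env
# and restart the PatchMon backend service so it picks up the new credentials.
```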