Compare commits


1 Commit

Author: renovate[bot]
SHA1: fb0f6ba028
Message: Update dependency express to v5
Date: 2025-10-17 21:44:14 +00:00
70 changed files with 1631 additions and 5027 deletions

View File

@@ -1,34 +0,0 @@
# Environment files
**/.env
**/.env.*
**/env.example
# Node modules
**/node_modules
# Logs
**/logs
**/*.log
# Git
**/.git
**/.gitignore
# IDE files
**/.vscode
**/.idea
**/*.swp
**/*.swo
# OS files
**/.DS_Store
**/Thumbs.db
# Build artifacts
**/dist
**/build
**/coverage
# Temporary files
**/tmp
**/temp

View File

@@ -1,12 +1,12 @@
#!/bin/bash
# PatchMon Agent Script v1.2.8
# PatchMon Agent Script v1.2.9
# This script sends package update information to the PatchMon server using API credentials
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.8"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-agent.log"
@@ -38,21 +38,21 @@ error() {
exit 1
}
# Info logging (cleaner output - only stdout, no duplicate logging)
# Info logging (cleaner output - only stderr, no duplicate logging)
info() {
echo -e "${BLUE} $1${NC}"
echo -e "${BLUE} $1${NC}" >&2
log "INFO: $1"
}
# Success logging (cleaner output - only stdout, no duplicate logging)
# Success logging (cleaner output - only stderr, no duplicate logging)
success() {
echo -e "${GREEN}$1${NC}"
echo -e "${GREEN}$1${NC}" >&2
log "SUCCESS: $1"
}
# Warning logging (cleaner output - only stdout, no duplicate logging)
# Warning logging (cleaner output - only stderr, no duplicate logging)
warning() {
echo -e "${YELLOW}⚠️ $1${NC}"
echo -e "${YELLOW}⚠️ $1${NC}" >&2
log "WARNING: $1"
}
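The change above redirects info/success/warning output from stdout to stderr so that functions whose stdout is captured (for example via command substitution) are not polluted by log lines. A minimal sketch of the pattern, using a hypothetical get_value function that is not part of the agent script:

# Sketch only: log to stderr so command substitution captures data, not log noise
log_info() { echo "INFO: $1" >&2; }

get_value() {
    log_info "computing value"   # goes to the terminal via stderr
    echo "42"                    # goes to stdout and is captured by the caller
}

result="$(get_value)"            # result is "42"; the INFO line is not captured
echo "captured: $result"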
@@ -709,6 +709,135 @@ get_package_info() {
echo "$packages_json"
}
# Check and handle APT locks
handle_apt_locks() {
local interactive=${1:-false} # First parameter indicates if running interactively
local lock_files=(
"/var/lib/dpkg/lock"
"/var/lib/dpkg/lock-frontend"
"/var/lib/apt/lists/lock"
"/var/cache/apt/archives/lock"
)
local processes_found=false
local hung_processes=()
# Check for running APT processes
if pgrep -x "apt-get|apt|aptitude|dpkg|unattended-upgr" > /dev/null 2>&1; then
processes_found=true
info "Found running package management processes:"
echo "" >&2
# Get process info with ACTUAL elapsed time (not CPU time)
# Using ps -eo format to get real elapsed time
while IFS= read -r line; do
[[ -z "$line" ]] && continue
local pid=$(echo "$line" | awk '{print $1}')
local elapsed=$(echo "$line" | awk '{print $2}')
local cmd=$(echo "$line" | awk '{for(i=3;i<=NF;i++) printf "%s ", $i; print ""}')
# Display process info
echo " PID $pid: $cmd (running for $elapsed)" >&2
# Parse elapsed time and convert to seconds
# Format can be: MM:SS, HH:MM:SS, DD-HH:MM:SS, or just SS
# Use 10# prefix to force base-10 (avoid octal interpretation of leading zeros)
local runtime_seconds=0
if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then
# Format: DD-HH:MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then
# Format: HH:MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then
# Format: MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then
# Format: just seconds
runtime_seconds=$((10#${BASH_REMATCH[1]}))
fi
# Consider process hung if running for more than 5 minutes
if [[ $runtime_seconds -gt 300 ]]; then
hung_processes+=("$pid:$elapsed:$cmd")
fi
done < <(ps -eo pid,etime,cmd | grep -E "apt-get|apt[^-]|aptitude|dpkg|unattended-upgr" | grep -v grep | grep -v "ps -eo")
echo "" >&2
info "Detected ${#hung_processes[@]} hung process(es), interactive=$interactive"
# If hung processes found and running interactively, offer to kill them
if [[ ${#hung_processes[@]} -gt 0 && "$interactive" == "true" ]]; then
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
echo "" >&2
for process_info in "${hung_processes[@]}"; do
IFS=':' read -r pid elapsed cmd <<< "$process_info"
echo " PID $pid: $cmd (hung for $elapsed)" >&2
done
echo "" >&2
read -p "$(echo -e "${YELLOW}⚠️ Do you want to kill these processes? [y/N]:${NC} ")" -n 1 -r >&2
echo "" >&2
if [[ $REPLY =~ ^[Yy]$ ]]; then
for process_info in "${hung_processes[@]}"; do
IFS=':' read -r pid elapsed cmd <<< "$process_info"
info "Killing process $pid..."
if kill "$pid" 2>/dev/null; then
success "Killed process $pid"
sleep 1
# Check if process is still running
if kill -0 "$pid" 2>/dev/null; then
warning "Process $pid still running, using SIGKILL..."
kill -9 "$pid" 2>/dev/null
success "Force killed process $pid"
fi
else
warning "Could not kill process $pid (may require sudo)"
fi
done
# Wait a moment for locks to clear
sleep 2
else
info "Skipping process termination"
fi
elif [[ ${#hung_processes[@]} -gt 0 ]]; then
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
info "Run this command with sudo and interactively to kill hung processes"
fi
fi
# Check for stale lock files (files that exist but no process is holding them)
for lock_file in "${lock_files[@]}"; do
if [[ -f "$lock_file" ]]; then
# Try to get the PID from the lock file if it exists
if lsof "$lock_file" > /dev/null 2>&1; then
info "Lock file $lock_file is held by an active process"
else
warning "Found stale lock file: $lock_file"
info "Attempting to remove stale lock..."
if rm -f "$lock_file" 2>/dev/null; then
success "Removed stale lock: $lock_file"
else
warning "Could not remove lock (insufficient permissions): $lock_file"
fi
fi
fi
done
# If processes were found, return failure so caller can wait
if [[ "$processes_found" == true ]]; then
return 1
else
return 0
fi
}
# Get package info for APT-based systems
get_apt_packages() {
local -n packages_ref=$1
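The elapsed-time handling added above converts the ps -eo etime field (SS, MM:SS, HH:MM:SS, or DD-HH:MM:SS) into seconds, using the 10# prefix so values like 08 or 09 are read as decimal rather than invalid octal. A standalone sketch of the same conversion, for illustration only and not part of the agent script:

# Sketch: convert a ps etime field to seconds (mirrors the parsing above)
etime_to_seconds() {
    local elapsed="$1" seconds=0
    if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then   # DD-HH:MM:SS
        seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
    elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then          # HH:MM:SS
        seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
    elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then                   # MM:SS
        seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
    elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then                            # SS
        seconds=$(( 10#${BASH_REMATCH[1]} ))
    fi
    echo "$seconds"
}

etime_to_seconds "08:30"        # 510 (the leading zero would break arithmetic without 10#)
etime_to_seconds "1-02:03:04"   # 93784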
@@ -725,10 +854,25 @@ get_apt_packages() {
else
retry_count=$((retry_count + 1))
if [[ $retry_count -lt $max_retries ]]; then
warning "APT lock detected, retrying in ${retry_delay} seconds... (attempt $retry_count/$max_retries)"
warning "APT lock detected (attempt $retry_count/$max_retries)"
# On first retry, try to handle locks
if [[ $retry_count -eq 1 ]]; then
info "Checking for stale APT locks..."
# Check if running interactively (stdin is a terminal OR stdout is a terminal)
local is_interactive=false
if [[ -t 0 ]] || [[ -t 1 ]]; then
is_interactive=true
fi
info "Interactive mode: $is_interactive"
handle_apt_locks "$is_interactive"
fi
info "Waiting ${retry_delay} seconds before retry..."
sleep $retry_delay
else
warning "APT lock persists after $max_retries attempts, continuing without update..."
warning "APT lock persists after $max_retries attempts"
warning "Continuing without updating package lists (will use cached data)"
fi
fi
done
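The retry path above only offers to kill hung processes when the script can actually prompt a user, which it detects by testing whether stdin or stdout is attached to a terminal. A minimal sketch of that check, outside the agent script:

# Sketch: detect whether the script is attached to a terminal
if [[ -t 0 ]] || [[ -t 1 ]]; then
    echo "interactive: safe to prompt the user" >&2
else
    echo "non-interactive (cron, pipe, systemd): skip prompts" >&2
fi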
@@ -1564,9 +1708,21 @@ main() {
"diagnostics")
show_diagnostics
;;
"clear-locks"|"unlock")
check_root
info "Checking APT locks and hung processes..."
echo ""
handle_apt_locks true
echo ""
if [[ $? -eq 0 ]]; then
success "No APT locks or processes blocking package management"
else
info "APT processes are still running - they may be legitimate operations"
fi
;;
*)
echo "PatchMon Agent v$AGENT_VERSION - API Credential Based"
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|diagnostics}"
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|clear-locks|diagnostics}"
echo ""
echo "Commands:"
echo " configure <API_ID> <API_KEY> [SERVER_URL] - Configure API credentials for this host"
@@ -1578,6 +1734,7 @@ main() {
echo " check-agent-update - Check for agent updates using timestamp comparison"
echo " update-agent - Update agent to latest version"
echo " update-crontab - Update crontab with current policy"
echo " clear-locks - Check and clear APT locks (interactive)"
echo " diagnostics - Show detailed system diagnostics"
echo ""
echo "Setup Process:"

Binary file not shown (4 binary files changed)

View File

@@ -1,324 +0,0 @@
#!/bin/bash
# PatchMon Agent Migration Script v1.2.9
# This script migrates from legacy bash agent (v1.2.8) to Go agent (v1.3.0+)
# It acts as an intermediary during the upgrade process
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.9"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-agent.log"
# This placeholder will be dynamically replaced by the server when serving this
# script based on the "ignore SSL self-signed" setting. If set to -k, curl will
# ignore certificate validation. Otherwise, it will be empty for secure default.
CURL_FLAGS=""
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging function
log() {
if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
echo "[$(date '+%Y-%m-%d %H:%M:%S')] MIGRATION: $1" >> "$LOG_FILE" 2>/dev/null
fi
}
# Error handling
error() {
echo -e "${RED}ERROR: $1${NC}" >&2
log "ERROR: $1"
exit 1
}
# Info logging
info() {
echo -e "${BLUE} $1${NC}" >&2
log "INFO: $1"
}
# Success logging
success() {
echo -e "${GREEN}$1${NC}" >&2
log "SUCCESS: $1"
}
# Warning logging
warning() {
echo -e "${YELLOW}⚠️ $1${NC}" >&2
log "WARNING: $1"
}
# Check if running as root
check_root() {
if [[ $EUID -ne 0 ]]; then
error "This migration script must be run as root"
fi
}
# Load API credentials from legacy format
load_legacy_credentials() {
if [[ ! -f "$CREDENTIALS_FILE" ]]; then
error "Legacy credentials file not found at $CREDENTIALS_FILE"
fi
source "$CREDENTIALS_FILE"
if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
error "API_ID and API_KEY must be configured in $CREDENTIALS_FILE"
fi
# Use PATCHMON_URL from credentials if available
if [[ -n "$PATCHMON_URL" ]]; then
PATCHMON_SERVER="$PATCHMON_URL"
fi
}
# Remove cron entries
remove_cron_entries() {
info "Removing legacy cron entries..."
# Get current crontab
local current_crontab=$(crontab -l 2>/dev/null || echo "")
if [[ -n "$current_crontab" ]]; then
# Remove any lines containing patchmon-agent
local new_crontab=$(echo "$current_crontab" | grep -v "patchmon-agent" || true)
# Update crontab if it changed
if [[ "$current_crontab" != "$new_crontab" ]]; then
if [[ -n "$new_crontab" ]]; then
echo "$new_crontab" | crontab -
success "Legacy cron entries removed (kept other cron jobs)"
else
crontab -r 2>/dev/null || true
success "All cron entries removed"
fi
else
info "No patchmon cron entries found to remove"
fi
else
info "No crontab found"
fi
}
# Clean up legacy files (only after successful verification)
cleanup_legacy_files() {
info "Cleaning up legacy files..."
# Remove legacy script
if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
rm -f "/usr/local/bin/patchmon-agent.sh"
success "Removed legacy script"
fi
# Remove legacy credentials file
if [[ -f "$CREDENTIALS_FILE" ]]; then
rm -f "$CREDENTIALS_FILE"
success "Removed legacy credentials file"
fi
# Remove legacy config file (if it exists)
if [[ -f "/etc/patchmon/config" ]]; then
rm -f "/etc/patchmon/config"
success "Removed legacy config file"
fi
}
# Show migration summary
show_migration_summary() {
echo ""
echo "=========================================="
echo "PatchMon Agent Migration Complete!"
echo "=========================================="
echo ""
echo "✅ Successfully migrated from bash agent to Go agent"
echo ""
echo "What was done:"
echo " • Loaded legacy credentials"
echo " • Downloaded and ran PatchMon install script"
echo " • Installed Go agent binary and systemd service"
echo " • Verified service is running"
echo " • Removed legacy cron entries"
echo " • Cleaned up legacy files"
echo ""
echo "Next steps:"
echo " • The Go agent runs as a service, no cron needed"
echo " • Use: patchmon-agent serve (to run as service)"
echo " • Use: patchmon-agent report (for one-time report)"
echo " • Use: patchmon-agent --help (for all commands)"
echo ""
echo "Monitoring commands:"
echo " • Check status: systemctl status patchmon-agent"
echo " • View logs: tail -f /etc/patchmon/logs/patchmon-agent.log"
echo " • Run diagnostics: patchmon-agent diagnostics"
echo ""
echo "Configuration files:"
echo " • Config: /etc/patchmon/config.yml"
echo " • Credentials: /etc/patchmon/credentials.yml"
echo " • Logs: /etc/patchmon/logs/patchmon-agent.log"
echo ""
}
# Post-migration verification
post_migration_check() {
echo ""
echo "=========================================="
echo "Post-Migration Verification"
echo "=========================================="
echo ""
# Check if patchmon-agent is running
info "Checking if patchmon-agent is running..."
if pgrep -f "patchmon-agent serve" >/dev/null 2>&1; then
success "PatchMon agent is running"
else
warning "PatchMon agent is not running (this is normal if not started as service)"
info "To start as service: patchmon-agent serve"
fi
# Check WebSocket connection (if agent is running)
if pgrep -f "patchmon-agent serve" >/dev/null 2>&1; then
info "Checking WebSocket connection..."
if /usr/local/bin/patchmon-agent ping >/dev/null 2>&1; then
success "WebSocket connection is active"
else
warning "WebSocket connection test failed"
fi
else
info "Skipping WebSocket check (agent not running)"
fi
# Run diagnostics
info "Running system diagnostics..."
echo ""
if [[ -f "/usr/local/bin/patchmon-agent" ]]; then
if /usr/local/bin/patchmon-agent diagnostics >/dev/null 2>&1; then
success "Diagnostics completed successfully"
echo ""
echo "Full diagnostics output:"
echo "----------------------------------------"
/usr/local/bin/patchmon-agent diagnostics
echo "----------------------------------------"
else
warning "Diagnostics failed to run"
fi
else
warning "Go agent binary not found - cannot run diagnostics"
fi
echo ""
echo "=========================================="
echo "Migration Verification Complete!"
echo "=========================================="
echo ""
success "Thank you for using PatchMon Agent!"
echo ""
}
# Main migration function
perform_migration() {
info "Starting PatchMon Agent migration from bash to Go..."
echo ""
# Load legacy credentials
load_legacy_credentials
# Set environment variables for install script
export API_ID="$API_ID"
export API_KEY="$API_KEY"
export PATCHMON_URL="$PATCHMON_SERVER"
# Detect architecture
local arch=$(uname -m)
local goarch=""
case "$arch" in
"x86_64") goarch="amd64" ;;
"i386"|"i686") goarch="386" ;;
"aarch64"|"arm64") goarch="arm64" ;;
"armv7l"|"armv6l"|"armv5l") goarch="arm" ;;
*) error "Unsupported architecture: $arch" ;;
esac
info "Downloading and running PatchMon install script..."
# Download and run the install script
if curl $CURL_FLAGS -H "X-API-ID: $API_ID" -H "X-API-KEY: $API_KEY" \
"$PATCHMON_SERVER/api/v1/hosts/install?arch=$goarch" | bash; then
success "PatchMon Go agent installed successfully"
# Wait a moment for service to start
sleep 3
# Test if the service is running
if systemctl is-active --quiet patchmon-agent.service; then
success "PatchMon agent service is running"
# Clean up legacy files
remove_cron_entries
cleanup_legacy_files
# Show summary
show_migration_summary
post_migration_check
success "Migration completed successfully!"
else
warning "PatchMon agent service failed to start"
warning "Legacy files preserved for debugging"
show_migration_summary
fi
else
error "Failed to install PatchMon Go agent"
fi
# Exit here to prevent the legacy script from continuing
exit 0
}
# Handle command line arguments
case "$1" in
"migrate")
check_root
perform_migration
;;
"test")
check_root
load_legacy_credentials
test_go_agent
;;
"update-agent")
# This is called by legacy agents during update
check_root
perform_migration
;;
*)
# If no arguments provided, check if we're being executed by a legacy agent
# Legacy agents will call this script directly during update
if [[ -f "$CREDENTIALS_FILE" ]]; then
info "Detected legacy agent execution - starting migration..."
check_root
perform_migration
else
echo "PatchMon Agent Migration Script v$AGENT_VERSION"
echo "Usage: $0 {migrate|test|update-agent}"
echo ""
echo "Commands:"
echo " migrate - Perform full migration from bash to Go agent"
echo " test - Test Go agent after migration"
echo " update-agent - Called by legacy agents during update"
echo ""
echo "This script should be executed by the legacy agent during update."
exit 1
fi
;;
esac
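The migration script maps the kernel architecture reported by uname -m onto the Go build architectures the server publishes (the same mapping is removed from the Proxmox LXC enrolment script further down). A standalone sketch of the mapping, for illustration only:

# Sketch: translate uname -m output into a Go agent architecture
arch="$(uname -m)"
case "$arch" in
    x86_64)               goarch="amd64" ;;
    i386|i686)            goarch="386"   ;;
    aarch64|arm64)        goarch="arm64" ;;
    armv5l|armv6l|armv7l) goarch="arm"   ;;
    *) echo "Unsupported architecture: $arch" >&2; exit 1 ;;
esac
echo "Would download: patchmon-agent-linux-${goarch}"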

View File

@@ -1,12 +1,12 @@
#!/bin/bash
# PatchMon Docker Agent Script v1.3.0
# PatchMon Docker Agent Script v1.2.9
# This script collects Docker container and image information and sends it to PatchMon
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.3.0"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-docker-agent.log"

View File

@@ -399,7 +399,7 @@ fi
curl $CURL_FLAGS \
-H "X-API-ID: $API_ID" \
-H "X-API-KEY: $API_KEY" \
"$PATCHMON_URL/api/v1/hosts/agent/download?arch=$ARCHITECTURE&force=binary" \
"$PATCHMON_URL/api/v1/hosts/agent/download?arch=$ARCHITECTURE" \
-o /usr/local/bin/patchmon-agent
chmod +x /usr/local/bin/patchmon-agent

View File

@@ -153,32 +153,6 @@ while IFS= read -r line; do
ip_address=$(timeout 5 pct exec "$vmid" -- hostname -I 2>/dev/null </dev/null | awk '{print $1}' || echo "unknown")
os_info=$(timeout 5 pct exec "$vmid" -- cat /etc/os-release 2>/dev/null </dev/null | grep "^PRETTY_NAME=" | cut -d'"' -f2 || echo "unknown")
# Detect container architecture
debug " Detecting container architecture..."
arch_raw=$(timeout 5 pct exec "$vmid" -- uname -m 2>/dev/null </dev/null || echo "unknown")
# Map architecture to supported values
case "$arch_raw" in
"x86_64")
architecture="amd64"
;;
"i386"|"i686")
architecture="386"
;;
"aarch64"|"arm64")
architecture="arm64"
;;
"armv7l"|"armv6l"|"arm")
architecture="arm"
;;
*)
warn " ⚠ Unknown architecture '$arch_raw', defaulting to amd64"
architecture="amd64"
;;
esac
debug " Detected architecture: $arch_raw -> $architecture"
# Get machine ID from container
machine_id=$(timeout 5 pct exec "$vmid" -- bash -c "cat /etc/machine-id 2>/dev/null || cat /var/lib/dbus/machine-id 2>/dev/null || echo 'proxmox-lxc-$vmid-'$(cat /proc/sys/kernel/random/uuid)" </dev/null 2>/dev/null || echo "proxmox-lxc-$vmid-unknown")
@@ -187,7 +161,6 @@ while IFS= read -r line; do
info " Hostname: $hostname"
info " IP Address: $ip_address"
info " OS: $os_info"
info " Architecture: $architecture ($arch_raw)"
info " Machine ID: ${machine_id:0:16}..."
if [[ "$DRY_RUN" == "true" ]]; then
@@ -271,13 +244,12 @@ while IFS= read -r line; do
# Install PatchMon agent in container
info " Installing PatchMon agent..."
# Build install URL with force flag and architecture if enabled
install_url="$PATCHMON_URL/api/v1/hosts/install?arch=$architecture"
# Build install URL with force flag if enabled
install_url="$PATCHMON_URL/api/v1/hosts/install"
if [[ "$FORCE_INSTALL" == "true" ]]; then
install_url="$install_url&force=true"
install_url="$install_url?force=true"
info " Using force mode - will bypass broken packages"
fi
info " Using architecture: $architecture"
# Reset exit code for this container
install_exit_code=0
@@ -428,7 +400,7 @@ if [[ ${#dpkg_error_containers[@]} -gt 0 ]]; then
-H \"X-API-ID: $api_id\" \
-H \"X-API-KEY: $api_key\" \
-o patchmon-install.sh \
'$PATCHMON_URL/api/v1/hosts/install?arch=$architecture' && \
'$PATCHMON_URL/api/v1/hosts/install' && \
bash patchmon-install.sh && \
rm -f patchmon-install.sh
" 2>&1 </dev/null) || install_exit_code=$?

View File

@@ -1,29 +1,22 @@
# Database Configuration
DATABASE_URL="postgresql://patchmon_user:your-password-here@localhost:5432/patchmon_db"
DATABASE_URL="postgresql://patchmon_user:p@tchm0n_p@55@localhost:5432/patchmon_db"
PM_DB_CONN_MAX_ATTEMPTS=30
PM_DB_CONN_WAIT_INTERVAL=2
# JWT Configuration
JWT_SECRET=your-secure-random-secret-key-change-this-in-production
JWT_EXPIRES_IN=1h
JWT_REFRESH_EXPIRES_IN=7d
# Redis Configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=your-redis-password-here
REDIS_DB=0
# Server Configuration
PORT=3001
NODE_ENV=production
NODE_ENV=development
# API Configuration
API_VERSION=v1
# CORS Configuration
CORS_ORIGIN=http://localhost:3000
# Session Configuration
SESSION_INACTIVITY_TIMEOUT_MINUTES=30
# User Configuration
DEFAULT_USER_ROLE=user
# Rate Limiting (times in milliseconds)
RATE_LIMIT_WINDOW_MS=900000
RATE_LIMIT_MAX=5000
@@ -32,18 +25,20 @@ AUTH_RATE_LIMIT_MAX=500
AGENT_RATE_LIMIT_WINDOW_MS=60000
AGENT_RATE_LIMIT_MAX=1000
# Redis Configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_USER=your-redis-username-here
REDIS_PASSWORD=your-redis-password-here
REDIS_DB=0
# Logging
LOG_LEVEL=info
ENABLE_LOGGING=true
# TFA Configuration (optional - used if TFA is enabled)
# User Registration
DEFAULT_USER_ROLE=user
# JWT Configuration
JWT_SECRET=your-secure-random-secret-key-change-this-in-production
JWT_EXPIRES_IN=1h
JWT_REFRESH_EXPIRES_IN=7d
SESSION_INACTIVITY_TIMEOUT_MINUTES=30
# TFA Configuration
TFA_REMEMBER_ME_EXPIRES_IN=30d
TFA_MAX_REMEMBER_SESSIONS=5
TFA_SUSPICIOUS_ACTIVITY_THRESHOLD=3

View File

@@ -1,6 +1,6 @@
{
"name": "patchmon-backend",
"version": "1.3.0",
"version": "1.2.9",
"description": "Backend API for Linux Patch Monitoring System",
"license": "AGPL-3.0",
"main": "src/server.js",
@@ -17,13 +17,12 @@
"@bull-board/api": "^6.13.1",
"@bull-board/express": "^6.13.1",
"@prisma/client": "^6.1.0",
"axios": "^1.7.9",
"bcryptjs": "^2.4.3",
"bullmq": "^5.61.0",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"express": "^5.0.0",
"express-rate-limit": "^7.5.0",
"express-validator": "^7.2.0",
"helmet": "^8.0.0",

View File

@@ -179,7 +179,7 @@ model settings {
updated_at DateTime
update_interval Int @default(60)
auto_update Boolean @default(false)
github_repo_url String @default("https://github.com/PatchMon/PatchMon.git")
github_repo_url String @default("git@github.com:9technologygroup/patchmon.net.git")
ssh_key_path String?
repository_type String @default("public")
last_update_check DateTime?

View File

@@ -1,6 +1,6 @@
/**
* Centralized Prisma Client Singleton
* Prevents multiple Prisma clients from creating connection leaks
* Database configuration for multiple instances
* Optimizes connection pooling to prevent "too many connections" errors
*/
const { PrismaClient } = require("@prisma/client");
@@ -26,43 +26,22 @@ function getOptimizedDatabaseUrl() {
return url.toString();
}
// Singleton Prisma client instance
let prismaInstance = null;
// Create optimized Prisma client
function createPrismaClient() {
const optimizedUrl = getOptimizedDatabaseUrl();
function getPrismaClient() {
if (!prismaInstance) {
const optimizedUrl = getOptimizedDatabaseUrl();
prismaInstance = new PrismaClient({
datasources: {
db: {
url: optimizedUrl,
},
return new PrismaClient({
datasources: {
db: {
url: optimizedUrl,
},
log:
process.env.PRISMA_LOG_QUERIES === "true"
? ["query", "info", "warn", "error"]
: ["warn", "error"],
errorFormat: "pretty",
});
// Handle graceful shutdown
process.on("beforeExit", async () => {
await prismaInstance.$disconnect();
});
process.on("SIGINT", async () => {
await prismaInstance.$disconnect();
process.exit(0);
});
process.on("SIGTERM", async () => {
await prismaInstance.$disconnect();
process.exit(0);
});
}
return prismaInstance;
},
log:
process.env.PRISMA_LOG_QUERIES === "true"
? ["query", "info", "warn", "error"]
: ["warn", "error"],
errorFormat: "pretty",
});
}
// Connection health check
@@ -71,7 +50,7 @@ async function checkDatabaseConnection(prisma) {
await prisma.$queryRaw`SELECT 1`;
return true;
} catch (error) {
console.error("Database connection check failed:", error.message);
console.error("Database connection failed:", error.message);
return false;
}
}
@@ -142,8 +121,9 @@ async function disconnectPrisma(prisma, maxRetries = 3) {
}
module.exports = {
getPrismaClient,
createPrismaClient,
checkDatabaseConnection,
waitForDatabase,
disconnectPrisma,
getOptimizedDatabaseUrl,
};

View File

@@ -1,12 +1,12 @@
const jwt = require("jsonwebtoken");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const {
validate_session,
update_session_activity,
is_tfa_bypassed,
} = require("../utils/session_manager");
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Middleware to verify JWT token with session validation
const authenticateToken = async (req, res, next) => {

View File

@@ -1,5 +1,5 @@
const { getPrismaClient } = require("../config/prisma");
const prisma = getPrismaClient();
const { PrismaClient } = require("@prisma/client");
const prisma = new PrismaClient();
// Permission middleware factory
const requirePermission = (permission) => {

View File

@@ -1,419 +0,0 @@
const express = require("express");
const router = express.Router();
const agentVersionService = require("../services/agentVersionService");
const { authenticateToken } = require("../middleware/auth");
const { requirePermission } = require("../middleware/permissions");
// Test GitHub API connectivity
router.get(
"/test-github",
authenticateToken,
requirePermission("can_manage_settings"),
async (_req, res) => {
try {
const axios = require("axios");
const response = await axios.get(
"https://api.github.com/repos/PatchMon/PatchMon-agent/releases",
{
timeout: 10000,
headers: {
"User-Agent": "PatchMon-Server/1.0",
Accept: "application/vnd.github.v3+json",
},
},
);
res.json({
success: true,
status: response.status,
releasesFound: response.data.length,
latestRelease: response.data[0]?.tag_name || "No releases",
rateLimitRemaining: response.headers["x-ratelimit-remaining"],
rateLimitLimit: response.headers["x-ratelimit-limit"],
});
} catch (error) {
console.error("❌ GitHub API test failed:", error.message);
res.status(500).json({
success: false,
error: error.message,
status: error.response?.status,
statusText: error.response?.statusText,
rateLimitRemaining: error.response?.headers["x-ratelimit-remaining"],
rateLimitLimit: error.response?.headers["x-ratelimit-limit"],
});
}
},
);
// Get current version information
router.get("/version", authenticateToken, async (_req, res) => {
try {
const versionInfo = await agentVersionService.getVersionInfo();
console.log(
"📊 Version info response:",
JSON.stringify(versionInfo, null, 2),
);
res.json(versionInfo);
} catch (error) {
console.error("❌ Failed to get version info:", error.message);
res.status(500).json({
error: "Failed to get version information",
details: error.message,
status: "error",
});
}
});
// Refresh current version by executing agent binary
router.post(
"/version/refresh",
authenticateToken,
requirePermission("can_manage_settings"),
async (_req, res) => {
try {
console.log("🔄 Refreshing current agent version...");
const currentVersion = await agentVersionService.refreshCurrentVersion();
console.log("📊 Refreshed current version:", currentVersion);
res.json({
success: true,
currentVersion: currentVersion,
message: currentVersion
? `Current version refreshed: ${currentVersion}`
: "No agent binary found",
});
} catch (error) {
console.error("❌ Failed to refresh current version:", error.message);
res.status(500).json({
success: false,
error: "Failed to refresh current version",
details: error.message,
});
}
},
);
// Download latest update
router.post(
"/version/download",
authenticateToken,
requirePermission("can_manage_settings"),
async (_req, res) => {
try {
console.log("🔄 Downloading latest agent update...");
const downloadResult = await agentVersionService.downloadLatestUpdate();
console.log(
"📊 Download result:",
JSON.stringify(downloadResult, null, 2),
);
res.json(downloadResult);
} catch (error) {
console.error("❌ Failed to download latest update:", error.message);
res.status(500).json({
success: false,
error: "Failed to download latest update",
details: error.message,
});
}
},
);
// Check for updates
router.post(
"/version/check",
authenticateToken,
requirePermission("can_manage_settings"),
async (_req, res) => {
try {
console.log("🔄 Manual update check triggered");
const updateInfo = await agentVersionService.checkForUpdates();
console.log(
"📊 Update check result:",
JSON.stringify(updateInfo, null, 2),
);
res.json(updateInfo);
} catch (error) {
console.error("❌ Failed to check for updates:", error.message);
res.status(500).json({ error: "Failed to check for updates" });
}
},
);
// Get available versions
router.get("/versions", authenticateToken, async (_req, res) => {
try {
const versions = await agentVersionService.getAvailableVersions();
console.log(
"📦 Available versions response:",
JSON.stringify(versions, null, 2),
);
res.json({ versions });
} catch (error) {
console.error("❌ Failed to get available versions:", error.message);
res.status(500).json({ error: "Failed to get available versions" });
}
});
// Get binary information
router.get(
"/binary/:version/:architecture",
authenticateToken,
async (_req, res) => {
try {
const { version, architecture } = req.params;
const binaryInfo = await agentVersionService.getBinaryInfo(
version,
architecture,
);
res.json(binaryInfo);
} catch (error) {
console.error("❌ Failed to get binary info:", error.message);
res.status(404).json({ error: error.message });
}
},
);
// Download agent binary
router.get(
"/download/:version/:architecture",
authenticateToken,
async (_req, res) => {
try {
const { version, architecture } = req.params;
// Validate architecture
if (!agentVersionService.supportedArchitectures.includes(architecture)) {
return res.status(400).json({ error: "Unsupported architecture" });
}
await agentVersionService.serveBinary(version, architecture, res);
} catch (error) {
console.error("❌ Failed to serve binary:", error.message);
res.status(500).json({ error: "Failed to serve binary" });
}
},
);
// Get latest binary for architecture (for agents to query)
router.get("/latest/:architecture", async (req, res) => {
try {
const { architecture } = req.params;
// Validate architecture
if (!agentVersionService.supportedArchitectures.includes(architecture)) {
return res.status(400).json({ error: "Unsupported architecture" });
}
const versionInfo = await agentVersionService.getVersionInfo();
if (!versionInfo.latestVersion) {
return res.status(404).json({ error: "No latest version available" });
}
const binaryInfo = await agentVersionService.getBinaryInfo(
versionInfo.latestVersion,
architecture,
);
res.json({
version: binaryInfo.version,
architecture: binaryInfo.architecture,
size: binaryInfo.size,
hash: binaryInfo.hash,
downloadUrl: `/api/v1/agent/download/${binaryInfo.version}/${binaryInfo.architecture}`,
});
} catch (error) {
console.error("❌ Failed to get latest binary info:", error.message);
res.status(500).json({ error: "Failed to get latest binary information" });
}
});
// Push update notification to specific agent
router.post(
"/notify-update/:apiId",
authenticateToken,
requirePermission("admin"),
async (_req, res) => {
try {
const { apiId } = req.params;
const { version, force = false } = req.body;
const versionInfo = await agentVersionService.getVersionInfo();
const targetVersion = version || versionInfo.latestVersion;
if (!targetVersion) {
return res
.status(400)
.json({ error: "No version specified or available" });
}
// Import WebSocket service
const { pushUpdateNotification } = require("../services/agentWs");
// Push update notification via WebSocket
pushUpdateNotification(apiId, {
version: targetVersion,
force,
downloadUrl: `/api/v1/agent/latest/${req.body.architecture || "linux-amd64"}`,
message: `Update available: ${targetVersion}`,
});
res.json({
success: true,
message: `Update notification sent to agent ${apiId}`,
version: targetVersion,
});
} catch (error) {
console.error("❌ Failed to notify agent update:", error.message);
res.status(500).json({ error: "Failed to notify agent update" });
}
},
);
// Push update notification to all agents
router.post(
"/notify-update-all",
authenticateToken,
requirePermission("admin"),
async (_req, res) => {
try {
const { version, force = false } = req.body;
const versionInfo = await agentVersionService.getVersionInfo();
const targetVersion = version || versionInfo.latestVersion;
if (!targetVersion) {
return res
.status(400)
.json({ error: "No version specified or available" });
}
// Import WebSocket service
const { pushUpdateNotificationToAll } = require("../services/agentWs");
// Push update notification to all connected agents
const result = await pushUpdateNotificationToAll({
version: targetVersion,
force,
message: `Update available: ${targetVersion}`,
});
res.json({
success: true,
message: `Update notification sent to ${result.notifiedCount} agents`,
version: targetVersion,
notifiedCount: result.notifiedCount,
failedCount: result.failedCount,
});
} catch (error) {
console.error("❌ Failed to notify all agents update:", error.message);
res.status(500).json({ error: "Failed to notify all agents update" });
}
},
);
// Check if specific agent needs update and push notification
router.post(
"/check-update/:apiId",
authenticateToken,
requirePermission("can_manage_settings"),
async (_req, res) => {
try {
const { apiId } = req.params;
const { version, force = false } = req.body;
if (!version) {
return res.status(400).json({
success: false,
error: "Agent version is required",
});
}
console.log(
`🔍 Checking update for agent ${apiId} (version: ${version})`,
);
const result = await agentVersionService.checkAndPushAgentUpdate(
apiId,
version,
force,
);
console.log(
"📊 Agent update check result:",
JSON.stringify(result, null, 2),
);
res.json({
success: true,
...result,
});
} catch (error) {
console.error("❌ Failed to check agent update:", error.message);
res.status(500).json({
success: false,
error: "Failed to check agent update",
details: error.message,
});
}
},
);
// Push updates to all connected agents
router.post(
"/push-updates-all",
authenticateToken,
requirePermission("can_manage_settings"),
async (_req, res) => {
try {
const { force = false } = req.body;
console.log(`🔄 Pushing updates to all agents (force: ${force})`);
const result = await agentVersionService.checkAndPushUpdatesToAll(force);
console.log("📊 Bulk update result:", JSON.stringify(result, null, 2));
res.json(result);
} catch (error) {
console.error("❌ Failed to push updates to all agents:", error.message);
res.status(500).json({
success: false,
error: "Failed to push updates to all agents",
details: error.message,
});
}
},
);
// Agent reports its version (for automatic update checking)
router.post("/report-version", authenticateToken, async (req, res) => {
try {
const { apiId, version } = req.body;
if (!apiId || !version) {
return res.status(400).json({
success: false,
error: "API ID and version are required",
});
}
console.log(`📊 Agent ${apiId} reported version: ${version}`);
// Check if agent needs update and push notification if needed
const updateResult = await agentVersionService.checkAndPushAgentUpdate(
apiId,
version,
);
res.json({
success: true,
message: "Version reported successfully",
updateCheck: updateResult,
});
} catch (error) {
console.error("❌ Failed to process agent version report:", error.message);
res.status(500).json({
success: false,
error: "Failed to process version report",
details: error.message,
});
}
});
module.exports = router;

View File

@@ -1,7 +1,7 @@
const express = require("express");
const bcrypt = require("bcryptjs");
const jwt = require("jsonwebtoken");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { body, validationResult } = require("express-validator");
const { authenticateToken, _requireAdmin } = require("../middleware/auth");
const {
@@ -20,7 +20,7 @@ const {
} = require("../utils/session_manager");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
/**
* Parse user agent string to extract browser and OS info

View File

@@ -1,5 +1,5 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const crypto = require("node:crypto");
const bcrypt = require("bcryptjs");
const { body, validationResult } = require("express-validator");
@@ -8,7 +8,7 @@ const { requireManageSettings } = require("../middleware/permissions");
const { v4: uuidv4 } = require("uuid");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Generate auto-enrollment token credentials
const generate_auto_enrollment_token = () => {
@@ -570,24 +570,21 @@ router.post(
os_version: "unknown",
api_id: api_id,
api_key: api_key,
host_group_id: req.auto_enrollment_token.default_host_group_id,
status: "pending",
notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
updated_at: new Date(),
},
});
// Create host group membership if default host group is specified
let hostGroupMembership = null;
if (req.auto_enrollment_token.default_host_group_id) {
hostGroupMembership = await prisma.host_group_memberships.create({
data: {
id: uuidv4(),
host_id: host.id,
host_group_id: req.auto_enrollment_token.default_host_group_id,
created_at: new Date(),
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
});
}
},
});
// Update token usage stats
await prisma.auto_enrollment_tokens.update({
@@ -603,19 +600,6 @@ router.post(
`Auto-enrolled host: ${friendly_name} (${host.id}) via token: ${req.auto_enrollment_token.token_name}`,
);
// Get host group details for response if membership was created
let hostGroup = null;
if (hostGroupMembership) {
hostGroup = await prisma.host_groups.findUnique({
where: { id: req.auto_enrollment_token.default_host_group_id },
select: {
id: true,
name: true,
color: true,
},
});
}
res.status(201).json({
message: "Host enrolled successfully",
host: {
@@ -623,7 +607,7 @@ router.post(
friendly_name: host.friendly_name,
api_id: api_id,
api_key: api_key,
host_group: hostGroup,
host_group: host.host_groups,
status: host.status,
},
});
@@ -714,24 +698,13 @@ router.post(
os_version: "unknown",
api_id: api_id,
api_key: api_key,
host_group_id: req.auto_enrollment_token.default_host_group_id,
status: "pending",
notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
updated_at: new Date(),
},
});
// Create host group membership if default host group is specified
if (req.auto_enrollment_token.default_host_group_id) {
await prisma.host_group_memberships.create({
data: {
id: uuidv4(),
host_id: host.id,
host_group_id: req.auto_enrollment_token.default_host_group_id,
created_at: new Date(),
},
});
}
results.success.push({
id: host.id,
friendly_name: host.friendly_name,

View File

@@ -1,11 +1,11 @@
const express = require("express");
const { body, validationResult } = require("express-validator");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const { v4: uuidv4 } = require("uuid");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Helper function to get user permissions based on role
async function getUserPermissions(userRole) {

View File

@@ -1,5 +1,5 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const moment = require("moment");
const { authenticateToken } = require("../middleware/auth");
const {
@@ -11,7 +11,7 @@ const {
const { queueManager } = require("../services/automation");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Get dashboard statistics
router.get(
@@ -61,15 +61,9 @@ router.get(
},
}),
// Total unique packages that need updates
prisma.packages.count({
where: {
host_packages: {
some: {
needs_update: true,
},
},
},
// Total outdated packages across all hosts
prisma.host_packages.count({
where: { needs_update: true },
}),
// Errored hosts (not updated within threshold based on update interval)
@@ -82,15 +76,11 @@ router.get(
},
}),
// Security updates count (unique packages)
prisma.packages.count({
// Security updates count
prisma.host_packages.count({
where: {
host_packages: {
some: {
needs_update: true,
is_security_update: true,
},
},
needs_update: true,
is_security_update: true,
},
}),

View File

@@ -1,9 +1,9 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { v4: uuidv4 } = require("uuid");
const prisma = getPrismaClient();
const prisma = new PrismaClient();
const router = express.Router();
// Helper function to convert BigInt fields to strings for JSON serialization

View File

@@ -1,9 +1,9 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { createPrismaClient } = require("../config/database");
const bcrypt = require("bcryptjs");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = createPrismaClient();
// Middleware to authenticate API key
const authenticateApiKey = async (req, res, next) => {
@@ -114,15 +114,9 @@ router.get("/stats", authenticateApiKey, async (_req, res) => {
where: { status: "active" },
});
// Get total unique packages that need updates (consistent with dashboard)
const totalOutdatedPackages = await prisma.packages.count({
where: {
host_packages: {
some: {
needs_update: true,
},
},
},
// Get total outdated packages count
const totalOutdatedPackages = await prisma.host_packages.count({
where: { needs_update: true },
});
// Get total repositories count
@@ -142,15 +136,11 @@ router.get("/stats", authenticateApiKey, async (_req, res) => {
},
});
// Get security updates count (unique packages - consistent with dashboard)
const securityUpdates = await prisma.packages.count({
// Get security updates count
const securityUpdates = await prisma.host_packages.count({
where: {
host_packages: {
some: {
needs_update: true,
is_security_update: true,
},
},
needs_update: true,
is_security_update: true,
},
});

View File

@@ -1,12 +1,12 @@
const express = require("express");
const { body, validationResult } = require("express-validator");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { randomUUID } = require("node:crypto");
const { authenticateToken } = require("../middleware/auth");
const { requireManageHosts } = require("../middleware/permissions");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Get all host groups
router.get("/", authenticateToken, async (_req, res) => {

View File

@@ -1,5 +1,5 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { body, validationResult } = require("express-validator");
const { v4: uuidv4 } = require("uuid");
const crypto = require("node:crypto");
@@ -12,9 +12,9 @@ const {
} = require("../middleware/permissions");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Secure endpoint to download the agent script/binary (requires API authentication)
// Secure endpoint to download the agent binary (requires API authentication)
router.get("/agent/download", async (req, res) => {
try {
// Verify API credentials
@@ -34,186 +34,83 @@ router.get("/agent/download", async (req, res) => {
return res.status(401).json({ error: "Invalid API credentials" });
}
// Get architecture parameter (default to amd64)
const architecture = req.query.arch || "amd64";
// Validate architecture
const validArchitectures = ["amd64", "386", "arm64"];
if (!validArchitectures.includes(architecture)) {
return res.status(400).json({
error: "Invalid architecture. Must be one of: amd64, 386, arm64",
});
}
// Serve agent binary directly from file system
const fs = require("node:fs");
const path = require("node:path");
// Check if this is a legacy agent (bash script) requesting update
// Legacy agents will have agent_version < 1.2.9 (excluding 1.2.9 itself)
// But allow forcing binary download for fresh installations
const forceBinary = req.query.force === "binary";
const isLegacyAgent =
!forceBinary &&
host.agent_version &&
((host.agent_version.startsWith("1.2.") &&
host.agent_version !== "1.2.9") ||
host.agent_version.startsWith("1.1.") ||
host.agent_version.startsWith("1.0."));
const binaryName = `patchmon-agent-linux-${architecture}`;
const binaryPath = path.join(__dirname, "../../../agents", binaryName);
if (isLegacyAgent) {
// Serve migration script for legacy agents
const migrationScriptPath = path.join(
__dirname,
"../../../agents/patchmon-agent.sh",
);
if (!fs.existsSync(migrationScriptPath)) {
return res.status(404).json({ error: "Migration script not found" });
}
// Set appropriate headers for script download
res.setHeader("Content-Type", "text/plain");
res.setHeader(
"Content-Disposition",
'attachment; filename="patchmon-agent.sh"',
);
// Stream the migration script
const fileStream = fs.createReadStream(migrationScriptPath);
fileStream.pipe(res);
fileStream.on("error", (error) => {
console.error("Migration script stream error:", error);
if (!res.headersSent) {
res.status(500).json({ error: "Failed to stream migration script" });
}
});
} else {
// Serve Go binary for new agents
const architecture = req.query.arch || "amd64";
// Validate architecture
const validArchitectures = ["amd64", "386", "arm64", "arm"];
if (!validArchitectures.includes(architecture)) {
return res.status(400).json({
error: "Invalid architecture. Must be one of: amd64, 386, arm64, arm",
});
}
const binaryName = `patchmon-agent-linux-${architecture}`;
const binaryPath = path.join(__dirname, "../../../agents", binaryName);
if (!fs.existsSync(binaryPath)) {
return res.status(404).json({
error: `Agent binary not found for architecture: ${architecture}`,
});
}
// Set appropriate headers for binary download
res.setHeader("Content-Type", "application/octet-stream");
res.setHeader(
"Content-Disposition",
`attachment; filename="${binaryName}"`,
);
// Stream the binary file
const fileStream = fs.createReadStream(binaryPath);
fileStream.pipe(res);
fileStream.on("error", (error) => {
console.error("Binary stream error:", error);
if (!res.headersSent) {
res.status(500).json({ error: "Failed to stream agent binary" });
}
if (!fs.existsSync(binaryPath)) {
return res.status(404).json({
error: `Agent binary not found for architecture: ${architecture}`,
});
}
// Set appropriate headers for binary download
res.setHeader("Content-Type", "application/octet-stream");
res.setHeader(
"Content-Disposition",
`attachment; filename="${binaryName}"`,
);
// Stream the binary file
const fileStream = fs.createReadStream(binaryPath);
fileStream.pipe(res);
fileStream.on("error", (error) => {
console.error("Binary stream error:", error);
if (!res.headersSent) {
res.status(500).json({ error: "Failed to stream agent binary" });
}
});
} catch (error) {
console.error("Agent download error:", error);
res.status(500).json({ error: "Failed to serve agent" });
res.status(500).json({ error: "Failed to serve agent binary" });
}
});
// Version check endpoint for agents
router.get("/agent/version", async (req, res) => {
router.get("/agent/version", async (_req, res) => {
try {
const fs = require("node:fs");
const path = require("node:path");
const { exec } = require("node:child_process");
const { promisify } = require("node:util");
const execAsync = promisify(exec);
// Get architecture parameter (default to amd64 for Go agents)
const architecture = req.query.arch || "amd64";
const agentType = req.query.type || "go"; // "go" or "legacy"
// Read version directly from agent script file
const agentPath = path.join(__dirname, "../../../agents/patchmon-agent.sh");
if (agentType === "legacy") {
// Legacy agent version check (bash script)
const agentPath = path.join(
__dirname,
"../../../agents/patchmon-agent.sh",
);
if (!fs.existsSync(agentPath)) {
return res.status(404).json({ error: "Legacy agent script not found" });
}
const scriptContent = fs.readFileSync(agentPath, "utf8");
const versionMatch = scriptContent.match(/AGENT_VERSION="([^"]+)"/);
if (!versionMatch) {
return res
.status(500)
.json({ error: "Could not extract version from agent script" });
}
const currentVersion = versionMatch[1];
res.json({
currentVersion: currentVersion,
downloadUrl: `/api/v1/hosts/agent/download`,
releaseNotes: `PatchMon Agent v${currentVersion}`,
minServerVersion: null,
});
} else {
// Go agent version check (binary)
const binaryName = `patchmon-agent-linux-${architecture}`;
const binaryPath = path.join(__dirname, "../../../agents", binaryName);
if (!fs.existsSync(binaryPath)) {
return res.status(404).json({
error: `Go agent binary not found for architecture: ${architecture}`,
});
}
// Execute the binary to get its version
try {
const { stdout } = await execAsync(`${binaryPath} --help`, {
timeout: 10000,
});
// Parse version from help output (e.g., "PatchMon Agent v1.3.1")
const versionMatch = stdout.match(
/PatchMon Agent v([0-9]+\.[0-9]+\.[0-9]+)/i,
);
if (!versionMatch) {
return res.status(500).json({
error: "Could not extract version from agent binary",
});
}
const serverVersion = versionMatch[1];
const agentVersion = req.query.currentVersion || serverVersion;
// Simple version comparison (assuming semantic versioning)
const hasUpdate = agentVersion !== serverVersion;
res.json({
currentVersion: agentVersion,
latestVersion: serverVersion,
hasUpdate: hasUpdate,
downloadUrl: `/api/v1/hosts/agent/download?arch=${architecture}`,
releaseNotes: `PatchMon Agent v${serverVersion}`,
minServerVersion: null,
architecture: architecture,
agentType: "go",
});
} catch (execError) {
console.error("Failed to execute agent binary:", execError.message);
return res.status(500).json({
error: "Failed to get version from agent binary",
});
}
if (!fs.existsSync(agentPath)) {
return res.status(404).json({ error: "Agent script not found" });
}
const scriptContent = fs.readFileSync(agentPath, "utf8");
const versionMatch = scriptContent.match(/AGENT_VERSION="([^"]+)"/);
if (!versionMatch) {
return res
.status(500)
.json({ error: "Could not extract version from agent script" });
}
const currentVersion = versionMatch[1];
res.json({
currentVersion: currentVersion,
downloadUrl: `/api/v1/hosts/agent/download`,
releaseNotes: `PatchMon Agent v${currentVersion}`,
minServerVersion: null,
});
} catch (error) {
console.error("Version check error:", error);
res.status(500).json({ error: "Failed to get agent version" });
@@ -866,117 +763,89 @@ router.post(
},
);
router.put(
"/bulk/groups",
authenticateToken,
requireManageHosts,
[
body("hostIds").isArray().withMessage("Host IDs must be an array"),
body("hostIds.*")
.isLength({ min: 1 })
.withMessage("Each host ID must be provided"),
body("groupIds").isArray().optional(),
body("groupIds.*")
.optional()
.isUUID()
.withMessage("Each group ID must be a valid UUID"),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}
// TODO: Admin endpoint to bulk update host groups - needs to be rewritten for many-to-many relationship
// router.put(
// "/bulk/group",
// authenticateToken,
// requireManageHosts,
// [
// body("hostIds").isArray().withMessage("Host IDs must be an array"),
// body("hostIds.*")
// .isLength({ min: 1 })
// .withMessage("Each host ID must be provided"),
// body("hostGroupId").optional(),
// ],
// async (req, res) => {
// try {
// const errors = validationResult(req);
// if (!errors.isEmpty()) {
// return res.status(400).json({ errors: errors.array() });
// }
const { hostIds, groupIds = [] } = req.body;
// const { hostIds, hostGroupId } = req.body;
// Verify all groups exist if provided
if (groupIds.length > 0) {
const existingGroups = await prisma.host_groups.findMany({
where: { id: { in: groupIds } },
select: { id: true },
});
// // If hostGroupId is provided, verify the group exists
// if (hostGroupId) {
// const hostGroup = await prisma.host_groups.findUnique({
// where: { id: hostGroupId },
// });
if (existingGroups.length !== groupIds.length) {
return res.status(400).json({
error: "One or more host groups not found",
provided: groupIds,
found: existingGroups.map((g) => g.id),
});
}
}
// if (!hostGroup) {
// return res.status(400).json({ error: "Host group not found" });
// }
// }
// Check if all hosts exist
const existingHosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true },
});
// // Check if all hosts exist
// const existingHosts = await prisma.hosts.findMany({
// where: { id: { in: hostIds } },
// select: { id: true, friendly_name: true },
// });
if (existingHosts.length !== hostIds.length) {
const foundIds = existingHosts.map((h) => h.id);
const missingIds = hostIds.filter((id) => !foundIds.includes(id));
return res.status(400).json({
error: "Some hosts not found",
missingHostIds: missingIds,
});
}
// if (existingHosts.length !== hostIds.length) {
// const foundIds = existingHosts.map((h) => h.id);
// const missingIds = hostIds.filter((id) => !foundIds.includes(id));
// return res.status(400).json({
// error: "Some hosts not found",
// missingHostIds: missingIds,
// });
// }
// Use transaction to update group memberships for all hosts
const updatedHosts = await prisma.$transaction(async (tx) => {
const results = [];
// // Bulk update host groups
// const updateResult = await prisma.hosts.updateMany({
// where: { id: { in: hostIds } },
// data: {
// host_group_id: hostGroupId || null,
// updated_at: new Date(),
// },
// });
for (const hostId of hostIds) {
// Remove existing memberships for this host
await tx.host_group_memberships.deleteMany({
where: { host_id: hostId },
});
// // Get updated hosts with group information
// const updatedHosts = await prisma.hosts.findMany({
// where: { id: { in: hostIds } },
// select: {
// id: true,
// friendly_name: true,
// host_groups: {
// select: {
// id: true,
// name: true,
// color: true,
// },
// },
// },
// });
// Add new memberships for this host
if (groupIds.length > 0) {
await tx.host_group_memberships.createMany({
data: groupIds.map((groupId) => ({
id: crypto.randomUUID(),
host_id: hostId,
host_group_id: groupId,
})),
});
}
// Get updated host with groups
const updatedHost = await tx.hosts.findUnique({
where: { id: hostId },
include: {
host_group_memberships: {
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
},
},
});
results.push(updatedHost);
}
return results;
});
res.json({
message: `Successfully updated ${updatedHosts.length} host${updatedHosts.length !== 1 ? "s" : ""}`,
updatedCount: updatedHosts.length,
hosts: updatedHosts,
});
} catch (error) {
console.error("Bulk host groups update error:", error);
res.status(500).json({ error: "Failed to update host groups" });
}
},
);
// res.json({
// message: `Successfully updated ${updateResult.count} host${updateResult.count !== 1 ? "s" : ""}`,
// updatedCount: updateResult.count,
// hosts: updatedHosts,
// });
// } catch (error) {
// console.error("Bulk host group update error:", error);
// res.status(500).json({ error: "Failed to update host groups" });
// }
// },
// );
// Admin endpoint to update host groups (many-to-many)
router.put(

View File

@@ -1,8 +1,8 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Get all packages with their update status
router.get("/", async (req, res) => {

View File

@@ -1,5 +1,5 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const {
requireManageSettings,
@@ -7,7 +7,7 @@ const {
} = require("../middleware/permissions");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Get all role permissions (allow users who can manage users to view roles)
router.get(

View File

@@ -1,6 +1,6 @@
const express = require("express");
const { body, validationResult } = require("express-validator");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const {
requireViewHosts,
@@ -8,7 +8,7 @@ const {
} = require("../middleware/permissions");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Get all repositories with host count
router.get("/", authenticateToken, requireViewHosts, async (_req, res) => {

View File

@@ -1,9 +1,9 @@
const express = require("express");
const router = express.Router();
const { getPrismaClient } = require("../config/prisma");
const { createPrismaClient } = require("../config/database");
const { authenticateToken } = require("../middleware/auth");
const prisma = getPrismaClient();
const prisma = createPrismaClient();
/**
* Global search endpoint

View File

@@ -1,12 +1,12 @@
const express = require("express");
const { body, validationResult } = require("express-validator");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const { requireManageSettings } = require("../middleware/permissions");
const { getSettings, updateSettings } = require("../services/settingsService");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// WebSocket broadcaster for agent policy updates (no longer used - queue-based delivery preferred)
// const { broadcastSettingsUpdate } = require("../services/agentWs");

View File

@@ -1,12 +1,12 @@
const express = require("express");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const speakeasy = require("speakeasy");
const QRCode = require("qrcode");
const { authenticateToken } = require("../middleware/auth");
const { body, validationResult } = require("express-validator");
const router = express.Router();
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Generate TFA secret and QR code
router.get("/setup", authenticateToken, async (req, res) => {

View File

@@ -1,12 +1,12 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { requireManageSettings } = require("../middleware/permissions");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Default GitHub repository URL
const DEFAULT_GITHUB_REPO = "https://github.com/PatchMon/PatchMon.git";
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
const router = express.Router();
@@ -14,13 +14,13 @@ const router = express.Router();
function getCurrentVersion() {
try {
const packageJson = require("../../package.json");
return packageJson?.version || "1.3.0";
return packageJson?.version || "1.2.9";
} catch (packageError) {
console.warn(
"Could not read version from package.json, using fallback:",
packageError.message,
);
return "1.3.0";
return "1.2.9";
}
}

View File

@@ -53,6 +53,8 @@ router.get("/status/:apiId/stream", async (req, res) => {
// Validate session (same as regular auth middleware)
const validation = await validate_session(decoded.sessionId, token);
if (!validation.valid) {
console.error("[SSE] Session validation failed:", validation.reason);
console.error("[SSE] Invalid session for api_id:", apiId);
return res.status(401).json({ error: "Invalid or expired session" });
}
@@ -60,7 +62,9 @@ router.get("/status/:apiId/stream", async (req, res) => {
await update_session_activity(decoded.sessionId);
req.user = validation.user;
} catch (_err) {
} catch (err) {
console.error("[SSE] JWT verification failed:", err.message);
console.error("[SSE] Invalid token for api_id:", apiId);
return res.status(401).json({ error: "Invalid or expired token" });
}

View File

@@ -41,10 +41,10 @@ const helmet = require("helmet");
const rateLimit = require("express-rate-limit");
const cookieParser = require("cookie-parser");
const {
getPrismaClient,
createPrismaClient,
waitForDatabase,
disconnectPrisma,
} = require("./config/prisma");
} = require("./config/database");
const winston = require("winston");
// Import routes
@@ -67,7 +67,6 @@ const gethomepageRoutes = require("./routes/gethomepageRoutes");
const automationRoutes = require("./routes/automationRoutes");
const dockerRoutes = require("./routes/dockerRoutes");
const wsRoutes = require("./routes/wsRoutes");
const agentVersionRoutes = require("./routes/agentVersionRoutes");
const { initSettings } = require("./services/settingsService");
const { queueManager } = require("./services/automation");
const { authenticateToken, requireAdmin } = require("./middleware/auth");
@@ -76,7 +75,7 @@ const { BullMQAdapter } = require("@bull-board/api/bullMQAdapter");
const { ExpressAdapter } = require("@bull-board/express");
// Initialize Prisma client with optimized connection pooling for multiple instances
const prisma = getPrismaClient();
const prisma = createPrismaClient();
// Function to check and create default role permissions on startup
async function checkAndCreateRolePermissions() {
@@ -263,7 +262,6 @@ const PORT = process.env.PORT || 3001;
const http = require("node:http");
const server = http.createServer(app);
const { init: initAgentWs } = require("./services/agentWs");
const agentVersionService = require("./services/agentVersionService");
// Trust proxy (needed when behind reverse proxy) and remove X-Powered-By
if (process.env.TRUST_PROXY) {
@@ -295,7 +293,7 @@ app.disable("x-powered-by");
// Rate limiting with monitoring
const limiter = rateLimit({
windowMs: parseInt(process.env.RATE_LIMIT_WINDOW_MS, 10) || 15 * 60 * 1000,
max: parseInt(process.env.RATE_LIMIT_MAX, 10) || 5000,
max: parseInt(process.env.RATE_LIMIT_MAX, 10) || 100,
message: {
error: "Too many requests from this IP, please try again later.",
retryAfter: Math.ceil(
@@ -345,46 +343,18 @@ const allowedOrigins = parseOrigins(
process.env.CORS_ORIGIN ||
"http://localhost:3000",
);
// Add Bull Board origin to allowed origins if not already present
const bullBoardOrigin = process.env.CORS_ORIGIN || "http://localhost:3000";
if (!allowedOrigins.includes(bullBoardOrigin)) {
allowedOrigins.push(bullBoardOrigin);
}
app.use(
cors({
origin: (origin, callback) => {
// Allow non-browser/SSR tools with no origin
if (!origin) return callback(null, true);
if (allowedOrigins.includes(origin)) return callback(null, true);
// Allow Bull Board requests from the same origin as CORS_ORIGIN
if (origin === bullBoardOrigin) return callback(null, true);
// Allow same-origin requests (e.g., Bull Board accessing its own API)
// This allows http://hostname:3001 to make requests to http://hostname:3001
if (origin?.includes(":3001")) return callback(null, true);
// Allow Bull Board requests from the frontend origin (same host, different port)
// This handles cases where frontend is on port 3000 and backend on 3001
const frontendOrigin = origin?.replace(/:3001$/, ":3000");
if (frontendOrigin && allowedOrigins.includes(frontendOrigin)) {
return callback(null, true);
}
return callback(new Error("Not allowed by CORS"));
},
credentials: true,
// Additional CORS options for better cookie handling
optionsSuccessStatus: 200,
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
allowedHeaders: [
"Content-Type",
"Authorization",
"Cookie",
"X-Requested-With",
],
}),
);
app.use(limiter);
@@ -424,7 +394,7 @@ const apiVersion = process.env.API_VERSION || "v1";
const authLimiter = rateLimit({
windowMs:
parseInt(process.env.AUTH_RATE_LIMIT_WINDOW_MS, 10) || 10 * 60 * 1000,
max: parseInt(process.env.AUTH_RATE_LIMIT_MAX, 10) || 500,
max: parseInt(process.env.AUTH_RATE_LIMIT_MAX, 10) || 20,
message: {
error: "Too many authentication requests, please try again later.",
retryAfter: Math.ceil(
@@ -438,7 +408,7 @@ const authLimiter = rateLimit({
});
const agentLimiter = rateLimit({
windowMs: parseInt(process.env.AGENT_RATE_LIMIT_WINDOW_MS, 10) || 60 * 1000,
max: parseInt(process.env.AGENT_RATE_LIMIT_MAX, 10) || 1000,
max: parseInt(process.env.AGENT_RATE_LIMIT_MAX, 10) || 120,
message: {
error: "Too many agent requests, please try again later.",
retryAfter: Math.ceil(
@@ -472,190 +442,29 @@ app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
app.use(`/api/${apiVersion}/automation`, automationRoutes);
app.use(`/api/${apiVersion}/docker`, dockerRoutes);
app.use(`/api/${apiVersion}/ws`, wsRoutes);
app.use(`/api/${apiVersion}/agent`, agentVersionRoutes);
// Bull Board - will be populated after queue manager initializes
let bullBoardRouter = null;
const _bullBoardSessions = new Map(); // Store authenticated sessions
const bullBoardSessions = new Map(); // Store authenticated sessions
// Mount Bull Board at /bullboard for cleaner URL
app.use(`/bullboard`, (_req, res, next) => {
// Mount Bull Board at /admin instead of /api/v1/admin to avoid path conflicts
app.use(`/admin/queues`, (_req, res, next) => {
// Relax COOP/COEP for Bull Board in non-production to avoid browser warnings
if (process.env.NODE_ENV !== "production") {
res.setHeader("Cross-Origin-Opener-Policy", "same-origin-allow-popups");
res.setHeader("Cross-Origin-Embedder-Policy", "unsafe-none");
}
// Add headers to help with WebSocket connections
res.setHeader("X-Frame-Options", "SAMEORIGIN");
res.setHeader(
"Content-Security-Policy",
"default-src 'self' 'unsafe-inline' 'unsafe-eval' data: blob:; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; font-src 'self' https://fonts.gstatic.com; connect-src 'self' ws: wss:;",
);
next();
});
// Simplified Bull Board authentication - just validate token once and set a simple auth cookie
app.use(`/bullboard`, async (req, res, next) => {
// Skip authentication for static assets
// Authentication middleware for Bull Board
app.use(`/admin/queues`, async (req, res, next) => {
// Skip authentication for static assets only
if (req.path.includes("/static/") || req.path.includes("/favicon")) {
return next();
}
// Check for existing Bull Board auth cookie
if (req.cookies["bull-board-auth"]) {
// Already authenticated, allow access
return next();
}
// No auth cookie - check for token in query
const token = req.query.token;
if (!token) {
return res.status(401).json({
error:
"Authentication required. Please access Bull Board from the Automation page.",
});
}
// Validate token and set auth cookie
req.headers.authorization = `Bearer ${token}`;
return authenticateToken(req, res, (err) => {
if (err) {
return res.status(401).json({ error: "Invalid authentication token" });
}
return requireAdmin(req, res, (adminErr) => {
if (adminErr) {
return res.status(403).json({ error: "Admin access required" });
}
// Set a simple auth cookie that will persist for the session
res.cookie("bull-board-auth", token, {
httpOnly: false,
secure: false,
maxAge: 3600000, // 1 hour
path: "/bullboard",
sameSite: "lax",
});
console.log("Bull Board - Authentication successful, cookie set");
return next();
});
});
});
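Given that middleware, the intended flow is that the Automation page links to Bull Board with a one-time `?token=` query parameter; the middleware validates it, sets the `bull-board-auth` cookie scoped to `/bullboard`, and later requests rely on the cookie alone. A purely illustrative frontend helper under that assumption (the function name and `accessToken` variable are made up):

```js
// Hypothetical helper for the Automation page: open Bull Board with the user's
// JWT so the middleware above can validate it once and set the auth cookie.
function openBullBoard(accessToken) {
  const url = `/bullboard?token=${encodeURIComponent(accessToken)}`;
  window.open(url, "_blank", "noopener");
}
```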
// Remove all the old complex middleware below and replace with the new Bull Board router setup
app.use(`/bullboard`, (req, res, next) => {
if (bullBoardRouter) {
return bullBoardRouter(req, res, next);
}
return res.status(503).json({ error: "Bull Board not initialized yet" });
});
/*
// OLD MIDDLEWARE - REMOVED FOR SIMPLIFICATION - DO NOT USE
if (false) {
const sessionId = req.cookies["bull-board-session"];
console.log("Bull Board API call - Session ID:", sessionId ? "present" : "missing");
console.log("Bull Board API call - Cookies:", req.cookies);
console.log("Bull Board API call - Bull Board token cookie:", req.cookies["bull-board-token"] ? "present" : "missing");
console.log("Bull Board API call - Query token:", req.query.token ? "present" : "missing");
console.log("Bull Board API call - Auth header:", req.headers.authorization ? "present" : "missing");
console.log("Bull Board API call - Origin:", req.headers.origin || "missing");
console.log("Bull Board API call - Referer:", req.headers.referer || "missing");
// Check if we have any authentication method available
const hasSession = !!sessionId;
const hasTokenCookie = !!req.cookies["bull-board-token"];
const hasQueryToken = !!req.query.token;
const hasAuthHeader = !!req.headers.authorization;
const hasReferer = !!req.headers.referer;
console.log("Bull Board API call - Auth methods available:", {
session: hasSession,
tokenCookie: hasTokenCookie,
queryToken: hasQueryToken,
authHeader: hasAuthHeader,
referer: hasReferer
});
// Check for valid session first
if (sessionId) {
const session = bullBoardSessions.get(sessionId);
console.log("Bull Board API call - Session found:", !!session);
if (session && Date.now() - session.timestamp < 3600000) {
// Valid session, extend it
session.timestamp = Date.now();
console.log("Bull Board API call - Using existing session, proceeding");
return next();
} else if (session) {
// Expired session, remove it
console.log("Bull Board API call - Session expired, removing");
bullBoardSessions.delete(sessionId);
}
}
// No valid session, check for token as fallback
let token = req.query.token;
if (!token && req.headers.authorization) {
token = req.headers.authorization.replace("Bearer ", "");
}
if (!token && req.cookies["bull-board-token"]) {
token = req.cookies["bull-board-token"];
}
// For API calls, also check if the token is in the referer URL
// This handles cases where the main page hasn't set the cookie yet
if (!token && req.headers.referer) {
try {
const refererUrl = new URL(req.headers.referer);
const refererToken = refererUrl.searchParams.get('token');
if (refererToken) {
token = refererToken;
console.log("Bull Board API call - Token found in referer URL:", refererToken.substring(0, 20) + "...");
} else {
console.log("Bull Board API call - No token found in referer URL");
// If no token in referer and no session, return 401 with redirect info
if (!sessionId) {
console.log("Bull Board API call - No authentication available, returning 401");
return res.status(401).json({
error: "Authentication required",
message: "Please refresh the page to re-authenticate"
});
}
}
} catch (error) {
console.log("Bull Board API call - Error parsing referer URL:", error.message);
}
}
if (token) {
console.log("Bull Board API call - Token found, authenticating");
// Add token to headers for authentication
req.headers.authorization = `Bearer ${token}`;
// Authenticate the user
return authenticateToken(req, res, (err) => {
if (err) {
console.log("Bull Board API call - Token authentication failed");
return res.status(401).json({ error: "Authentication failed" });
}
return requireAdmin(req, res, (adminErr) => {
if (adminErr) {
console.log("Bull Board API call - Admin access required");
return res.status(403).json({ error: "Admin access required" });
}
console.log("Bull Board API call - Token authentication successful");
return next();
});
});
}
// No valid session or token for API calls, deny access
console.log("Bull Board API call - No valid session or token, denying access");
return res.status(401).json({ error: "Valid Bull Board session or token required" });
}
// Check for bull-board-session cookie first
const sessionId = req.cookies["bull-board-session"];
if (sessionId) {
@@ -676,9 +485,6 @@ if (false) {
if (!token && req.headers.authorization) {
token = req.headers.authorization.replace("Bearer ", "");
}
if (!token && req.cookies["bull-board-token"]) {
token = req.cookies["bull-board-token"];
}
// If no token, deny access
if (!token) {
@@ -707,23 +513,13 @@ if (false) {
userId: req.user.id,
});
// Set session cookie with proper configuration for domain access
const isHttps = process.env.NODE_ENV === "production" || process.env.SERVER_PROTOCOL === "https";
const cookieOptions = {
// Set session cookie
res.cookie("bull-board-session", newSessionId, {
httpOnly: true,
secure: isHttps,
secure: process.env.NODE_ENV === "production",
sameSite: "lax",
maxAge: 3600000, // 1 hour
path: "/", // Set path to root so it's available for all Bull Board requests
};
// Configure sameSite based on protocol and environment
if (isHttps) {
cookieOptions.sameSite = "none"; // Required for HTTPS cross-origin
} else {
cookieOptions.sameSite = "lax"; // Better for HTTP same-origin
}
res.cookie("bull-board-session", newSessionId, cookieOptions);
});
// Clean up old sessions periodically
if (bullBoardSessions.size > 100) {
@@ -739,114 +535,16 @@ if (false) {
});
});
});
*/
// Second middleware block - COMMENTED OUT - using simplified version above instead
/*
app.use(`/bullboard`, (req, res, next) => {
app.use(`/admin/queues`, (req, res, next) => {
if (bullBoardRouter) {
// If this is the main Bull Board page (not an API call), inject the token and create session
if (!req.path.includes("/api/") && !req.path.includes("/static/") && req.path === "/bullboard") {
const token = req.query.token;
console.log("Bull Board main page - Token:", token ? "present" : "missing");
console.log("Bull Board main page - Query params:", req.query);
console.log("Bull Board main page - Origin:", req.headers.origin || "missing");
console.log("Bull Board main page - Referer:", req.headers.referer || "missing");
console.log("Bull Board main page - Cookies:", req.cookies);
if (token) {
// Authenticate the user and create a session immediately on page load
req.headers.authorization = `Bearer ${token}`;
return authenticateToken(req, res, (err) => {
if (err) {
console.log("Bull Board main page - Token authentication failed");
return res.status(401).json({ error: "Authentication failed" });
}
return requireAdmin(req, res, (adminErr) => {
if (adminErr) {
console.log("Bull Board main page - Admin access required");
return res.status(403).json({ error: "Admin access required" });
}
console.log("Bull Board main page - Token authentication successful, creating session");
// Create a Bull Board session immediately
const newSessionId = require("node:crypto")
.randomBytes(32)
.toString("hex");
bullBoardSessions.set(newSessionId, {
timestamp: Date.now(),
userId: req.user.id,
});
// Set session cookie with proper configuration for domain access
const sessionCookieOptions = {
httpOnly: true,
secure: false, // Always false for HTTP
maxAge: 3600000, // 1 hour
path: "/", // Set path to root so it's available for all Bull Board requests
sameSite: "lax", // Always lax for HTTP
};
res.cookie("bull-board-session", newSessionId, sessionCookieOptions);
console.log("Bull Board main page - Session created:", newSessionId);
console.log("Bull Board main page - Cookie options:", sessionCookieOptions);
// Also set a token cookie for API calls as a fallback
const tokenCookieOptions = {
httpOnly: false, // Allow JavaScript to access it
secure: false, // Always false for HTTP
maxAge: 3600000, // 1 hour
path: "/", // Set path to root for broader compatibility
sameSite: "lax", // Always lax for HTTP
};
res.cookie("bull-board-token", token, tokenCookieOptions);
console.log("Bull Board main page - Token cookie also set for API fallback");
// Clean up old sessions periodically
if (bullBoardSessions.size > 100) {
const now = Date.now();
for (const [sid, session] of bullBoardSessions.entries()) {
if (now - session.timestamp > 3600000) {
bullBoardSessions.delete(sid);
}
}
}
// Now proceed to serve the Bull Board page
return bullBoardRouter(req, res, next);
});
});
} else {
console.log("Bull Board main page - No token provided, checking for existing session");
// Check if we have an existing session
const sessionId = req.cookies["bull-board-session"];
if (sessionId) {
const session = bullBoardSessions.get(sessionId);
if (session && Date.now() - session.timestamp < 3600000) {
console.log("Bull Board main page - Using existing session");
// Extend session
session.timestamp = Date.now();
return bullBoardRouter(req, res, next);
} else if (session) {
console.log("Bull Board main page - Session expired, removing");
bullBoardSessions.delete(sessionId);
}
}
console.log("Bull Board main page - No valid session, denying access");
return res.status(401).json({ error: "Access token required" });
}
}
return bullBoardRouter(req, res, next);
}
return res.status(503).json({ error: "Bull Board not initialized yet" });
});
*/
// Error handler specifically for Bull Board routes
app.use("/bullboard", (err, req, res, _next) => {
app.use("/admin/queues", (err, req, res, _next) => {
console.error("Bull Board error on", req.method, req.url);
console.error("Error details:", err.message);
console.error("Stack:", err.stack);
@@ -866,15 +564,6 @@ app.use((err, _req, res, _next) => {
if (process.env.ENABLE_LOGGING === "true") {
logger.error(err.stack);
}
// Special handling for CORS errors - always include the message
if (err.message?.includes("Not allowed by CORS")) {
return res.status(500).json({
error: "Something went wrong!",
message: err.message, // Always include CORS error message
});
}
res.status(500).json({
error: "Something went wrong!",
message: process.env.NODE_ENV === "development" ? err.message : undefined,
@@ -1178,7 +867,7 @@ async function startServer() {
// Set up Bull Board for queue monitoring
const serverAdapter = new ExpressAdapter();
// Set basePath to match where we mount the router
serverAdapter.setBasePath("/bullboard");
serverAdapter.setBasePath("/admin/queues");
const { QUEUE_NAMES } = require("./services/automation");
const bullAdapters = Object.values(QUEUE_NAMES).map(
@@ -1192,11 +881,10 @@ async function startServer() {
// Set the router for the Bull Board middleware (secured middleware above)
bullBoardRouter = serverAdapter.getRouter();
console.log("✅ Bull Board mounted at /bullboard (secured)");
console.log("✅ Bull Board mounted at /admin/queues (secured)");
// Initialize WS layer with the underlying HTTP server
initAgentWs(server, prisma);
await agentVersionService.initialize();
server.listen(PORT, () => {
if (process.env.ENABLE_LOGGING === "true") {

View File

@@ -1,743 +0,0 @@
const axios = require("axios");
const fs = require("node:fs").promises;
const path = require("node:path");
const { exec, spawn } = require("node:child_process");
const { promisify } = require("node:util");
const _execAsync = promisify(exec);
// Simple semver comparison function
function compareVersions(version1, version2) {
const v1parts = version1.split(".").map(Number);
const v2parts = version2.split(".").map(Number);
// Ensure both arrays have the same length
while (v1parts.length < 3) v1parts.push(0);
while (v2parts.length < 3) v2parts.push(0);
for (let i = 0; i < 3; i++) {
if (v1parts[i] > v2parts[i]) return 1;
if (v1parts[i] < v2parts[i]) return -1;
}
return 0;
}
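For reference, a few example results of the helper above, matching how its three-way return value is interpreted later in this service (negative means an update is available, zero means up to date, positive means the local build is newer):

```js
compareVersions("1.2.9", "1.3.0"); // -1 -> update available
compareVersions("1.3.0", "1.3.0"); //  0 -> up to date
compareVersions("1.4", "1.3.9");   //  1 -> newer than latest (missing parts pad to 0)
```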
const crypto = require("node:crypto");
class AgentVersionService {
constructor() {
this.githubApiUrl =
"https://api.github.com/repos/PatchMon/PatchMon-agent/releases";
this.agentsDir = path.resolve(__dirname, "../../../agents");
this.supportedArchitectures = [
"linux-amd64",
"linux-arm64",
"linux-386",
"linux-arm",
];
this.currentVersion = null;
this.latestVersion = null;
this.lastChecked = null;
this.checkInterval = 30 * 60 * 1000; // 30 minutes
}
async initialize() {
try {
// Ensure agents directory exists
await fs.mkdir(this.agentsDir, { recursive: true });
console.log("🔍 Testing GitHub API connectivity...");
try {
const testResponse = await axios.get(
"https://api.github.com/repos/PatchMon/PatchMon-agent/releases",
{
timeout: 5000,
headers: {
"User-Agent": "PatchMon-Server/1.0",
Accept: "application/vnd.github.v3+json",
},
},
);
console.log(
`✅ GitHub API accessible - found ${testResponse.data.length} releases`,
);
} catch (testError) {
console.error("❌ GitHub API not accessible:", testError.message);
if (testError.response) {
console.error(
"❌ Status:",
testError.response.status,
testError.response.statusText,
);
if (testError.response.status === 403) {
console.log("⚠️ GitHub API rate limit exceeded - will retry later");
}
}
}
// Get current agent version by executing the binary
await this.getCurrentAgentVersion();
// Try to check for updates, but don't fail initialization if GitHub API is unavailable
try {
await this.checkForUpdates();
} catch (updateError) {
console.log(
"⚠️ Failed to check for updates on startup, will retry later:",
updateError.message,
);
}
// Set up periodic checking
setInterval(() => {
this.checkForUpdates().catch((error) => {
console.log("⚠️ Periodic update check failed:", error.message);
});
}, this.checkInterval);
console.log("✅ Agent Version Service initialized");
} catch (error) {
console.error(
"❌ Failed to initialize Agent Version Service:",
error.message,
);
}
}
async getCurrentAgentVersion() {
try {
console.log("🔍 Getting current agent version...");
// Try to find the agent binary in agents/ folder only (what gets distributed)
const possiblePaths = [
path.join(this.agentsDir, "patchmon-agent-linux-amd64"),
path.join(this.agentsDir, "patchmon-agent"),
];
let agentPath = null;
for (const testPath of possiblePaths) {
try {
await fs.access(testPath);
agentPath = testPath;
console.log(`✅ Found agent binary at: ${testPath}`);
break;
} catch {
// Path doesn't exist, continue to next
}
}
if (!agentPath) {
console.log(
"⚠️ No agent binary found in agents/ folder, current version will be unknown",
);
console.log("💡 Use the Download Updates button to get agent binaries");
this.currentVersion = null;
return;
}
// Execute the agent binary with help flag to get version info
try {
const child = spawn(agentPath, ["--help"], {
timeout: 10000,
});
let stdout = "";
let stderr = "";
child.stdout.on("data", (data) => {
stdout += data.toString();
});
child.stderr.on("data", (data) => {
stderr += data.toString();
});
const result = await new Promise((resolve, reject) => {
child.on("close", (code) => {
resolve({ stdout, stderr, code });
});
child.on("error", reject);
});
if (result.stderr) {
console.log("⚠️ Agent help stderr:", result.stderr);
}
// Parse version from help output (e.g., "PatchMon Agent v1.3.0")
const versionMatch = result.stdout.match(
/PatchMon Agent v([0-9]+\.[0-9]+\.[0-9]+)/i,
);
if (versionMatch) {
this.currentVersion = versionMatch[1];
console.log(`✅ Current agent version: ${this.currentVersion}`);
} else {
console.log(
"⚠️ Could not parse version from agent help output:",
result.stdout,
);
this.currentVersion = null;
}
} catch (execError) {
console.error("❌ Failed to execute agent binary:", execError.message);
this.currentVersion = null;
}
} catch (error) {
console.error("❌ Failed to get current agent version:", error.message);
this.currentVersion = null;
}
}
async checkForUpdates() {
try {
console.log("🔍 Checking for agent updates...");
const response = await axios.get(this.githubApiUrl, {
timeout: 10000,
headers: {
"User-Agent": "PatchMon-Server/1.0",
Accept: "application/vnd.github.v3+json",
},
});
console.log(`📡 GitHub API response status: ${response.status}`);
console.log(`📦 Found ${response.data.length} releases`);
const releases = response.data;
if (releases.length === 0) {
console.log(" No releases found");
this.latestVersion = null;
this.lastChecked = new Date();
return {
latestVersion: null,
currentVersion: this.currentVersion,
hasUpdate: false,
lastChecked: this.lastChecked,
};
}
const latestRelease = releases[0];
this.latestVersion = latestRelease.tag_name.replace("v", ""); // Remove 'v' prefix
this.lastChecked = new Date();
console.log(`📦 Latest agent version: ${this.latestVersion}`);
// Don't download binaries automatically - only when explicitly requested
console.log(
" Skipping automatic binary download - binaries will be downloaded on demand",
);
return {
latestVersion: this.latestVersion,
currentVersion: this.currentVersion,
hasUpdate: this.currentVersion !== this.latestVersion,
lastChecked: this.lastChecked,
};
} catch (error) {
console.error("❌ Failed to check for updates:", error.message);
if (error.response) {
console.error(
"❌ GitHub API error:",
error.response.status,
error.response.statusText,
);
console.error(
"❌ Rate limit info:",
error.response.headers["x-ratelimit-remaining"],
"/",
error.response.headers["x-ratelimit-limit"],
);
}
throw error;
}
}
async downloadBinariesToAgentsFolder(release) {
try {
console.log(
`⬇️ Downloading binaries for version ${release.tag_name} to agents folder...`,
);
for (const arch of this.supportedArchitectures) {
const assetName = `patchmon-agent-${arch}`;
const asset = release.assets.find((a) => a.name === assetName);
if (!asset) {
console.warn(`⚠️ Binary not found for architecture: ${arch}`);
continue;
}
const binaryPath = path.join(this.agentsDir, assetName);
console.log(`⬇️ Downloading ${assetName}...`);
const response = await axios.get(asset.browser_download_url, {
responseType: "stream",
timeout: 60000,
});
const writer = require("node:fs").createWriteStream(binaryPath);
response.data.pipe(writer);
await new Promise((resolve, reject) => {
writer.on("finish", resolve);
writer.on("error", reject);
});
// Make executable
await fs.chmod(binaryPath, "755");
console.log(`✅ Downloaded: ${assetName} to agents folder`);
}
} catch (error) {
console.error(
"❌ Failed to download binaries to agents folder:",
error.message,
);
throw error;
}
}
async downloadBinaryForVersion(version, architecture) {
try {
console.log(
`⬇️ Downloading binary for version ${version} architecture ${architecture}...`,
);
// Get the release info from GitHub
const response = await axios.get(this.githubApiUrl, {
timeout: 10000,
headers: {
"User-Agent": "PatchMon-Server/1.0",
Accept: "application/vnd.github.v3+json",
},
});
const releases = response.data;
const release = releases.find(
(r) => r.tag_name.replace("v", "") === version,
);
if (!release) {
throw new Error(`Release ${version} not found`);
}
const assetName = `patchmon-agent-${architecture}`;
const asset = release.assets.find((a) => a.name === assetName);
if (!asset) {
throw new Error(`Binary not found for architecture: ${architecture}`);
}
const binaryPath = path.join(
this.agentBinariesDir,
`${release.tag_name}-${assetName}`,
);
console.log(`⬇️ Downloading ${assetName}...`);
const downloadResponse = await axios.get(asset.browser_download_url, {
responseType: "stream",
timeout: 60000,
});
const writer = require("node:fs").createWriteStream(binaryPath);
downloadResponse.data.pipe(writer);
await new Promise((resolve, reject) => {
writer.on("finish", resolve);
writer.on("error", reject);
});
// Make executable
await fs.chmod(binaryPath, "755");
console.log(`✅ Downloaded: ${assetName}`);
return binaryPath;
} catch (error) {
console.error(
`❌ Failed to download binary ${version}-${architecture}:`,
error.message,
);
throw error;
}
}
async getBinaryPath(version, architecture) {
const binaryName = `patchmon-agent-${architecture}`;
const binaryPath = path.join(this.agentsDir, binaryName);
try {
await fs.access(binaryPath);
return binaryPath;
} catch {
throw new Error(`Binary not found: ${binaryName} version ${version}`);
}
}
async serveBinary(version, architecture, res) {
try {
// Check if binary exists, if not download it
const binaryPath = await this.getBinaryPath(version, architecture);
const stats = await fs.stat(binaryPath);
res.setHeader("Content-Type", "application/octet-stream");
res.setHeader(
"Content-Disposition",
`attachment; filename="patchmon-agent-${architecture}"`,
);
res.setHeader("Content-Length", stats.size);
// Add cache headers
res.setHeader("Cache-Control", "public, max-age=3600");
res.setHeader("ETag", `"${version}-${architecture}"`);
const stream = require("node:fs").createReadStream(binaryPath);
stream.pipe(res);
} catch (_error) {
// Binary doesn't exist, try to download it
console.log(
`⬇️ Binary not found locally, attempting to download ${version}-${architecture}...`,
);
try {
await this.downloadBinaryForVersion(version, architecture);
// Retry serving the binary
const binaryPath = await this.getBinaryPath(version, architecture);
const stats = await fs.stat(binaryPath);
res.setHeader("Content-Type", "application/octet-stream");
res.setHeader(
"Content-Disposition",
`attachment; filename="patchmon-agent-${architecture}"`,
);
res.setHeader("Content-Length", stats.size);
res.setHeader("Cache-Control", "public, max-age=3600");
res.setHeader("ETag", `"${version}-${architecture}"`);
const stream = require("node:fs").createReadStream(binaryPath);
stream.pipe(res);
} catch (downloadError) {
console.error(
`❌ Failed to download binary ${version}-${architecture}:`,
downloadError.message,
);
res
.status(404)
.json({ error: "Binary not found and could not be downloaded" });
}
}
}
async getVersionInfo() {
let hasUpdate = false;
let updateStatus = "unknown";
let effectiveLatestVersion = this.currentVersion; // Always use local version if available
// If we have a local version, use it as the latest regardless of GitHub
if (this.currentVersion) {
effectiveLatestVersion = this.currentVersion;
console.log(
`🔄 Using local agent version ${this.currentVersion} as latest`,
);
} else if (this.latestVersion) {
// Fallback to GitHub version only if no local version
effectiveLatestVersion = this.latestVersion;
console.log(
`🔄 No local version found, using GitHub version ${this.latestVersion}`,
);
}
if (this.currentVersion && effectiveLatestVersion) {
const comparison = compareVersions(
this.currentVersion,
effectiveLatestVersion,
);
if (comparison < 0) {
hasUpdate = true;
updateStatus = "update-available";
} else if (comparison > 0) {
hasUpdate = false;
updateStatus = "newer-version";
} else {
hasUpdate = false;
updateStatus = "up-to-date";
}
} else if (effectiveLatestVersion && !this.currentVersion) {
hasUpdate = true;
updateStatus = "no-agent";
} else if (this.currentVersion && !effectiveLatestVersion) {
// We have a current version but no latest version (GitHub API unavailable)
hasUpdate = false;
updateStatus = "github-unavailable";
} else if (!this.currentVersion && !effectiveLatestVersion) {
updateStatus = "no-data";
}
return {
currentVersion: this.currentVersion,
latestVersion: effectiveLatestVersion,
hasUpdate: hasUpdate,
updateStatus: updateStatus,
lastChecked: this.lastChecked,
supportedArchitectures: this.supportedArchitectures,
status: effectiveLatestVersion ? "ready" : "no-releases",
};
}
async refreshCurrentVersion() {
await this.getCurrentAgentVersion();
return this.currentVersion;
}
async downloadLatestUpdate() {
try {
console.log("⬇️ Downloading latest agent update...");
// First check for updates to get the latest release info
const _updateInfo = await this.checkForUpdates();
if (!this.latestVersion) {
throw new Error("No latest version available to download");
}
// Get the release info from GitHub
const response = await axios.get(this.githubApiUrl, {
timeout: 10000,
headers: {
"User-Agent": "PatchMon-Server/1.0",
Accept: "application/vnd.github.v3+json",
},
});
const releases = response.data;
const latestRelease = releases[0];
if (!latestRelease) {
throw new Error("No releases found");
}
console.log(
`⬇️ Downloading binaries for version ${latestRelease.tag_name}...`,
);
// Download binaries for all architectures directly to agents folder
await this.downloadBinariesToAgentsFolder(latestRelease);
console.log("✅ Latest update downloaded successfully");
return {
success: true,
version: this.latestVersion,
downloadedArchitectures: this.supportedArchitectures,
message: `Successfully downloaded version ${this.latestVersion}`,
};
} catch (error) {
console.error("❌ Failed to download latest update:", error.message);
throw error;
}
}
async getAvailableVersions() {
// No local caching - only return latest from GitHub
if (this.latestVersion) {
return [this.latestVersion];
}
return [];
}
async getBinaryInfo(version, architecture) {
try {
// Always use local version if it matches the requested version
if (version === this.currentVersion && this.currentVersion) {
const binaryPath = await this.getBinaryPath(
this.currentVersion,
architecture,
);
const stats = await fs.stat(binaryPath);
// Calculate file hash
const fileBuffer = await fs.readFile(binaryPath);
const hash = crypto
.createHash("sha256")
.update(fileBuffer)
.digest("hex");
return {
version: this.currentVersion,
architecture,
size: stats.size,
hash,
lastModified: stats.mtime,
path: binaryPath,
};
}
// For other versions, try to find them in the agents folder
const binaryPath = await this.getBinaryPath(version, architecture);
const stats = await fs.stat(binaryPath);
// Calculate file hash
const fileBuffer = await fs.readFile(binaryPath);
const hash = crypto.createHash("sha256").update(fileBuffer).digest("hex");
return {
version,
architecture,
size: stats.size,
hash,
lastModified: stats.mtime,
path: binaryPath,
};
} catch (error) {
throw new Error(`Failed to get binary info: ${error.message}`);
}
}
/**
* Check if an agent needs an update and push notification if needed
* @param {string} agentApiId - The agent's API ID
* @param {string} agentVersion - The agent's current version
* @param {boolean} force - Force update regardless of version
* @returns {Object} Update check result
*/
async checkAndPushAgentUpdate(agentApiId, agentVersion, force = false) {
try {
console.log(
`🔍 Checking update for agent ${agentApiId} (version: ${agentVersion})`,
);
// Get current server version info
const versionInfo = await this.getVersionInfo();
if (!versionInfo.latestVersion) {
console.log(`⚠️ No latest version available for agent ${agentApiId}`);
return {
needsUpdate: false,
reason: "no-latest-version",
message: "No latest version available on server",
};
}
// Compare versions
const comparison = compareVersions(
agentVersion,
versionInfo.latestVersion,
);
const needsUpdate = force || comparison < 0;
if (needsUpdate) {
console.log(
`📤 Agent ${agentApiId} needs update: ${agentVersion} → ${versionInfo.latestVersion}`,

);
// Import agentWs service to push notification
const { pushUpdateNotification } = require("./agentWs");
const updateInfo = {
version: versionInfo.latestVersion,
force: force,
downloadUrl: `/api/v1/agent/binary/${versionInfo.latestVersion}/linux-amd64`,
message: force
? "Force update requested"
: `Update available: ${versionInfo.latestVersion}`,
};
const pushed = pushUpdateNotification(agentApiId, updateInfo);
if (pushed) {
console.log(`✅ Update notification pushed to agent ${agentApiId}`);
return {
needsUpdate: true,
reason: force ? "force-update" : "version-outdated",
message: `Update notification sent: ${agentVersion} → ${versionInfo.latestVersion}`,
targetVersion: versionInfo.latestVersion,
};
} else {
console.log(
`⚠️ Failed to push update notification to agent ${agentApiId} (not connected)`,
);
return {
needsUpdate: true,
reason: "agent-offline",
message: "Agent needs update but is not connected",
targetVersion: versionInfo.latestVersion,
};
}
} else {
console.log(`✅ Agent ${agentApiId} is up to date: ${agentVersion}`);
return {
needsUpdate: false,
reason: "up-to-date",
message: `Agent is up to date: ${agentVersion}`,
};
}
} catch (error) {
console.error(
`❌ Failed to check update for agent ${agentApiId}:`,
error.message,
);
return {
needsUpdate: false,
reason: "error",
message: `Error checking update: ${error.message}`,
};
}
}
/**
* Check and push updates to all connected agents
* @param {boolean} force - Force update regardless of version
* @returns {Object} Bulk update result
*/
async checkAndPushUpdatesToAll(force = false) {
try {
console.log(
`🔍 Checking updates for all connected agents (force: ${force})`,
);
// Import agentWs service to get connected agents
const { pushUpdateNotificationToAll } = require("./agentWs");
const versionInfo = await this.getVersionInfo();
if (!versionInfo.latestVersion) {
return {
success: false,
message: "No latest version available on server",
updatedAgents: 0,
totalAgents: 0,
};
}
const updateInfo = {
version: versionInfo.latestVersion,
force: force,
downloadUrl: `/api/v1/agent/binary/${versionInfo.latestVersion}/linux-amd64`,
message: force
? "Force update requested for all agents"
: `Update available: ${versionInfo.latestVersion}`,
};
const result = await pushUpdateNotificationToAll(updateInfo);
console.log(
`✅ Bulk update notification sent to ${result.notifiedCount} agents`,
);
return {
success: true,
message: `Update notifications sent to ${result.notifiedCount} agents`,
updatedAgents: result.notifiedCount,
totalAgents: result.totalAgents,
targetVersion: versionInfo.latestVersion,
};
} catch (error) {
console.error("❌ Failed to push updates to all agents:", error.message);
return {
success: false,
message: `Error pushing updates: ${error.message}`,
updatedAgents: 0,
totalAgents: 0,
};
}
}
}
module.exports = new AgentVersionService();

View File

@@ -26,37 +26,7 @@ function init(server, prismaClient) {
server.on("upgrade", async (request, socket, head) => {
try {
const { pathname } = url.parse(request.url);
if (!pathname) {
socket.destroy();
return;
}
// Handle Bull Board WebSocket connections
if (pathname.startsWith("/bullboard")) {
// For Bull Board, we need to check if the user is authenticated
// Check for session cookie or authorization header
const sessionCookie = request.headers.cookie?.match(
/bull-board-session=([^;]+)/,
)?.[1];
const authHeader = request.headers.authorization;
if (!sessionCookie && !authHeader) {
socket.destroy();
return;
}
// Accept the WebSocket connection for Bull Board
wss.handleUpgrade(request, socket, head, (ws) => {
ws.on("message", (message) => {
// Echo back for Bull Board WebSocket
ws.send(message);
});
});
return;
}
// Handle agent WebSocket connections
if (!pathname.startsWith("/api/")) {
if (!pathname || !pathname.startsWith("/api/")) {
socket.destroy();
return;
}
@@ -162,66 +132,6 @@ function pushSettingsUpdate(apiId, newInterval) {
);
}
function pushUpdateNotification(apiId, updateInfo) {
const ws = apiIdToSocket.get(apiId);
if (ws && ws.readyState === WebSocket.OPEN) {
safeSend(
ws,
JSON.stringify({
type: "update_notification",
version: updateInfo.version,
force: updateInfo.force || false,
downloadUrl: updateInfo.downloadUrl,
message: updateInfo.message,
}),
);
console.log(
`📤 Pushed update notification to agent ${apiId}: version ${updateInfo.version}`,
);
return true;
} else {
console.log(
`⚠️ Agent ${apiId} not connected, cannot push update notification`,
);
return false;
}
}
async function pushUpdateNotificationToAll(updateInfo) {
let notifiedCount = 0;
let failedCount = 0;
for (const [apiId, ws] of apiIdToSocket) {
if (ws && ws.readyState === WebSocket.OPEN) {
try {
safeSend(
ws,
JSON.stringify({
type: "update_notification",
version: updateInfo.version,
force: updateInfo.force || false,
message: updateInfo.message,
}),
);
notifiedCount++;
console.log(
`📤 Pushed update notification to agent ${apiId}: version ${updateInfo.version}`,
);
} catch (error) {
failedCount++;
console.error(`❌ Failed to notify agent ${apiId}:`, error.message);
}
} else {
failedCount++;
}
}
console.log(
`📤 Update notification sent to ${notifiedCount} agents, ${failedCount} failed`,
);
return { notifiedCount, failedCount };
}
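The two helpers above define the wire format an agent receives for updates. As a concrete illustration of that payload (the values are made up; only the field names and the download URL pattern come from the surrounding code):

```js
// Example frame sent by pushUpdateNotification to a single connected agent.
const exampleFrame = {
  type: "update_notification",
  version: "1.3.0",
  force: false,
  downloadUrl: "/api/v1/agent/binary/1.3.0/linux-amd64",
  message: "Update available: 1.3.0",
};
// pushUpdateNotificationToAll sends the same frame minus downloadUrl to every open socket.
```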
// Notify all subscribers when connection status changes
function notifyConnectionChange(apiId, connected) {
const subscribers = connectionChangeSubscribers.get(apiId);
@@ -260,8 +170,6 @@ module.exports = {
broadcastSettingsUpdate,
pushReportNow,
pushSettingsUpdate,
pushUpdateNotification,
pushUpdateNotificationToAll,
// Expose read-only view of connected agents
getConnectedApiIds: () => Array.from(apiIdToSocket.keys()),
isConnected: (apiId) => {

View File

@@ -21,7 +21,7 @@ class GitHubUpdateCheck {
try {
// Get settings
const settings = await prisma.settings.findFirst();
const DEFAULT_GITHUB_REPO = "https://github.com/PatchMon/PatchMon.git";
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
let owner, repo;
@@ -52,7 +52,7 @@ class GitHubUpdateCheck {
}
// Read version from package.json
let currentVersion = "1.3.0"; // fallback
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {

View File

@@ -99,27 +99,16 @@ class QueueManager {
* Initialize all workers
*/
async initializeWorkers() {
// Optimized worker options to reduce Redis connections
const workerOptions = {
connection: redisConnection,
concurrency: 1, // Keep concurrency low to reduce connections
// Connection optimization
maxStalledCount: 1,
stalledInterval: 30000,
// Reduce connection churn
settings: {
stalledInterval: 30000,
maxStalledCount: 1,
},
};
// GitHub Update Check Worker
this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
QUEUE_NAMES.GITHUB_UPDATE_CHECK,
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
),
workerOptions,
{
connection: redisConnection,
concurrency: 1,
},
);
// Session Cleanup Worker
@@ -128,7 +117,10 @@ class QueueManager {
this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.SESSION_CLEANUP],
),
workerOptions,
{
connection: redisConnection,
concurrency: 1,
},
);
// Orphaned Repo Cleanup Worker
@@ -137,7 +129,10 @@ class QueueManager {
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
),
workerOptions,
{
connection: redisConnection,
concurrency: 1,
},
);
// Orphaned Package Cleanup Worker
@@ -146,33 +141,167 @@ class QueueManager {
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
),
workerOptions,
{
connection: redisConnection,
concurrency: 1,
},
);
// Agent Commands Worker
this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
QUEUE_NAMES.AGENT_COMMANDS,
async (job) => {
const { api_id, type } = job.data;
console.log(`Processing agent command: ${type} for ${api_id}`);
const { api_id, type, update_interval } = job.data || {};
console.log("[agent-commands] processing job", job.id, api_id, type);
// Send command via WebSocket based on type
if (type === "report_now") {
agentWs.pushReportNow(api_id);
} else if (type === "settings_update") {
// For settings update, we need additional data
const { update_interval } = job.data;
agentWs.pushSettingsUpdate(api_id, update_interval);
} else {
console.error(`Unknown agent command type: ${type}`);
// Log job attempt to history - use job.id as the unique identifier
const attemptNumber = job.attemptsMade || 1;
const historyId = job.id; // Single row per job, updated with each attempt
try {
if (!api_id || !type) {
throw new Error("invalid job data");
}
// Find host by api_id
const host = await prisma.hosts.findUnique({
where: { api_id },
select: { id: true },
});
// Ensure agent is connected; if not, retry later
if (!agentWs.isConnected(api_id)) {
const error = new Error("agent not connected");
// Log failed attempt
await prisma.job_history.upsert({
where: { id: historyId },
create: {
id: historyId,
job_id: job.id,
queue_name: QUEUE_NAMES.AGENT_COMMANDS,
job_name: type,
host_id: host?.id,
api_id,
status: "failed",
attempt_number: attemptNumber,
error_message: error.message,
created_at: new Date(),
updated_at: new Date(),
},
update: {
status: "failed",
attempt_number: attemptNumber,
error_message: error.message,
updated_at: new Date(),
},
});
console.log(
"[agent-commands] agent not connected, will retry",
api_id,
);
throw error;
}
// Process the command
let result;
if (type === "settings_update") {
agentWs.pushSettingsUpdate(api_id, update_interval);
console.log(
"[agent-commands] delivered settings_update",
api_id,
update_interval,
);
result = { delivered: true, update_interval };
} else if (type === "report_now") {
agentWs.pushReportNow(api_id);
console.log("[agent-commands] delivered report_now", api_id);
result = { delivered: true };
} else {
throw new Error("unsupported agent command");
}
// Log successful completion
await prisma.job_history.upsert({
where: { id: historyId },
create: {
id: historyId,
job_id: job.id,
queue_name: QUEUE_NAMES.AGENT_COMMANDS,
job_name: type,
host_id: host?.id,
api_id,
status: "completed",
attempt_number: attemptNumber,
output: result,
created_at: new Date(),
updated_at: new Date(),
completed_at: new Date(),
},
update: {
status: "completed",
attempt_number: attemptNumber,
output: result,
error_message: null,
updated_at: new Date(),
completed_at: new Date(),
},
});
return result;
} catch (error) {
// Log error to history (if not already logged above)
if (error.message !== "agent not connected") {
const host = await prisma.hosts
.findUnique({
where: { api_id },
select: { id: true },
})
.catch(() => null);
await prisma.job_history
.upsert({
where: { id: historyId },
create: {
id: historyId,
job_id: job.id,
queue_name: QUEUE_NAMES.AGENT_COMMANDS,
job_name: type || "unknown",
host_id: host?.id,
api_id,
status: "failed",
attempt_number: attemptNumber,
error_message: error.message,
created_at: new Date(),
updated_at: new Date(),
},
update: {
status: "failed",
attempt_number: attemptNumber,
error_message: error.message,
updated_at: new Date(),
},
})
.catch((err) =>
console.error("[agent-commands] failed to log error:", err),
);
}
throw error;
}
},
workerOptions,
{
connection: redisConnection,
concurrency: 10,
},
);
console.log(
"✅ All workers initialized with optimized connection settings",
);
// Add error handling for all workers
Object.values(this.workers).forEach((worker) => {
worker.on("error", (error) => {
console.error("Worker error:", error);
});
});
console.log("✅ All workers initialized");
}
/**

View File

@@ -1,5 +1,5 @@
const { getPrismaClient } = require("../../../config/prisma");
const { PrismaClient } = require("@prisma/client");
const prisma = getPrismaClient();
const prisma = new PrismaClient();
module.exports = { prisma };

View File

@@ -1,56 +1,16 @@
const IORedis = require("ioredis");
// Redis connection configuration with connection pooling
// Redis connection configuration
const redisConnection = {
host: process.env.REDIS_HOST || "localhost",
port: parseInt(process.env.REDIS_PORT, 10) || 6379,
password: process.env.REDIS_PASSWORD || undefined,
username: process.env.REDIS_USER || undefined,
db: parseInt(process.env.REDIS_DB, 10) || 0,
// Connection pooling settings
lazyConnect: true,
keepAlive: 30000,
connectTimeout: 30000, // Increased from 10s to 30s
commandTimeout: 30000, // Increased from 5s to 30s
enableReadyCheck: false,
// Reduce connection churn
family: 4, // Force IPv4
// Retry settings
retryDelayOnClusterDown: 300,
retryDelayOnFailover: 100,
maxRetriesPerRequest: null, // BullMQ requires this to be null
// Connection pool settings
maxLoadingTimeout: 30000,
};
// Create Redis connection with singleton pattern
let redisInstance = null;
// Create Redis connection
const redis = new IORedis(redisConnection);
function getRedisConnection() {
if (!redisInstance) {
redisInstance = new IORedis(redisConnection);
// Handle graceful shutdown
process.on("beforeExit", async () => {
await redisInstance.quit();
});
process.on("SIGINT", async () => {
await redisInstance.quit();
process.exit(0);
});
process.on("SIGTERM", async () => {
await redisInstance.quit();
process.exit(0);
});
}
return redisInstance;
}
module.exports = {
redis: getRedisConnection(),
redisConnection,
getRedisConnection,
};
module.exports = { redis, redisConnection };

View File

@@ -33,7 +33,7 @@ async function checkPublicRepo(owner, repo) {
try {
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
let currentVersion = "1.3.0"; // fallback
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {

View File

@@ -1,7 +1,7 @@
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const { v4: uuidv4 } = require("uuid");
const prisma = getPrismaClient();
const prisma = new PrismaClient();
// Cached settings instance
let cachedSettings = null;

View File

@@ -1,8 +1,8 @@
const jwt = require("jsonwebtoken");
const crypto = require("node:crypto");
const { getPrismaClient } = require("../config/prisma");
const { PrismaClient } = require("@prisma/client");
const prisma = getPrismaClient();
const prisma = new PrismaClient();
/**
* Session Manager - Handles secure session management with inactivity timeout

View File

@@ -39,15 +39,15 @@ These tags are available for both backend and frontend images as they are versio
environment:
DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
```
4. Set a Redis password in the Redis service command where it says:
4. Set a Redis password in the Redis service where it says:
```yaml
command: redis-server --requirepass your-redis-password-here
environment:
REDIS_PASSWORD: # CREATE A STRONG REDIS PASSWORD AND PUT IT HERE
```
Note: The Redis service uses a hardcoded password in the command line for better reliability and to avoid environment variable parsing issues.
5. Update the corresponding `REDIS_PASSWORD` in the backend service where it says:
```yaml
environment:
REDIS_PASSWORD: your-redis-password-here
REDIS_PASSWORD: REPLACE_YOUR_REDIS_PASSWORD_HERE
```
6. Generate a strong JWT secret. You can do this like so:
```bash
@@ -123,9 +123,6 @@ When you do this, updating to a new version requires manually updating the image
| -------------- | ------------------ | ---------------- |
| `REDIS_PASSWORD` | Redis password | **MUST BE SET!** |
> [!NOTE]
> The Redis service uses a hardcoded password in the command line (`redis-server --requirepass your-password`) instead of environment variables or configuration files. This approach eliminates parsing issues and provides better reliability. The password must be set in both the Redis command and the backend service environment variables.
#### Backend Service
##### Database Configuration

View File

@@ -8,7 +8,7 @@ ENV NODE_ENV=development \
PM_LOG_TO_CONSOLE=true \
PORT=3001
RUN apk add --no-cache openssl tini curl libc6-compat
RUN apk add --no-cache openssl tini curl
USER node
@@ -64,7 +64,7 @@ ENV NODE_ENV=production \
JWT_REFRESH_EXPIRES_IN=7d \
SESSION_INACTIVITY_TIMEOUT_MINUTES=30
RUN apk add --no-cache openssl tini curl libc6-compat
RUN apk add --no-cache openssl tini curl
USER node

View File

@@ -1,3 +1 @@
**/env.example
**/.env
**/.env.*

View File

@@ -8,7 +8,7 @@ log() {
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
}
# Function to extract version from agent script (legacy)
# Function to extract version from agent script
get_agent_version() {
local file="$1"
if [ -f "$file" ]; then
@@ -18,32 +18,6 @@ get_agent_version() {
fi
}
# Function to get version from binary using --help flag
get_binary_version() {
local binary="$1"
if [ -f "$binary" ]; then
# Make sure binary is executable
chmod +x "$binary" 2>/dev/null || true
# Try to execute the binary and extract version from help output
# The Go binary shows version in the --help output as "PatchMon Agent v1.3.0"
local version=$("$binary" --help 2>&1 | grep -oE 'v[0-9]+\.[0-9]+\.[0-9]+' | head -n 1 | tr -d 'v')
if [ -n "$version" ]; then
echo "$version"
else
# Fallback: try --version flag
version=$("$binary" --version 2>&1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -n 1)
if [ -n "$version" ]; then
echo "$version"
else
echo "0.0.0"
fi
fi
else
echo "0.0.0"
fi
}
# Function to compare versions (returns 0 if $1 > $2)
version_greater() {
# Use sort -V for version comparison
@@ -54,8 +28,6 @@ version_greater() {
update_agents() {
local backup_agent="/app/agents_backup/patchmon-agent.sh"
local current_agent="/app/agents/patchmon-agent.sh"
local backup_binary="/app/agents_backup/patchmon-agent-linux-amd64"
local current_binary="/app/agents/patchmon-agent-linux-amd64"
# Check if agents directory exists
if [ ! -d "/app/agents" ]; then
@@ -69,72 +41,51 @@ update_agents() {
return 0
fi
# Get versions from both script and binary
local backup_script_version=$(get_agent_version "$backup_agent")
local current_script_version=$(get_agent_version "$current_agent")
local backup_binary_version=$(get_binary_version "$backup_binary")
local current_binary_version=$(get_binary_version "$current_binary")
# Get versions
local backup_version=$(get_agent_version "$backup_agent")
local current_version=$(get_agent_version "$current_agent")
log "Agent version check:"
log " Image script version: ${backup_script_version}"
log " Volume script version: ${current_script_version}"
log " Image binary version: ${backup_binary_version}"
log " Volume binary version: ${current_binary_version}"
log " Image version: ${backup_version}"
log " Volume version: ${current_version}"
# Determine if update is needed
local needs_update=0
# Case 1: No agents in volume at all (first time setup)
if [ -z "$(find /app/agents -maxdepth 1 -type f 2>/dev/null | head -n 1)" ]; then
# Case 1: No agents in volume (first time setup)
if [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' 2>/dev/null | head -n 1)" ]; then
log "Agents directory is empty - performing initial copy"
needs_update=1
# Case 2: Binary exists but backup binary is newer
elif [ "$current_binary_version" != "0.0.0" ] && version_greater "$backup_binary_version" "$current_binary_version"; then
log "Newer agent binary available (${backup_binary_version} > ${current_binary_version})"
needs_update=1
# Case 3: No binary in volume, but shell scripts exist (legacy setup) - copy binaries
elif [ "$current_binary_version" = "0.0.0" ] && [ "$backup_binary_version" != "0.0.0" ]; then
log "No binary found in volume but backup has binaries - performing update"
# Case 2: Backup version is newer
elif version_greater "$backup_version" "$current_version"; then
log "Newer agent version available (${backup_version} > ${current_version})"
needs_update=1
else
log "Agents are up to date (binary: ${current_binary_version})"
log "Agents are up to date"
needs_update=0
fi
# Perform update if needed
if [ $needs_update -eq 1 ]; then
log "Updating agents to version ${backup_binary_version}..."
log "Updating agents to version ${backup_version}..."
# Create backup of existing agents if they exist
if [ -f "$current_agent" ] || [ -f "$current_binary" ]; then
if [ -f "$current_agent" ]; then
local backup_timestamp=$(date +%Y%m%d_%H%M%S)
mkdir -p "/app/agents/backups"
# Backup shell script if it exists
if [ -f "$current_agent" ]; then
cp "$current_agent" "/app/agents/backups/patchmon-agent.sh.${backup_timestamp}" 2>/dev/null || true
log "Previous script backed up"
fi
# Backup binary if it exists
if [ -f "$current_binary" ]; then
cp "$current_binary" "/app/agents/backups/patchmon-agent-linux-amd64.${backup_timestamp}" 2>/dev/null || true
log "Previous binary backed up"
fi
local backup_name="/app/agents/patchmon-agent.sh.backup.${backup_timestamp}"
cp "$current_agent" "$backup_name" 2>/dev/null || true
log "Previous agent backed up to: $(basename $backup_name)"
fi
# Copy new agents (both scripts and binaries)
# Copy new agents
cp -r /app/agents_backup/* /app/agents/
# Make agent binaries executable
chmod +x /app/agents/patchmon-agent-linux-* 2>/dev/null || true
# Verify update
local new_binary_version=$(get_binary_version "$current_binary")
if [ "$new_binary_version" = "$backup_binary_version" ]; then
log "✅ Agents successfully updated to version ${new_binary_version}"
local new_version=$(get_agent_version "$current_agent")
if [ "$new_version" = "$backup_version" ]; then
log "✅ Agents successfully updated to version ${new_version}"
else
log "⚠️ Warning: Agent update may have failed (expected: ${backup_binary_version}, got: ${new_binary_version})"
log "⚠️ Warning: Agent update may have failed (expected: ${backup_version}, got: ${new_version})"
fi
fi
}
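The update decision above reduces to "is the image's agent version newer than the volume's?", which the shell script answers with sort -V. A minimal JavaScript sketch of the same comparison, assuming plain dotted numeric versions (versionGreater is a hypothetical equivalent of the script's version_greater, not code from this commit):
// Hypothetical equivalent of the entrypoint's sort -V based version check.
function versionGreater(a, b) {
	const pa = a.split(".").map(Number);
	const pb = b.split(".").map(Number);
	for (let i = 0; i < Math.max(pa.length, pb.length); i++) {
		const x = pa[i] || 0;
		const y = pb[i] || 0;
		if (x !== y) return x > y;
	}
	return false; // equal versions do not trigger an update
}
// versionGreater("1.3.0", "1.2.9") === true, so the copy would run.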

View File

@@ -50,13 +50,6 @@ services:
SERVER_HOST: localhost
SERVER_PORT: 3000
CORS_ORIGIN: http://localhost:3000
# Rate Limiting (times in milliseconds)
RATE_LIMIT_WINDOW_MS: 900000
RATE_LIMIT_MAX: 5000
AUTH_RATE_LIMIT_WINDOW_MS: 600000
AUTH_RATE_LIMIT_MAX: 500
AGENT_RATE_LIMIT_WINDOW_MS: 60000
AGENT_RATE_LIMIT_MAX: 1000
# Redis Configuration
REDIS_HOST: redis
REDIS_PORT: 6379

View File

@@ -1,19 +1,3 @@
# Change 3 Passwords in this file:
# Generate passwords with 'openssl rand -hex 64'
#
# 1. The database password in the environment variable POSTGRES_PASSWORD
# 2. The redis password in the command redis-server --requirepass your-redis-password-here
# 3. The jwt secret in the environment variable JWT_SECRET
#
#
# Change 2 URL areas in this file:
# 1. Set CORS_ORIGIN to the URL you will use to access the PatchMon frontend
# 2. Set SERVER_PROTOCOL, SERVER_HOST and SERVER_PORT to what the Linux agents will use to reach PatchMon
#
# This is generally the same as your CORS_ORIGIN URL; in some cases it may differ - the SERVER_* variables are used by the agent scripts for the server connection.
# You can also change this in the front-end, but with docker-compose it is overwritten by the variables set here.
name: patchmon
services:
@@ -23,7 +7,7 @@ services:
environment:
POSTGRES_DB: patchmon_db
POSTGRES_USER: patchmon_user
POSTGRES_PASSWORD: # CREATE A STRONG DB PASSWORD AND PUT IT HERE
POSTGRES_PASSWORD: # CREATE A STRONG PASSWORD AND PUT IT HERE
volumes:
- postgres_data:/var/lib/postgresql/data
healthcheck:
@@ -35,11 +19,14 @@ services:
redis:
image: redis:7-alpine
restart: unless-stopped
command: redis-server --requirepass your-redis-password-here # CHANGE THIS TO YOUR REDIS PASSWORD
command: redis-server /usr/local/etc/redis/redis.conf
environment:
REDIS_PASSWORD: # CREATE A STRONG REDIS PASSWORD AND PUT IT HERE
volumes:
- redis_data:/data
- ./docker/redis.conf:/usr/local/etc/redis/redis.conf:ro
healthcheck:
test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "your-redis-password-here", "ping"] # CHANGE THIS TO YOUR REDIS PASSWORD
test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "${REDIS_PASSWORD}", "ping"]
interval: 3s
timeout: 5s
retries: 7
@@ -51,22 +38,15 @@ services:
environment:
LOG_LEVEL: info
DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE
JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE - Generate with 'openssl rand -hex 64'
SERVER_PROTOCOL: http
SERVER_HOST: localhost
SERVER_PORT: 3000
CORS_ORIGIN: http://localhost:3000
# Rate Limiting (times in milliseconds)
RATE_LIMIT_WINDOW_MS: 900000
RATE_LIMIT_MAX: 5000
AUTH_RATE_LIMIT_WINDOW_MS: 600000
AUTH_RATE_LIMIT_MAX: 500
AGENT_RATE_LIMIT_WINDOW_MS: 60000
AGENT_RATE_LIMIT_MAX: 1000
# Redis Configuration
REDIS_HOST: redis
REDIS_PORT: 6379
REDIS_PASSWORD: your-redis-password-here
REDIS_PASSWORD: REPLACE_YOUR_REDIS_PASSWORD_HERE
REDIS_DB: 0
volumes:
- agent_files:/app/agents
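The compose change above only wires REDIS_HOST, REDIS_PORT, REDIS_PASSWORD and REDIS_DB into the backend's environment; how the backend consumes them is outside this diff. A minimal sketch assuming an ioredis-style client (the client BullMQ builds on); the variable names come from the compose file, everything else is illustrative:
// Hypothetical sketch, not shown in this diff: connecting with the variables
// the compose file injects, using standard ioredis connection options.
const Redis = require("ioredis");

const redis = new Redis({
	host: process.env.REDIS_HOST || "redis",
	port: Number(process.env.REDIS_PORT) || 6379,
	password: process.env.REDIS_PASSWORD,
	db: Number(process.env.REDIS_DB) || 0,
});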

View File

@@ -24,38 +24,6 @@ server {
add_header X-XSS-Protection "1; mode=block" always;
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
# Bull Board proxy - must come before the root location to avoid conflicts
location /bullboard {
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header Cookie $http_cookie; # Forward cookies to backend
proxy_cache_bypass $http_upgrade;
proxy_read_timeout 300s;
proxy_connect_timeout 75s;
# Enable cookie passthrough in both directions
proxy_pass_header Set-Cookie;
proxy_cookie_path / /;
# Preserve original client IP through proxy chain
proxy_set_header X-Original-Forwarded-For $http_x_forwarded_for;
# CORS headers for Bull Board - let backend handle CORS properly
# Note: Backend handles CORS with proper origin validation and credentials
# Handle preflight requests
if ($request_method = 'OPTIONS') {
return 204;
}
}
# Handle client-side routing
location / {
try_files $uri $uri/ /index.html;
@@ -70,19 +38,13 @@ server {
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
# For the Websocket connection:
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_cache_bypass $http_upgrade;
proxy_read_timeout 300s;
proxy_connect_timeout 75s;
# Preserve original client IP through proxy chain
proxy_set_header X-Original-Forwarded-For $http_x_forwarded_for;
# CORS headers for API calls - let backend handle CORS properly
# Note: Backend handles CORS with proper origin validation and credentials
# CORS headers for API calls
add_header Access-Control-Allow-Origin * always;
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
# Handle preflight requests
if ($request_method = 'OPTIONS') {
@@ -90,9 +52,66 @@ server {
}
}
# SSE (Server-Sent Events) specific configuration
location /api/v1/ws/status/ {
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
# Static assets caching (exclude Bull Board assets)
location ~* ^/(?!bullboard).*\.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
# Critical SSE settings
proxy_buffering off;
proxy_cache off;
proxy_set_header Connection '';
proxy_http_version 1.1;
chunked_transfer_encoding off;
# Timeout settings for long-lived connections
proxy_read_timeout 24h;
proxy_send_timeout 24h;
proxy_connect_timeout 60s;
# Disable nginx buffering for real-time streaming
proxy_request_buffering off;
proxy_max_temp_file_size 0;
# CORS headers for SSE
add_header Access-Control-Allow-Origin * always;
add_header Access-Control-Allow-Methods "GET, OPTIONS" always;
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
# Handle preflight requests
if ($request_method = 'OPTIONS') {
return 204;
}
}
# WebSocket upgrade handling
location /api/v1/agents/ws {
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
# WebSocket timeout settings
proxy_read_timeout 24h;
proxy_send_timeout 24h;
proxy_connect_timeout 60s;
# Disable buffering for WebSocket
proxy_buffering off;
proxy_cache off;
}
# Static assets caching
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
expires 1y;
add_header Cache-Control "public, immutable";
}
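The two new location blocks exist so long-lived connections survive the proxy: buffering is disabled and timeouts are stretched for SSE, and the Upgrade/Connection headers enable the WebSocket handshake. On the browser side such endpoints are normally consumed with the standard EventSource and WebSocket APIs; the snippet below is hypothetical usage only (the path suffix and protocol choice are assumptions, and no client code appears in this hunk):
// Hypothetical client usage of the proxied endpoints (illustrative only).
const hostId = "example-host-id"; // placeholder path segment
const events = new EventSource(`/api/v1/ws/status/${hostId}`);
events.onmessage = (e) => console.log("status event:", e.data);

// ws:// vs wss:// follows the page protocol; nginx forwards the Upgrade headers.
const scheme = window.location.protocol === "https:" ? "wss" : "ws";
const socket = new WebSocket(`${scheme}://${window.location.host}/api/v1/agents/ws`);
socket.onopen = () => console.log("agent websocket connected");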

docker/redis.conf (new file, 35 lines added)
View File

@@ -0,0 +1,35 @@
# Redis Configuration for PatchMon Production
# Security settings
requirepass ${REDIS_PASSWORD}
rename-command FLUSHDB ""
rename-command FLUSHALL ""
rename-command DEBUG ""
rename-command CONFIG "CONFIG_${REDIS_PASSWORD}"
# Memory management
maxmemory 256mb
maxmemory-policy allkeys-lru
# Persistence settings
save 900 1
save 300 10
save 60 10000
# Logging
loglevel notice
logfile ""
# Network security
bind 127.0.0.1
protected-mode yes
# Performance tuning
tcp-keepalive 300
timeout 0
# Disable dangerous commands
rename-command SHUTDOWN "SHUTDOWN_${REDIS_PASSWORD}"
rename-command KEYS ""
rename-command MONITOR ""
rename-command SLAVEOF ""
rename-command REPLICAOF ""

View File

@@ -1,10 +0,0 @@
# Frontend Environment Configuration
# This file is used by Vite during build and runtime
# API URL - Update this to match your backend server
VITE_API_URL=http://localhost:3001/api/v1
# Application Metadata
VITE_APP_NAME=PatchMon
VITE_APP_VERSION=1.3.0

View File

@@ -1,7 +1,7 @@
{
"name": "patchmon-frontend",
"private": true,
"version": "1.3.0",
"version": "1.2.9",
"license": "AGPL-3.0",
"type": "module",
"scripts": {
@@ -20,7 +20,7 @@
"clsx": "^2.1.1",
"cors": "^2.8.5",
"date-fns": "^4.1.0",
"express": "^4.21.2",
"express": "^5.0.0",
"http-proxy-middleware": "^3.0.3",
"lucide-react": "^0.468.0",
"react": "^18.3.1",

View File

@@ -2,7 +2,6 @@ import { AlertCircle, CheckCircle, Shield, UserPlus } from "lucide-react";
import { useId, useState } from "react";
import { useNavigate } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";
import { isCorsError } from "../utils/api";
const FirstTimeAdminSetup = () => {
const { login, setAuthState } = useAuth();
@@ -122,39 +121,11 @@ const FirstTimeAdminSetup = () => {
}, 2000);
}
} else {
// Handle HTTP error responses (like 500 CORS errors)
console.log("HTTP error response:", response.status, data);
// Check if this is a CORS error based on the response data
if (
data.message?.includes("Not allowed by CORS") ||
data.message?.includes("CORS") ||
data.error?.includes("CORS")
) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else {
setError(data.error || "Failed to create admin user");
}
setError(data.error || "Failed to create admin user");
}
} catch (error) {
console.error("Setup error:", error);
// Check for CORS/network errors first
if (isCorsError(error)) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else if (
error.name === "TypeError" &&
error.message?.includes("Failed to fetch")
) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else {
setError("Network error. Please try again.");
}
setError("Network error. Please try again.");
} finally {
setIsLoading(false);
}

View File

@@ -117,7 +117,7 @@ const Layout = ({ children }) => {
name: "Automation",
href: "/automation",
icon: RefreshCw,
new: true,
beta: true,
});
if (canViewReports()) {
@@ -440,11 +440,6 @@ const Layout = ({ children }) => {
Beta
</span>
)}
{subItem.new && (
<span className="text-xs bg-green-100 dark:bg-green-900 text-green-600 dark:text-green-200 px-1.5 py-0.5 rounded font-medium">
New
</span>
)}
</span>
</Link>
)}
@@ -721,11 +716,6 @@ const Layout = ({ children }) => {
Beta
</span>
)}
{subItem.new && (
<span className="text-xs bg-green-100 dark:bg-green-900 text-green-600 dark:text-green-200 px-1.5 py-0.5 rounded font-medium">
New
</span>
)}
{subItem.showUpgradeIcon && (
<UpgradeNotificationIcon className="h-3 w-3" />
)}

View File

@@ -1,453 +1,388 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import {
AlertCircle,
CheckCircle,
Clock,
Download,
ExternalLink,
RefreshCw,
X,
} from "lucide-react";
import { useEffect, useState } from "react";
import api from "../../utils/api";
import { useMutation, useQuery } from "@tanstack/react-query";
import { AlertCircle, Code, Download, Plus, Shield, X } from "lucide-react";
import { useId, useState } from "react";
import { agentFileAPI, settingsAPI } from "../../utils/api";
const AgentManagementTab = () => {
const _queryClient = useQueryClient();
const [toast, setToast] = useState(null);
const scriptFileId = useId();
const scriptContentId = useId();
const [showUploadModal, setShowUploadModal] = useState(false);
// Auto-hide toast after 5 seconds
useEffect(() => {
if (toast) {
const timer = setTimeout(() => {
setToast(null);
}, 5000);
return () => clearTimeout(timer);
}
}, [toast]);
// Agent file queries and mutations
const {
data: agentFileInfo,
isLoading: agentFileLoading,
error: agentFileError,
refetch: refetchAgentFile,
} = useQuery({
queryKey: ["agentFile"],
queryFn: () => agentFileAPI.getInfo().then((res) => res.data),
});
const showToast = (message, type = "success") => {
setToast({ message, type });
// Fetch settings for dynamic curl flags
const { data: settings } = useQuery({
queryKey: ["settings"],
queryFn: () => settingsAPI.get().then((res) => res.data),
});
// Helper function to get curl flags based on settings
const _getCurlFlags = () => {
return settings?.ignore_ssl_self_signed ? "-sk" : "-s";
};
// Agent version queries
const {
data: versionInfo,
isLoading: versionLoading,
error: versionError,
refetch: refetchVersion,
} = useQuery({
queryKey: ["agentVersion"],
queryFn: async () => {
try {
const response = await api.get("/agent/version");
console.log("🔍 Frontend received version info:", response.data);
return response.data;
} catch (error) {
console.error("Failed to fetch version info:", error);
throw error;
}
},
refetchInterval: 5 * 60 * 1000, // Refetch every 5 minutes
enabled: true, // Always enabled
retry: 3, // Retry failed requests
});
const {
data: _availableVersions,
isLoading: _versionsLoading,
error: _versionsError,
} = useQuery({
queryKey: ["agentVersions"],
queryFn: async () => {
try {
const response = await api.get("/agent/versions");
console.log("🔍 Frontend received available versions:", response.data);
return response.data;
} catch (error) {
console.error("Failed to fetch available versions:", error);
throw error;
}
},
enabled: true,
retry: 3,
});
const checkUpdatesMutation = useMutation({
mutationFn: async () => {
// First check GitHub for updates
await api.post("/agent/version/check");
// Then refresh current agent version detection
await api.post("/agent/version/refresh");
},
const uploadAgentMutation = useMutation({
mutationFn: (scriptContent) =>
agentFileAPI.upload(scriptContent).then((res) => res.data),
onSuccess: () => {
refetchVersion();
showToast("Successfully checked for updates", "success");
refetchAgentFile();
setShowUploadModal(false);
},
onError: (error) => {
console.error("Check updates error:", error);
showToast(`Failed to check for updates: ${error.message}`, "error");
console.error("Upload agent error:", error);
},
});
const downloadUpdateMutation = useMutation({
mutationFn: async () => {
// Download the latest binaries
const downloadResult = await api.post("/agent/version/download");
// Refresh current agent version detection after download
await api.post("/agent/version/refresh");
// Return the download result for success handling
return downloadResult;
},
onSuccess: (data) => {
console.log("Download completed:", data);
console.log("Download response data:", data.data);
refetchVersion();
// Show success message
const message =
data.data?.message || "Agent binaries downloaded successfully";
showToast(message, "success");
},
onError: (error) => {
console.error("Download update error:", error);
showToast(`Download failed: ${error.message}`, "error");
},
});
const getVersionStatus = () => {
console.log("🔍 getVersionStatus called with:", {
versionError,
versionInfo,
versionLoading,
});
if (versionError) {
console.log("❌ Version error detected:", versionError);
return {
status: "error",
message: "Failed to load version info",
Icon: AlertCircle,
color: "text-red-600",
};
}
if (!versionInfo || versionLoading) {
console.log("⏳ Loading state:", { versionInfo, versionLoading });
return {
status: "loading",
message: "Loading version info...",
Icon: RefreshCw,
color: "text-gray-600",
};
}
// Use the backend's updateStatus for proper semver comparison
switch (versionInfo.updateStatus) {
case "update-available":
return {
status: "update-available",
message: `Update available: ${versionInfo.latestVersion}`,
Icon: Clock,
color: "text-yellow-600",
};
case "newer-version":
return {
status: "newer-version",
message: `Newer version running: ${versionInfo.currentVersion}`,
Icon: CheckCircle,
color: "text-blue-600",
};
case "up-to-date":
return {
status: "up-to-date",
message: `Up to date: ${versionInfo.latestVersion}`,
Icon: CheckCircle,
color: "text-green-600",
};
case "no-agent":
return {
status: "no-agent",
message: "No agent binary found",
Icon: AlertCircle,
color: "text-orange-600",
};
case "github-unavailable":
return {
status: "github-unavailable",
message: `Agent running: ${versionInfo.currentVersion} (GitHub API unavailable)`,
Icon: CheckCircle,
color: "text-purple-600",
};
case "no-data":
return {
status: "no-data",
message: "No version data available",
Icon: AlertCircle,
color: "text-gray-600",
};
default:
return {
status: "unknown",
message: "Version status unknown",
Icon: AlertCircle,
color: "text-gray-600",
};
}
};
const versionStatus = getVersionStatus();
const StatusIcon = versionStatus.Icon;
return (
<div className="space-y-6">
{/* Toast Notification */}
{toast && (
<div
className={`fixed top-4 right-4 z-50 max-w-md rounded-lg shadow-lg border-2 p-4 flex items-start space-x-3 animate-in slide-in-from-top-5 ${
toast.type === "success"
? "bg-green-50 dark:bg-green-900/90 border-green-500 dark:border-green-600"
: "bg-red-50 dark:bg-red-900/90 border-red-500 dark:border-red-600"
}`}
>
<div
className={`flex-shrink-0 rounded-full p-1 ${
toast.type === "success"
? "bg-green-100 dark:bg-green-800"
: "bg-red-100 dark:bg-red-800"
}`}
>
{toast.type === "success" ? (
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400" />
) : (
<AlertCircle className="h-5 w-5 text-red-600 dark:text-red-400" />
)}
</div>
<div className="flex-1">
<p
className={`text-sm font-medium ${
toast.type === "success"
? "text-green-800 dark:text-green-100"
: "text-red-800 dark:text-red-100"
}`}
>
{toast.message}
</p>
{/* Header */}
<div className="flex items-center justify-between mb-6">
<div>
<div className="flex items-center mb-2">
<Code className="h-6 w-6 text-primary-600 mr-3" />
<h2 className="text-xl font-semibold text-secondary-900 dark:text-white">
Agent File Management
</h2>
</div>
<p className="text-sm text-secondary-500 dark:text-secondary-300">
Manage the PatchMon agent script file used for installations and
updates
</p>
</div>
<div className="flex items-center gap-2">
<button
type="button"
onClick={() => setToast(null)}
className={`flex-shrink-0 rounded-lg p-1 transition-colors ${
toast.type === "success"
? "hover:bg-green-100 dark:hover:bg-green-800 text-green-600 dark:text-green-400"
: "hover:bg-red-100 dark:hover:bg-red-800 text-red-600 dark:text-red-400"
}`}
onClick={() => {
const url = "/api/v1/hosts/agent/download";
const link = document.createElement("a");
link.href = url;
link.download = "patchmon-agent.sh";
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
}}
className="btn-outline flex items-center gap-2"
>
<X className="h-4 w-4" />
<Download className="h-4 w-4" />
Download
</button>
<button
type="button"
onClick={() => setShowUploadModal(true)}
className="btn-primary flex items-center gap-2"
>
<Plus className="h-4 w-4" />
Replace Script
</button>
</div>
</div>
{/* Content */}
{agentFileLoading ? (
<div className="flex items-center justify-center py-8">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary-600"></div>
</div>
) : agentFileError ? (
<div className="text-center py-8">
<p className="text-red-600 dark:text-red-400">
Error loading agent file: {agentFileError.message}
</p>
</div>
) : !agentFileInfo?.exists ? (
<div className="text-center py-8">
<Code className="h-12 w-12 text-secondary-400 mx-auto mb-4" />
<p className="text-secondary-500 dark:text-secondary-300">
No agent script found
</p>
<p className="text-sm text-secondary-400 dark:text-secondary-400 mt-2">
Upload an agent script to get started
</p>
</div>
) : (
<div className="space-y-6">
{/* Agent File Info */}
<div className="bg-secondary-50 dark:bg-secondary-700 rounded-lg p-6">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-4">
Current Agent Script
</h3>
<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
<div className="flex items-center gap-2">
<Code className="h-4 w-4 text-blue-600 dark:text-blue-400" />
<span className="text-sm font-medium text-secondary-700 dark:text-secondary-300">
Version:
</span>
<span className="text-sm text-secondary-900 dark:text-white font-mono">
{agentFileInfo.version}
</span>
</div>
<div className="flex items-center gap-2">
<Download className="h-4 w-4 text-green-600 dark:text-green-400" />
<span className="text-sm font-medium text-secondary-700 dark:text-secondary-300">
Size:
</span>
<span className="text-sm text-secondary-900 dark:text-white">
{agentFileInfo.sizeFormatted}
</span>
</div>
<div className="flex items-center gap-2">
<Code className="h-4 w-4 text-yellow-600 dark:text-yellow-400" />
<span className="text-sm font-medium text-secondary-700 dark:text-secondary-300">
Modified:
</span>
<span className="text-sm text-secondary-900 dark:text-white">
{new Date(agentFileInfo.lastModified).toLocaleDateString()}
</span>
</div>
</div>
</div>
{/* Usage Instructions */}
<div className="bg-blue-50 dark:bg-blue-900 border border-blue-200 dark:border-blue-700 rounded-md p-4">
<div className="flex">
<Shield className="h-5 w-5 text-blue-400 dark:text-blue-300" />
<div className="ml-3">
<h3 className="text-sm font-medium text-blue-800 dark:text-blue-200">
Agent Script Usage
</h3>
<div className="mt-2 text-sm text-blue-700 dark:text-blue-300">
<p className="mb-2">This script is used for:</p>
<ul className="list-disc list-inside space-y-1">
<li>New agent installations via the install script</li>
<li>
Agent downloads from the /api/v1/hosts/agent/download
endpoint
</li>
<li>Manual agent deployments and updates</li>
</ul>
</div>
</div>
</div>
</div>
{/* Uninstall Instructions */}
<div className="bg-red-50 dark:bg-red-900 border border-red-200 dark:border-red-700 rounded-md p-4">
<div className="flex">
<Shield className="h-5 w-5 text-red-400 dark:text-red-300" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Agent Uninstall Command
</h3>
<div className="mt-2 text-sm text-red-700 dark:text-red-300">
<p className="mb-3">
To completely remove PatchMon from a host:
</p>
{/* Go Agent Uninstall */}
<div className="mb-3">
<div className="space-y-2">
<div className="flex items-center gap-2">
<div className="bg-red-100 dark:bg-red-800 rounded p-2 font-mono text-xs flex-1">
sudo patchmon-agent uninstall
</div>
<button
type="button"
onClick={() => {
navigator.clipboard.writeText(
"sudo patchmon-agent uninstall",
);
}}
className="px-2 py-1 bg-red-200 dark:bg-red-700 text-red-800 dark:text-red-200 rounded text-xs hover:bg-red-300 dark:hover:bg-red-600 transition-colors"
>
Copy
</button>
</div>
<div className="text-xs text-red-600 dark:text-red-400">
Options: <code>--remove-config</code>,{" "}
<code>--remove-logs</code>, <code>--remove-all</code>,{" "}
<code>--force</code>
</div>
</div>
</div>
<p className="mt-2 text-xs">
This command will remove all PatchMon files,
configuration, and crontab entries
</p>
</div>
</div>
</div>
</div>
</div>
)}
{/* Header */}
<div className="mb-6">
<h2 className="text-2xl font-bold text-secondary-900 dark:text-white mb-2">
Agent Version Management
</h2>
<p className="text-secondary-600 dark:text-secondary-400">
Monitor and manage agent versions across your infrastructure
</p>
</div>
{/* Agent Upload Modal */}
{showUploadModal && (
<AgentUploadModal
isOpen={showUploadModal}
onClose={() => setShowUploadModal(false)}
onSubmit={uploadAgentMutation.mutate}
isLoading={uploadAgentMutation.isPending}
error={uploadAgentMutation.error}
scriptFileId={scriptFileId}
scriptContentId={scriptContentId}
/>
)}
</div>
);
};
{/* Status Banner */}
<div
className={`rounded-xl shadow-sm p-6 border-2 ${
versionStatus.status === "up-to-date"
? "bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800"
: versionStatus.status === "update-available"
? "bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800"
: versionStatus.status === "no-agent"
? "bg-orange-50 dark:bg-orange-900/20 border-orange-200 dark:border-orange-800"
: "bg-white dark:bg-secondary-800 border-secondary-200 dark:border-secondary-600"
}`}
>
<div className="flex items-start justify-between">
<div className="flex items-start space-x-4">
<div
className={`p-3 rounded-lg ${
versionStatus.status === "up-to-date"
? "bg-green-100 dark:bg-green-800"
: versionStatus.status === "update-available"
? "bg-yellow-100 dark:bg-yellow-800"
: versionStatus.status === "no-agent"
? "bg-orange-100 dark:bg-orange-800"
: "bg-secondary-100 dark:bg-secondary-700"
}`}
// Agent Upload Modal Component
const AgentUploadModal = ({
isOpen,
onClose,
onSubmit,
isLoading,
error,
scriptFileId,
scriptContentId,
}) => {
const [scriptContent, setScriptContent] = useState("");
const [uploadError, setUploadError] = useState("");
const handleSubmit = (e) => {
e.preventDefault();
setUploadError("");
if (!scriptContent.trim()) {
setUploadError("Script content is required");
return;
}
if (!scriptContent.trim().startsWith("#!/")) {
setUploadError(
"Script must start with a shebang (#!/bin/bash or #!/bin/sh)",
);
return;
}
onSubmit(scriptContent);
};
const handleFileUpload = (e) => {
const file = e.target.files[0];
if (file) {
const reader = new FileReader();
reader.onload = (event) => {
setScriptContent(event.target.result);
setUploadError("");
};
reader.readAsText(file);
}
};
if (!isOpen) return null;
return (
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div className="bg-white dark:bg-secondary-800 rounded-lg shadow-xl max-w-4xl w-full mx-4 max-h-[90vh] overflow-y-auto">
<div className="px-6 py-4 border-b border-secondary-200 dark:border-secondary-600">
<div className="flex items-center justify-between">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
Replace Agent Script
</h3>
<button
type="button"
onClick={onClose}
className="text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300"
>
{StatusIcon && (
<StatusIcon className={`h-6 w-6 ${versionStatus.color}`} />
)}
</div>
<X className="h-5 w-5" />
</button>
</div>
</div>
<form onSubmit={handleSubmit} className="px-6 py-4">
<div className="space-y-4">
<div>
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white mb-1">
{versionStatus.message}
</h3>
<p className="text-sm text-secondary-600 dark:text-secondary-400">
{versionStatus.status === "up-to-date" &&
"All agent binaries are current"}
{versionStatus.status === "update-available" &&
"A newer version is available for download"}
{versionStatus.status === "no-agent" &&
"Download agent binaries to get started"}
{versionStatus.status === "github-unavailable" &&
"Cannot check for updates at this time"}
{![
"up-to-date",
"update-available",
"no-agent",
"github-unavailable",
].includes(versionStatus.status) &&
"Version information unavailable"}
<label
htmlFor={scriptFileId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-2"
>
Upload Script File
</label>
<input
id={scriptFileId}
type="file"
accept=".sh"
onChange={handleFileUpload}
className="block w-full text-sm text-secondary-500 dark:text-secondary-400 file:mr-4 file:py-2 file:px-4 file:rounded-md file:border-0 file:text-sm file:font-medium file:bg-primary-50 file:text-primary-700 hover:file:bg-primary-100 dark:file:bg-primary-900 dark:file:text-primary-200"
/>
<p className="mt-1 text-xs text-secondary-500 dark:text-secondary-400">
Select a .sh file to upload, or paste the script content below
</p>
</div>
</div>
<button
type="button"
onClick={() => checkUpdatesMutation.mutate()}
disabled={checkUpdatesMutation.isPending}
className="flex items-center px-4 py-2 bg-white dark:bg-secondary-700 text-secondary-700 dark:text-secondary-200 rounded-lg hover:bg-secondary-50 dark:hover:bg-secondary-600 border border-secondary-300 dark:border-secondary-600 disabled:opacity-50 disabled:cursor-not-allowed transition-all duration-200 shadow-sm hover:shadow"
>
<RefreshCw
className={`h-4 w-4 mr-2 ${checkUpdatesMutation.isPending ? "animate-spin" : ""}`}
/>
{checkUpdatesMutation.isPending
? "Checking..."
: "Check for Updates"}
</button>
</div>
</div>
{/* Version Information Grid */}
<div className="grid grid-cols-1 md:grid-cols-3 gap-6">
{/* Current Version Card */}
<div className="bg-white dark:bg-secondary-800 rounded-xl shadow-sm p-6 border border-secondary-200 dark:border-secondary-600 hover:shadow-md transition-shadow duration-200">
<h4 className="text-sm font-medium text-secondary-500 dark:text-secondary-400 mb-2">
Current Version
</h4>
<p className="text-2xl font-bold text-secondary-900 dark:text-white">
{versionInfo?.currentVersion || (
<span className="text-lg text-secondary-400 dark:text-secondary-500">
Not detected
</span>
)}
</p>
</div>
{/* Latest Version Card */}
<div className="bg-white dark:bg-secondary-800 rounded-xl shadow-sm p-6 border border-secondary-200 dark:border-secondary-600 hover:shadow-md transition-shadow duration-200">
<h4 className="text-sm font-medium text-secondary-500 dark:text-secondary-400 mb-2">
Latest Available
</h4>
<p className="text-2xl font-bold text-secondary-900 dark:text-white">
{versionInfo?.latestVersion || (
<span className="text-lg text-secondary-400 dark:text-secondary-500">
Unknown
</span>
)}
</p>
</div>
{/* Last Checked Card */}
<div className="bg-white dark:bg-secondary-800 rounded-xl shadow-sm p-6 border border-secondary-200 dark:border-secondary-600 hover:shadow-md transition-shadow duration-200">
<h4 className="text-sm font-medium text-secondary-500 dark:text-secondary-400 mb-2">
Last Checked
</h4>
<p className="text-lg font-semibold text-secondary-900 dark:text-white">
{versionInfo?.lastChecked
? new Date(versionInfo.lastChecked).toLocaleString("en-US", {
month: "short",
day: "numeric",
hour: "2-digit",
minute: "2-digit",
})
: "Never"}
</p>
</div>
</div>
{/* Download Updates Section */}
<div className="bg-gradient-to-br from-primary-50 to-blue-50 dark:from-secondary-800 dark:to-secondary-800 rounded-xl shadow-sm p-8 border border-primary-200 dark:border-secondary-600">
<div className="flex items-center justify-between">
<div className="flex-1">
<h3 className="text-xl font-bold text-secondary-900 dark:text-white mb-3">
{!versionInfo?.currentVersion
? "Get Started with Agent Binaries"
: versionStatus.status === "update-available"
? "New Agent Version Available"
: "Agent Binaries"}
</h3>
<p className="text-secondary-700 dark:text-secondary-300 mb-4">
{!versionInfo?.currentVersion
? "No agent binaries detected. Download from GitHub to begin managing your agents."
: versionStatus.status === "update-available"
? `A new agent version (${versionInfo.latestVersion}) is available. Download the latest binaries from GitHub.`
: "Download or redownload agent binaries from GitHub."}
</p>
<div className="flex items-center space-x-4">
<button
type="button"
onClick={() => downloadUpdateMutation.mutate()}
disabled={downloadUpdateMutation.isPending}
className="flex items-center px-6 py-3 bg-primary-600 text-white rounded-lg hover:bg-primary-700 disabled:opacity-50 disabled:cursor-not-allowed transition-all duration-200 shadow-md hover:shadow-lg font-medium"
<div>
<label
htmlFor={scriptContentId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-2"
>
{downloadUpdateMutation.isPending ? (
<>
<RefreshCw className="h-5 w-5 mr-2 animate-spin" />
Downloading...
</>
) : (
<>
<Download className="h-5 w-5 mr-2" />
{!versionInfo?.currentVersion
? "Download Binaries"
: versionStatus.status === "update-available"
? "Download New Agent Version"
: "Redownload Binaries"}
</>
)}
</button>
<a
href="https://github.com/PatchMon/PatchMon-agent/releases"
target="_blank"
rel="noopener noreferrer"
className="flex items-center px-4 py-3 text-secondary-700 dark:text-secondary-300 hover:text-primary-600 dark:hover:text-primary-400 transition-colors duration-200 font-medium"
>
<ExternalLink className="h-4 w-4 mr-2" />
View on GitHub
</a>
Script Content *
</label>
<textarea
id={scriptContentId}
value={scriptContent}
onChange={(e) => {
setScriptContent(e.target.value);
setUploadError("");
}}
rows={15}
className="block w-full border border-secondary-300 dark:border-secondary-600 rounded-md shadow-sm focus:ring-primary-500 focus:border-primary-500 bg-white dark:bg-secondary-700 text-secondary-900 dark:text-white font-mono text-sm"
placeholder="#!/bin/bash&#10;&#10;# PatchMon Agent Script&#10;VERSION=&quot;1.0.0&quot;&#10;&#10;# Your script content here..."
/>
</div>
</div>
</div>
</div>
{/* Supported Architectures */}
{versionInfo?.supportedArchitectures &&
versionInfo.supportedArchitectures.length > 0 && (
<div className="bg-white dark:bg-secondary-800 rounded-xl shadow-sm p-6 border border-secondary-200 dark:border-secondary-600">
<h4 className="text-lg font-semibold text-secondary-900 dark:text-white mb-4">
Supported Architectures
</h4>
<div className="grid grid-cols-2 md:grid-cols-4 gap-3">
{versionInfo.supportedArchitectures.map((arch) => (
<div
key={arch}
className="flex items-center justify-center px-4 py-3 bg-secondary-50 dark:bg-secondary-700 rounded-lg border border-secondary-200 dark:border-secondary-600"
>
<code className="text-sm font-mono text-secondary-700 dark:text-secondary-300">
{arch}
</code>
{(uploadError || error) && (
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-md p-3">
<p className="text-sm text-red-800 dark:text-red-200">
{uploadError ||
error?.response?.data?.error ||
error?.message}
</p>
</div>
)}
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-md p-3">
<div className="flex">
<AlertCircle className="h-4 w-4 text-yellow-600 dark:text-yellow-400 mr-2 mt-0.5" />
<div className="text-sm text-yellow-800 dark:text-yellow-200">
<p className="font-medium">Important:</p>
<ul className="mt-1 list-disc list-inside space-y-1">
<li>This will replace the current agent script file</li>
<li>A backup will be created automatically</li>
<li>All new installations will use this script</li>
<li>
Existing agents will download this version on their next
update
</li>
</ul>
</div>
))}
</div>
</div>
</div>
)}
<div className="flex justify-end gap-3 mt-6">
<button type="button" onClick={onClose} className="btn-outline">
Cancel
</button>
<button
type="submit"
disabled={isLoading || !scriptContent.trim()}
className="btn-primary"
>
{isLoading ? "Uploading..." : "Replace Script"}
</button>
</div>
</form>
</div>
</div>
);
};
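Condensed, the new component's upload path is: validate the pasted or uploaded script client-side, then hand it to a React Query mutation that refetches the stored file info on success. A stripped-down sketch of that flow; useMutation and agentFileAPI come from the diff above, while useAgentScriptUpload and validateScript are hypothetical helper names, not a drop-in replacement for the component:
// Illustrative sketch of the upload flow implemented above.
import { useMutation } from "@tanstack/react-query";
import { agentFileAPI } from "../../utils/api";

// Returns an error message, or null when the script looks usable.
function validateScript(content) {
	if (!content.trim()) return "Script content is required";
	if (!content.trim().startsWith("#!/"))
		return "Script must start with a shebang (#!/bin/bash or #!/bin/sh)";
	return null;
}

function useAgentScriptUpload(onDone) {
	return useMutation({
		mutationFn: (scriptContent) =>
			agentFileAPI.upload(scriptContent).then((res) => res.data),
		onSuccess: onDone, // e.g. refetch agent file info and close the modal
	});
}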

View File

@@ -7,7 +7,6 @@ import {
} from "react";
import { flushSync } from "react-dom";
import { AUTH_PHASES, isAuthPhase } from "../constants/authPhases";
import { isCorsError } from "../utils/api";
const AuthContext = createContext();
@@ -121,50 +120,9 @@ export const AuthProvider = ({ children }) => {
return { success: true };
} else {
// Handle HTTP error responses (like 500 CORS errors)
console.log("HTTP error response:", response.status, data);
// Check if this is a CORS error based on the response data
if (
data.message?.includes("Not allowed by CORS") ||
data.message?.includes("CORS") ||
data.error?.includes("CORS")
) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
return { success: false, error: data.error || "Login failed" };
}
} catch (error) {
console.log("Login error:", error);
console.log("Error response:", error.response);
console.log("Error message:", error.message);
// Check for CORS/network errors first
if (isCorsError(error)) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
// Check for other network errors
if (
error.name === "TypeError" &&
error.message?.includes("Failed to fetch")
) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
} catch {
return { success: false, error: "Network error occurred" };
}
};
@@ -209,46 +167,9 @@ export const AuthProvider = ({ children }) => {
localStorage.setItem("user", JSON.stringify(data.user));
return { success: true, user: data.user };
} else {
// Handle HTTP error responses (like 500 CORS errors)
console.log("HTTP error response:", response.status, data);
// Check if this is a CORS error based on the response data
if (
data.message?.includes("Not allowed by CORS") ||
data.message?.includes("CORS") ||
data.error?.includes("CORS")
) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
return { success: false, error: data.error || "Update failed" };
}
} catch (error) {
// Check for CORS/network errors first
if (isCorsError(error)) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
// Check for other network errors
if (
error.name === "TypeError" &&
error.message?.includes("Failed to fetch")
) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
} catch {
return { success: false, error: "Network error occurred" };
}
};
@@ -269,49 +190,12 @@ export const AuthProvider = ({ children }) => {
if (response.ok) {
return { success: true };
} else {
// Handle HTTP error responses (like 500 CORS errors)
console.log("HTTP error response:", response.status, data);
// Check if this is a CORS error based on the response data
if (
data.message?.includes("Not allowed by CORS") ||
data.message?.includes("CORS") ||
data.error?.includes("CORS")
) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
return {
success: false,
error: data.error || "Password change failed",
};
}
} catch (error) {
// Check for CORS/network errors first
if (isCorsError(error)) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
// Check for other network errors
if (
error.name === "TypeError" &&
error.message?.includes("Failed to fetch")
) {
return {
success: false,
error:
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
};
}
} catch {
return { success: false, error: "Network error occurred" };
}
};

View File

@@ -227,34 +227,8 @@ const Automation = () => {
// Use the proxied URL through the frontend (port 3000)
// This avoids CORS issues as everything goes through the same origin
const url = `/bullboard?token=${encodeURIComponent(token)}`;
// Open in a new tab instead of a new window
const bullBoardWindow = window.open(url, "_blank");
// Add a message listener to handle authentication failures
if (bullBoardWindow) {
// Listen for authentication failures and refresh with token
const checkAuth = () => {
try {
// Check if the Bull Board window is still open
if (bullBoardWindow.closed) return;
// Inject a script to handle authentication failures
bullBoardWindow.postMessage(
{
type: "BULL_BOARD_TOKEN",
token: token,
},
window.location.origin,
);
} catch (e) {
console.log("Could not communicate with Bull Board window:", e);
}
};
// Send token after a short delay to ensure Bull Board is loaded
setTimeout(checkAuth, 1000);
}
const url = `/admin/queues?token=${encodeURIComponent(token)}`;
window.open(url, "_blank", "width=1200,height=800");
};
const triggerManualJob = async (jobType, data = {}) => {

View File

@@ -52,7 +52,6 @@ const HostDetail = () => {
const [historyPage, setHistoryPage] = useState(0);
const [historyLimit] = useState(10);
const [notes, setNotes] = useState("");
const [notesMessage, setNotesMessage] = useState({ text: "", type: "" });
const {
data: host,
@@ -213,17 +212,6 @@ const HostDetail = () => {
onSuccess: () => {
queryClient.invalidateQueries(["host", hostId]);
queryClient.invalidateQueries(["hosts"]);
setNotesMessage({ text: "Notes saved successfully!", type: "success" });
// Clear message after 3 seconds
setTimeout(() => setNotesMessage({ text: "", type: "" }), 3000);
},
onError: (error) => {
setNotesMessage({
text: error.response?.data?.error || "Failed to save notes",
type: "error",
});
// Clear message after 5 seconds for errors
setTimeout(() => setNotesMessage({ text: "", type: "" }), 5000);
},
});
@@ -1245,37 +1233,6 @@ const HostDetail = () => {
Host Notes
</h3>
</div>
{/* Success/Error Message */}
{notesMessage.text && (
<div
className={`rounded-md p-4 ${
notesMessage.type === "success"
? "bg-green-50 dark:bg-green-900 border border-green-200 dark:border-green-700"
: "bg-red-50 dark:bg-red-900 border border-red-200 dark:border-red-700"
}`}
>
<div className="flex">
{notesMessage.type === "success" ? (
<CheckCircle className="h-5 w-5 text-green-400 dark:text-green-300" />
) : (
<AlertCircle className="h-5 w-5 text-red-400 dark:text-red-300" />
)}
<div className="ml-3">
<p
className={`text-sm font-medium ${
notesMessage.type === "success"
? "text-green-800 dark:text-green-200"
: "text-red-800 dark:text-red-200"
}`}
>
{notesMessage.text}
</p>
</div>
</div>
</div>
)}
<div className="bg-secondary-50 dark:bg-secondary-700 rounded-lg p-4">
<textarea
value={notes}

View File

@@ -505,8 +505,8 @@ const Hosts = () => {
}, [hosts]);
const bulkUpdateGroupMutation = useMutation({
mutationFn: ({ hostIds, groupIds }) =>
adminHostsAPI.bulkUpdateGroups(hostIds, groupIds),
mutationFn: ({ hostIds, hostGroupId }) =>
adminHostsAPI.bulkUpdateGroup(hostIds, hostGroupId),
onSuccess: (data) => {
console.log("bulkUpdateGroupMutation success:", data);
@@ -517,7 +517,11 @@ const Hosts = () => {
return oldData.map((host) => {
const updatedHost = data.hosts.find((h) => h.id === host.id);
if (updatedHost) {
return updatedHost;
// Ensure hostGroupId is set correctly
return {
...updatedHost,
hostGroupId: updatedHost.host_groups?.id || null,
};
}
return host;
});
@@ -667,8 +671,8 @@ const Hosts = () => {
}
};
const handleBulkAssign = (groupIds) => {
bulkUpdateGroupMutation.mutate({ hostIds: selectedHosts, groupIds });
const handleBulkAssign = (hostGroupId) => {
bulkUpdateGroupMutation.mutate({ hostIds: selectedHosts, hostGroupId });
};
const handleBulkDelete = () => {
@@ -1793,7 +1797,8 @@ const BulkAssignModal = ({
onAssign,
isLoading,
}) => {
const [selectedGroupIds, setSelectedGroupIds] = useState([]);
const [selectedGroupId, setSelectedGroupId] = useState("");
const bulkHostGroupId = useId();
// Fetch host groups for selection
const { data: hostGroups } = useQuery({
@@ -1807,17 +1812,7 @@ const BulkAssignModal = ({
const handleSubmit = (e) => {
e.preventDefault();
onAssign(selectedGroupIds);
};
const toggleGroup = (groupId) => {
setSelectedGroupIds((prev) => {
if (prev.includes(groupId)) {
return prev.filter((id) => id !== groupId);
} else {
return [...prev, groupId];
}
});
onAssign(selectedGroupId || null);
};
return (
@@ -1825,7 +1820,7 @@ const BulkAssignModal = ({
<div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
<div className="flex justify-between items-center mb-4">
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white">
Assign to Host Groups
Assign to Host Group
</h3>
<button
type="button"
@@ -1855,43 +1850,27 @@ const BulkAssignModal = ({
<form onSubmit={handleSubmit} className="space-y-4">
<div>
<span className="block text-sm font-medium text-secondary-700 dark:text-secondary-200 mb-3">
Host Groups
</span>
<div className="space-y-2 max-h-48 overflow-y-auto">
{/* Host Group Options */}
<label
htmlFor={bulkHostGroupId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-1"
>
Host Group
</label>
<select
id={bulkHostGroupId}
value={selectedGroupId}
onChange={(e) => setSelectedGroupId(e.target.value)}
className="w-full px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-700 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500"
>
<option value="">No group (ungrouped)</option>
{hostGroups?.map((group) => (
<label
key={group.id}
className={`flex items-center gap-3 p-3 border-2 rounded-lg transition-all duration-200 cursor-pointer ${
selectedGroupIds.includes(group.id)
? "border-primary-500 bg-primary-50 dark:bg-primary-900/30"
: "border-secondary-300 dark:border-secondary-600 bg-white dark:bg-secondary-700 hover:border-secondary-400 dark:hover:border-secondary-500"
}`}
>
<input
type="checkbox"
checked={selectedGroupIds.includes(group.id)}
onChange={() => toggleGroup(group.id)}
className="w-4 h-4 text-primary-600 bg-gray-100 border-gray-300 rounded focus:ring-primary-500 dark:focus:ring-primary-600 dark:ring-offset-gray-800 focus:ring-2 dark:bg-gray-700 dark:border-gray-600"
/>
<div className="flex items-center gap-2 flex-1">
{group.color && (
<div
className="w-3 h-3 rounded-full border border-secondary-300 dark:border-secondary-500 flex-shrink-0"
style={{ backgroundColor: group.color }}
></div>
)}
<div className="text-sm font-medium text-secondary-700 dark:text-secondary-200">
{group.name}
</div>
</div>
</label>
<option key={group.id} value={group.id}>
{group.name}
</option>
))}
</div>
<p className="mt-2 text-sm text-secondary-500 dark:text-secondary-400">
Select one or more groups to assign these hosts to, or leave
ungrouped.
</select>
<p className="mt-1 text-sm text-secondary-500 dark:text-secondary-400">
Select a group to assign these hosts to, or leave ungrouped.
</p>
</div>
@@ -1905,7 +1884,7 @@ const BulkAssignModal = ({
Cancel
</button>
<button type="submit" className="btn-primary" disabled={isLoading}>
{isLoading ? "Assigning..." : "Assign to Groups"}
{isLoading ? "Assigning..." : "Assign to Group"}
</button>
</div>
</form>
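Rather than refetching the whole host list, the mutation's onSuccess above patches the cached hosts in place, re-deriving hostGroupId from the returned host_groups relation. A condensed sketch of that cache patch; the ["hosts"] query key is an assumption not shown in the hunk, and patchHostsCache is a hypothetical helper name:
// Illustrative helper mirroring the onSuccess cache update above.
function patchHostsCache(queryClient, updatedHosts) {
	queryClient.setQueryData(["hosts"], (oldData) =>
		(oldData || []).map((host) => {
			const updated = updatedHosts.find((h) => h.id === host.id);
			return updated
				? { ...updated, hostGroupId: updated.host_groups?.id || null }
				: host;
		}),
	);
}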

View File

@@ -13,7 +13,7 @@ import { useEffect, useId, useState } from "react";
import { useNavigate } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";
import { authAPI, isCorsError } from "../utils/api";
import { authAPI } from "../utils/api";
const Login = () => {
const usernameId = useId();
@@ -82,21 +82,7 @@ const Login = () => {
setError(result.error || "Login failed");
}
} catch (err) {
// Check for CORS/network errors first
if (isCorsError(err)) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else if (
err.name === "TypeError" &&
err.message?.includes("Failed to fetch")
) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else {
setError(err.response?.data?.error || "Login failed");
}
setError(err.response?.data?.error || "Login failed");
} finally {
setIsLoading(false);
}
@@ -126,25 +112,12 @@ const Login = () => {
}
} catch (err) {
console.error("Signup error:", err);
if (isCorsError(err)) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else if (
err.name === "TypeError" &&
err.message?.includes("Failed to fetch")
) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else {
const errorMessage =
err.response?.data?.error ||
(err.response?.data?.errors && err.response.data.errors.length > 0
? err.response.data.errors.map((e) => e.msg).join(", ")
: err.message || "Signup failed");
setError(errorMessage);
}
const errorMessage =
err.response?.data?.error ||
(err.response?.data?.errors && err.response.data.errors.length > 0
? err.response.data.errors.map((e) => e.msg).join(", ")
: err.message || "Signup failed");
setError(errorMessage);
} finally {
setIsLoading(false);
}
@@ -173,22 +146,9 @@ const Login = () => {
}
} catch (err) {
console.error("TFA verification error:", err);
if (isCorsError(err)) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else if (
err.name === "TypeError" &&
err.message?.includes("Failed to fetch")
) {
setError(
"CORS_ORIGIN mismatch - please set your URL in your environment variable",
);
} else {
const errorMessage =
err.response?.data?.error || err.message || "TFA verification failed";
setError(errorMessage);
}
const errorMessage =
err.response?.data?.error || err.message || "TFA verification failed";
setError(errorMessage);
// Clear the token input for security
setTfaData({ token: "" });
} finally {

View File

@@ -153,14 +153,6 @@ const Packages = () => {
}));
}, [packagesResponse]);
// Fetch dashboard stats for card counts (consistent with homepage)
const { data: dashboardStats } = useQuery({
queryKey: ["dashboardStats"],
queryFn: () => dashboardAPI.getStats().then((res) => res.data),
staleTime: 5 * 60 * 1000, // Data stays fresh for 5 minutes
refetchOnWindowFocus: false, // Don't refetch when window regains focus
});
// Fetch hosts data to get total packages count
const { data: hosts } = useQuery({
queryKey: ["hosts"],
@@ -454,20 +446,24 @@ const Packages = () => {
const uniquePackageHostsCount = uniquePackageHosts.size;
// Calculate total packages installed
// Show unique package count (same as table) for consistency
const totalPackagesCount = packages?.length || 0;
// When filtering by host, count each package once (since it can only be installed once per host)
// When not filtering, sum up all installations across all hosts
const totalPackagesCount =
hostFilter && hostFilter !== "all"
? packages?.length || 0
: packages?.reduce(
(sum, pkg) => sum + (pkg.stats?.totalInstalls || 0),
0,
) || 0;
// Calculate total installations across all hosts
const totalInstallationsCount =
packages?.reduce((sum, pkg) => sum + (pkg.stats?.totalInstalls || 0), 0) ||
0;
// Use dashboard stats for outdated packages count (consistent with homepage)
// Calculate outdated packages
const outdatedPackagesCount =
dashboardStats?.cards?.totalOutdatedPackages || 0;
packages?.filter((pkg) => (pkg.stats?.updatesNeeded || 0) > 0).length || 0;
// Use dashboard stats for security updates count (consistent with homepage)
const securityUpdatesCount = dashboardStats?.cards?.securityUpdates || 0;
// Calculate security updates
const securityUpdatesCount =
packages?.filter((pkg) => (pkg.stats?.securityUpdates || 0) > 0).length ||
0;
if (isLoading) {
return (
@@ -533,13 +529,13 @@ const Packages = () => {
</div>
{/* Summary Stats */}
<div className="grid grid-cols-1 sm:grid-cols-5 gap-4 mb-6 flex-shrink-0">
<div className="grid grid-cols-1 sm:grid-cols-4 gap-4 mb-6 flex-shrink-0">
<div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
<div className="flex items-center">
<Package className="h-5 w-5 text-primary-600 mr-2" />
<div>
<p className="text-sm text-secondary-500 dark:text-white">
Total Packages
Total Installed
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{totalPackagesCount}
@@ -548,20 +544,6 @@ const Packages = () => {
</div>
</div>
<div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
<div className="flex items-center">
<Package className="h-5 w-5 text-blue-600 mr-2" />
<div>
<p className="text-sm text-secondary-500 dark:text-white">
Total Installations
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{totalInstallationsCount}
</p>
</div>
</div>
</div>
<div className="card p-4 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200">
<div className="flex items-center">
<Package className="h-5 w-5 text-warning-600 mr-2" />

View File

@@ -26,7 +26,7 @@ import { useEffect, useId, useState } from "react";
import { useAuth } from "../contexts/AuthContext";
import { useTheme } from "../contexts/ThemeContext";
import { isCorsError, tfaAPI } from "../utils/api";
import { tfaAPI } from "../utils/api";
const Profile = () => {
const usernameId = useId();
@@ -88,15 +88,8 @@ const Profile = () => {
text: result.error || "Failed to update profile",
});
}
} catch (error) {
if (isCorsError(error)) {
setMessage({
type: "error",
text: "CORS_ORIGIN mismatch - please set your URL in your environment variable",
});
} else {
setMessage({ type: "error", text: "Network error occurred" });
}
} catch {
setMessage({ type: "error", text: "Network error occurred" });
} finally {
setIsLoading(false);
}
@@ -140,15 +133,8 @@ const Profile = () => {
text: result.error || "Failed to change password",
});
}
} catch (error) {
if (isCorsError(error)) {
setMessage({
type: "error",
text: "CORS_ORIGIN mismatch - please set your URL in your environment variable",
});
} else {
setMessage({ type: "error", text: "Network error occurred" });
}
} catch {
setMessage({ type: "error", text: "Network error occurred" });
} finally {
setIsLoading(false);
}

View File

@@ -144,7 +144,7 @@ const Settings = () => {
defaultUserRole: settings.default_user_role || "user",
githubRepoUrl:
settings.github_repo_url ||
"https://github.com/PatchMon/PatchMon.git",
"git@github.com:9technologygroup/patchmon.net.git",
repositoryType: settings.repository_type || "public",
sshKeyPath: settings.ssh_key_path || "",
useCustomSshKey: !!settings.ssh_key_path,

View File

@@ -221,82 +221,7 @@ export const packagesAPI = {
};
// Utility functions
export const isCorsError = (error) => {
// Check for browser-level CORS errors (when request is blocked before reaching server)
if (error.message?.includes("Failed to fetch") && !error.response) {
return true;
}
// Check for TypeError with Failed to fetch (common CORS error pattern)
if (
error.name === "TypeError" &&
error.message?.includes("Failed to fetch")
) {
return true;
}
// Check for backend CORS errors that get converted to 500 by proxy
if (error.response?.status === 500) {
// Check if the error message contains CORS-related text
if (
error.message?.includes("Not allowed by CORS") ||
error.message?.includes("CORS") ||
error.message?.includes("cors")
) {
return true;
}
// Check if the response data contains CORS error information
if (
error.response?.data?.error?.includes("CORS") ||
error.response?.data?.error?.includes("cors") ||
error.response?.data?.message?.includes("CORS") ||
error.response?.data?.message?.includes("cors") ||
error.response?.data?.message?.includes("Not allowed by CORS")
) {
return true;
}
// Check for specific CORS error patterns from backend logs
if (
error.message?.includes("origin") &&
error.message?.includes("callback")
) {
return true;
}
// Check if this is likely a CORS error based on context
// If we're accessing from localhost but CORS_ORIGIN is set to fabio, this is likely CORS
const currentOrigin = window.location.origin;
if (
currentOrigin === "http://localhost:3000" &&
error.config?.url?.includes("/api/")
) {
// This is likely a CORS error when accessing from localhost
return true;
}
}
// Check for CORS-related errors
return (
error.message?.includes("CORS") ||
error.message?.includes("cors") ||
error.message?.includes("Access to fetch") ||
error.message?.includes("blocked by CORS policy") ||
error.message?.includes("Cross-Origin Request Blocked") ||
error.message?.includes("NetworkError when attempting to fetch resource") ||
error.message?.includes("ERR_BLOCKED_BY_CLIENT") ||
error.message?.includes("ERR_NETWORK") ||
error.message?.includes("ERR_CONNECTION_REFUSED")
);
};
export const formatError = (error) => {
// Check for CORS-related errors
if (isCorsError(error)) {
return "CORS_ORIGIN mismatch - please set your URL in your environment variable";
}
if (error.response?.data?.message) {
return error.response.data.message;
}

package-lock.json (generated, 531 changed lines)
View File

@@ -1,12 +1,12 @@
{
"name": "patchmon",
"version": "1.3.0",
"version": "1.2.9",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "patchmon",
"version": "1.3.0",
"version": "1.2.9",
"license": "AGPL-3.0",
"workspaces": [
"backend",
@@ -23,19 +23,18 @@
},
"backend": {
"name": "patchmon-backend",
"version": "1.3.0",
"version": "1.2.9",
"license": "AGPL-3.0",
"dependencies": {
"@bull-board/api": "^6.13.1",
"@bull-board/express": "^6.13.1",
"@prisma/client": "^6.1.0",
"axios": "^1.7.9",
"bcryptjs": "^2.4.3",
"bullmq": "^5.61.0",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"express": "^5.0.0",
"express-rate-limit": "^7.5.0",
"express-validator": "^7.2.0",
"helmet": "^8.0.0",
@@ -59,7 +58,7 @@
},
"frontend": {
"name": "patchmon-frontend",
"version": "1.3.0",
"version": "1.2.9",
"license": "AGPL-3.0",
"dependencies": {
"@dnd-kit/core": "^6.3.1",
@@ -71,7 +70,7 @@
"clsx": "^2.1.1",
"cors": "^2.8.5",
"date-fns": "^4.1.0",
"express": "^4.21.2",
"express": "^5.0.0",
"http-proxy-middleware": "^3.0.3",
"lucide-react": "^0.468.0",
"react": "^18.3.1",
@@ -142,6 +141,7 @@
"integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.3",
@@ -595,6 +595,7 @@
"resolved": "https://registry.npmjs.org/@bull-board/ui/-/ui-6.13.1.tgz",
"integrity": "sha512-DzPjCFzjEbDukhfSd7nLdTLVKIv5waARQuAXETSRqiKTN4vSA1KNdaJ8p72YwHujKO19yFW1zWjNKrzsa8DCIg==",
"license": "MIT",
"peer": true,
"dependencies": {
"@bull-board/api": "6.13.1"
}
@@ -636,6 +637,7 @@
"resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz",
"integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@dnd-kit/accessibility": "^3.1.1",
"@dnd-kit/utilities": "^3.2.2",
@@ -1938,6 +1940,7 @@
"integrity": "sha512-0dLEBsA1kI3OezMBF8nSsb7Nk19ZnsyE1LLhB8r27KbgU5H4pvuqZLdtE+aUkJVoXgTVuA+iLIwmZ0TuK4tx6A==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@types/prop-types": "*",
"csstype": "^3.0.2"
@@ -1981,13 +1984,34 @@
}
},
"node_modules/accepts": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
"integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz",
"integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==",
"license": "MIT",
"dependencies": {
"mime-types": "~2.1.34",
"negotiator": "0.6.3"
"mime-types": "^3.0.0",
"negotiator": "^1.0.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/accepts/node_modules/mime-db": {
"version": "1.54.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
"integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/accepts/node_modules/mime-types": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
"integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
"license": "MIT",
"dependencies": {
"mime-db": "^1.54.0"
},
"engines": {
"node": ">= 0.6"
@@ -2045,12 +2069,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
"license": "MIT"
},
"node_modules/async": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
@@ -2154,44 +2172,25 @@
}
},
"node_modules/body-parser": {
"version": "1.20.3",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
"integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz",
"integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==",
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.13.0",
"raw-body": "2.5.2",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
"bytes": "^3.1.2",
"content-type": "^1.0.5",
"debug": "^4.4.0",
"http-errors": "^2.0.0",
"iconv-lite": "^0.6.3",
"on-finished": "^2.4.1",
"qs": "^6.14.0",
"raw-body": "^3.0.0",
"type-is": "^2.0.0"
},
"engines": {
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
"node": ">=18"
}
},
"node_modules/body-parser/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/body-parser/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
},
"node_modules/brace-expansion": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
@@ -2235,6 +2234,7 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.8.3",
"caniuse-lite": "^1.0.30001741",
@@ -2454,6 +2454,7 @@
"resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.0.tgz",
"integrity": "sha512-aYeC/jDgSEx8SHWZvANYMioYMZ2KX02W6f6uVfyteuCGcadDLcYVHdfdygsTQkQ4TKn5lghoojAsPj5pu0SnvQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@kurkle/color": "^0.3.0"
},
@@ -2667,9 +2668,9 @@
}
},
"node_modules/content-disposition": {
"version": "0.5.4",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
"integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz",
"integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==",
"license": "MIT",
"dependencies": {
"safe-buffer": "5.2.1"
@@ -2885,16 +2886,6 @@
"devOptional": true,
"license": "MIT"
},
"node_modules/destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
"integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
"license": "MIT",
"engines": {
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/detect-libc": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
@@ -3156,45 +3147,42 @@
"license": "MIT"
},
"node_modules/express": {
"version": "4.21.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
"integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz",
"integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==",
"license": "MIT",
"peer": true,
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.3",
"content-disposition": "0.5.4",
"content-type": "~1.0.4",
"cookie": "0.7.1",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"finalhandler": "1.3.1",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"merge-descriptors": "1.0.3",
"methods": "~1.1.2",
"on-finished": "2.4.1",
"parseurl": "~1.3.3",
"path-to-regexp": "0.1.12",
"proxy-addr": "~2.0.7",
"qs": "6.13.0",
"range-parser": "~1.2.1",
"safe-buffer": "5.2.1",
"send": "0.19.0",
"serve-static": "1.16.2",
"setprototypeof": "1.2.0",
"statuses": "2.0.1",
"type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
"accepts": "^2.0.0",
"body-parser": "^2.2.0",
"content-disposition": "^1.0.0",
"content-type": "^1.0.5",
"cookie": "^0.7.1",
"cookie-signature": "^1.2.1",
"debug": "^4.4.0",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"etag": "^1.8.1",
"finalhandler": "^2.1.0",
"fresh": "^2.0.0",
"http-errors": "^2.0.0",
"merge-descriptors": "^2.0.0",
"mime-types": "^3.0.0",
"on-finished": "^2.4.1",
"once": "^1.4.0",
"parseurl": "^1.3.3",
"proxy-addr": "^2.0.7",
"qs": "^6.14.0",
"range-parser": "^1.2.1",
"router": "^2.2.0",
"send": "^1.1.0",
"serve-static": "^2.2.0",
"statuses": "^2.0.1",
"type-is": "^2.0.1",
"vary": "^1.1.2"
},
"engines": {
"node": ">= 0.10.0"
"node": ">= 18"
},
"funding": {
"type": "opencollective",
@@ -3229,20 +3217,35 @@
"node": ">= 8.0.0"
}
},
"node_modules/express/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"node_modules/express/node_modules/cookie-signature": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz",
"integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==",
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
"engines": {
"node": ">=6.6.0"
}
},
"node_modules/express/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
"node_modules/express/node_modules/mime-db": {
"version": "1.54.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
"integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/express/node_modules/mime-types": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
"integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
"license": "MIT",
"dependencies": {
"mime-db": "^1.54.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/exsolve": {
"version": "1.0.7",
@@ -3350,38 +3353,22 @@
}
},
"node_modules/finalhandler": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz",
"integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==",
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz",
"integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==",
"license": "MIT",
"dependencies": {
"debug": "2.6.9",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"on-finished": "2.4.1",
"parseurl": "~1.3.3",
"statuses": "2.0.1",
"unpipe": "~1.0.0"
"debug": "^4.4.0",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"on-finished": "^2.4.1",
"parseurl": "^1.3.3",
"statuses": "^2.0.1"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/finalhandler/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/finalhandler/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
},
"node_modules/find-up": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
@@ -3478,12 +3465,12 @@
}
},
"node_modules/fresh": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
"integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz",
"integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
"node": ">= 0.8"
}
},
"node_modules/fsevents": {
@@ -3762,12 +3749,12 @@
}
},
"node_modules/iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
@@ -3902,6 +3889,12 @@
"node": ">=0.10.0"
}
},
"node_modules/is-promise": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz",
"integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==",
"license": "MIT"
},
"node_modules/is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
@@ -4379,19 +4372,22 @@
}
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
"integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz",
"integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
"node": ">= 0.8"
}
},
"node_modules/merge-descriptors": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
"integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz",
"integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==",
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
@@ -4406,15 +4402,6 @@
"node": ">= 8"
}
},
"node_modules/methods": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
"integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/micromatch": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
@@ -4428,18 +4415,6 @@
"node": ">=8.6"
}
},
"node_modules/mime": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
"license": "MIT",
"bin": {
"mime": "cli.js"
},
"engines": {
"node": ">=4"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
@@ -4562,9 +4537,9 @@
}
},
"node_modules/negotiator": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
"integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
"integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
@@ -4760,6 +4735,15 @@
"node": ">= 0.8"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"license": "ISC",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/one-time": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz",
@@ -4880,10 +4864,14 @@
"license": "ISC"
},
"node_modules/path-to-regexp": {
"version": "0.1.12",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"license": "MIT"
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz",
"integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==",
"license": "MIT",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/pathe": {
"version": "2.0.3",
@@ -4978,6 +4966,7 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
@@ -5121,6 +5110,7 @@
"devOptional": true,
"hasInstallScript": true,
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"@prisma/config": "6.16.2",
"@prisma/engines": "6.16.2"
@@ -5267,12 +5257,12 @@
}
},
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
"integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.0.6"
"side-channel": "^1.1.0"
},
"engines": {
"node": ">=0.6"
@@ -5312,18 +5302,34 @@
}
},
"node_modules/raw-body": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz",
"integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==",
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"iconv-lite": "0.7.0",
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.8"
"node": ">= 0.10"
}
},
"node_modules/raw-body/node_modules/iconv-lite": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz",
"integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/rc9": {
@@ -5342,6 +5348,7 @@
"resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz",
"integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
},
@@ -5364,6 +5371,7 @@
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz",
"integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==",
"license": "MIT",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0",
"scheduler": "^0.23.2"
@@ -5585,6 +5593,22 @@
"fsevents": "~2.3.2"
}
},
"node_modules/router": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz",
"integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==",
"license": "MIT",
"dependencies": {
"debug": "^4.4.0",
"depd": "^2.0.0",
"is-promise": "^4.0.0",
"parseurl": "^1.3.3",
"path-to-regexp": "^8.0.0"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/run-parallel": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
@@ -5674,66 +5698,61 @@
}
},
"node_modules/send": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz",
"integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz",
"integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==",
"license": "MIT",
"dependencies": {
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"mime": "1.6.0",
"ms": "2.1.3",
"on-finished": "2.4.1",
"range-parser": "~1.2.1",
"statuses": "2.0.1"
"debug": "^4.3.5",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"etag": "^1.8.1",
"fresh": "^2.0.0",
"http-errors": "^2.0.0",
"mime-types": "^3.0.1",
"ms": "^2.1.3",
"on-finished": "^2.4.1",
"range-parser": "^1.2.1",
"statuses": "^2.0.1"
},
"engines": {
"node": ">= 0.8.0"
"node": ">= 18"
}
},
"node_modules/send/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/send/node_modules/debug/node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT"
},
"node_modules/send/node_modules/encodeurl": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
"integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==",
"node_modules/send/node_modules/mime-db": {
"version": "1.54.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
"integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
"node": ">= 0.6"
}
},
"node_modules/send/node_modules/mime-types": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
"integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
"license": "MIT",
"dependencies": {
"mime-db": "^1.54.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/serve-static": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
"integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz",
"integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==",
"license": "MIT",
"dependencies": {
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"parseurl": "~1.3.3",
"send": "0.19.0"
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"parseurl": "^1.3.3",
"send": "^1.2.0"
},
"engines": {
"node": ">= 0.8.0"
"node": ">= 18"
}
},
"node_modules/set-blocking": {
@@ -6211,6 +6230,7 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -6282,13 +6302,35 @@
"license": "0BSD"
},
"node_modules/type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz",
"integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==",
"license": "MIT",
"dependencies": {
"media-typer": "0.3.0",
"mime-types": "~2.1.24"
"content-type": "^1.0.5",
"media-typer": "^1.1.0",
"mime-types": "^3.0.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/type-is/node_modules/mime-db": {
"version": "1.54.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
"integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/type-is/node_modules/mime-types": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
"integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
"license": "MIT",
"dependencies": {
"mime-db": "^1.54.0"
},
"engines": {
"node": ">= 0.6"
@@ -6353,15 +6395,6 @@
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"license": "MIT"
},
"node_modules/utils-merge": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
"integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
"license": "MIT",
"engines": {
"node": ">= 0.4.0"
}
},
"node_modules/uuid": {
"version": "11.1.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
@@ -6399,6 +6432,7 @@
"integrity": "sha512-VbA8ScMvAISJNJVbRDTJdCwqQoAareR/wutevKanhR2/1EkoXVZVkkORaYm/tNVCjP/UDTKtcw3bAkwOUdedmA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.5.0",
@@ -6492,6 +6526,7 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -6594,6 +6629,12 @@
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"license": "ISC"
},
"node_modules/ws": {
"version": "8.18.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "patchmon",
"version": "1.3.0",
"version": "1.2.9",
"description": "Linux Patch Monitoring System",
"license": "AGPL-3.0",
"private": true,

setup.sh (1110 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,263 +0,0 @@
#!/bin/bash
# redis-setup.sh - Redis Database and User Setup for PatchMon
# This script creates a dedicated Redis database and user for a PatchMon instance
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Default Redis connection details
REDIS_HOST=${REDIS_HOST:-"localhost"}
REDIS_PORT=${REDIS_PORT:-6379}
REDIS_ADMIN_PASSWORD=${REDIS_ADMIN_PASSWORD:-"YOURREDISPASSHERE"}
echo -e "${BLUE}🔧 PatchMon Redis Setup${NC}"
echo "=================================="
# Function to generate random strings
generate_random_string() {
local length=${1:-16}
openssl rand -base64 $length | tr -d "=+/" | cut -c1-$length
}
# Function to check if Redis is accessible
check_redis_connection() {
echo -e "${YELLOW}📡 Checking Redis connection...${NC}"
if [ -n "$REDIS_ADMIN_PASSWORD" ]; then
# With password - use ACL admin user
if redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user admin --pass "$REDIS_ADMIN_PASSWORD" --no-auth-warning ping > /dev/null 2>&1; then
echo -e "${GREEN}✅ Redis connection successful${NC}"
return 0
else
echo -e "${RED}❌ Cannot connect to Redis with ACL admin user${NC}"
echo "Please ensure Redis is running and the admin password is correct"
return 1
fi
else
# Without password
if redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" ping > /dev/null 2>&1; then
echo -e "${GREEN}✅ Redis connection successful${NC}"
return 0
else
echo -e "${RED}❌ Cannot connect to Redis${NC}"
echo "Please ensure Redis is running"
return 1
fi
fi
}
# Function to find next available database number
find_next_db() {
echo -e "${YELLOW}🔍 Finding next available database...${NC}" >&2
# Start from database 0 and keep checking until we find an empty one
local db_num=0
local max_attempts=100 # Safety limit to prevent infinite loop
while [ $db_num -lt $max_attempts ]; do
# Test if database is empty
local key_count
local redis_output
if [ -n "$REDIS_ADMIN_PASSWORD" ]; then
# With password - use ACL admin user
redis_output=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user admin --pass "$REDIS_ADMIN_PASSWORD" --no-auth-warning -n "$db_num" DBSIZE 2>&1)
else
# Without password
redis_output=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" -n "$db_num" DBSIZE 2>&1)
fi
# Check for authentication errors
if echo "$redis_output" | grep -q "NOAUTH"; then
echo -e "${RED}❌ Authentication required but REDIS_ADMIN_PASSWORD not set${NC}" >&2
echo -e "${YELLOW}💡 Please set REDIS_ADMIN_PASSWORD environment variable:${NC}" >&2
echo -e "${YELLOW} export REDIS_ADMIN_PASSWORD='your_password'${NC}" >&2
echo -e "${YELLOW} Or run: REDIS_ADMIN_PASSWORD='your_password' ./setup-redis.sh${NC}" >&2
exit 1
fi
# Check for other errors
if echo "$redis_output" | grep -q "ERR"; then
if echo "$redis_output" | grep -q "invalid DB index"; then
echo -e "${RED}❌ Reached maximum database limit at database $db_num${NC}" >&2
echo -e "${YELLOW}💡 Redis is configured with $db_num databases maximum.${NC}" >&2
echo -e "${YELLOW}💡 Increase 'databases' setting in redis.conf or clean up unused databases.${NC}" >&2
exit 1
else
echo -e "${RED}❌ Error checking database $db_num: $redis_output${NC}" >&2
exit 1
fi
fi
key_count="$redis_output"
# If database is empty, use it
if [ "$key_count" = "0" ]; then
echo -e "${GREEN}✅ Found available database: $db_num (empty)${NC}" >&2
echo "$db_num"
return
fi
echo -e "${BLUE} Database $db_num has $key_count keys, checking next...${NC}" >&2
db_num=$((db_num + 1))
done
echo -e "${RED}❌ No available databases found (checked 0-$max_attempts)${NC}" >&2
echo -e "${YELLOW}💡 All checked databases are in use. Consider cleaning up unused databases.${NC}" >&2
exit 1
}
# Function to create Redis user
create_redis_user() {
local username="$1"
local password="$2"
local db_num="$3"
echo -e "${YELLOW}👤 Creating Redis user: $username for database $db_num${NC}"
# Create user with password and permissions
# Note: >password syntax is for Redis ACL, we need to properly escape it
local result
if [ -n "$REDIS_ADMIN_PASSWORD" ]; then
# With password - use ACL admin user
result=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user admin --pass "$REDIS_ADMIN_PASSWORD" --no-auth-warning ACL SETUSER "$username" on ">${password}" ~* +@all 2>&1)
else
# Without password
result=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" ACL SETUSER "$username" on ">${password}" ~* +@all 2>&1)
fi
if [ $? -eq 0 ] && [ "$result" = "OK" ]; then
echo -e "${GREEN}✅ Redis user '$username' created successfully for database $db_num${NC}"
# Save ACL users to file to persist across restarts
local save_result
if [ -n "$REDIS_ADMIN_PASSWORD" ]; then
save_result=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user admin --pass "$REDIS_ADMIN_PASSWORD" --no-auth-warning ACL SAVE 2>&1)
else
save_result=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" ACL SAVE 2>&1)
fi
if [ "$save_result" = "OK" ]; then
echo -e "${GREEN}✅ Redis ACL users saved to file${NC}"
else
echo -e "${YELLOW}⚠️ Failed to save ACL users to file: $save_result${NC}"
fi
# Verify user was created
if [ -n "$REDIS_ADMIN_PASSWORD" ]; then
local verify=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user admin --pass "$REDIS_ADMIN_PASSWORD" --no-auth-warning ACL GETUSER "$username" 2>&1)
else
local verify=$(redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" ACL GETUSER "$username" 2>&1)
fi
if [ "$verify" = "(nil)" ]; then
echo -e "${RED}❌ User creation reported OK but user does not exist${NC}"
return 1
fi
return 0
else
echo -e "${RED}❌ Failed to create Redis user${NC}"
echo -e "${RED}Error: $result${NC}"
return 1
fi
}
# Function to test user connection
test_user_connection() {
local username="$1"
local password="$2"
local db_num="$3"
echo -e "${YELLOW}🧪 Testing user connection...${NC}"
if redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user "$username" --pass "$password" --no-auth-warning -n "$db_num" ping > /dev/null 2>&1; then
echo -e "${GREEN}✅ User connection test successful${NC}"
return 0
else
echo -e "${RED}❌ User connection test failed${NC}"
return 1
fi
}
# Function to mark database as in-use
mark_database_in_use() {
local db_num="$1"
echo -e "${YELLOW}📝 Marking database as in-use...${NC}"
if [ -n "$REDIS_ADMIN_PASSWORD" ]; then
redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" --user admin --pass "$REDIS_ADMIN_PASSWORD" --no-auth-warning -n "$db_num" SET "patchmon:initialized" "$(date -u +%Y-%m-%dT%H:%M:%SZ)" > /dev/null
else
redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" -n "$db_num" SET "patchmon:initialized" "$(date -u +%Y-%m-%dT%H:%M:%SZ)" > /dev/null
fi
if [ $? -eq 0 ]; then
echo -e "${GREEN}✅ Database marked as in-use${NC}"
return 0
else
echo -e "${RED}❌ Failed to mark database${NC}"
return 1
fi
}
# Main execution
main() {
# Check Redis connection
if ! check_redis_connection; then
exit 1
fi
# Generate random credentials
USERNAME="patchmon_$(generate_random_string 8)"
PASSWORD=$(generate_random_string 32)
DB_NUM=$(find_next_db)
echo ""
echo -e "${BLUE}📋 Generated Configuration:${NC}"
echo "Username: $USERNAME"
echo "Password: $PASSWORD"
echo "Database: $DB_NUM"
echo ""
# Create Redis user
if ! create_redis_user "$USERNAME" "$PASSWORD" "$DB_NUM"; then
exit 1
fi
# Test user connection
if ! test_user_connection "$USERNAME" "$PASSWORD" "$DB_NUM"; then
exit 1
fi
# Mark database as in-use to prevent reuse on next run
if ! mark_database_in_use "$DB_NUM"; then
exit 1
fi
# Output .env configuration
echo ""
echo -e "${GREEN}🎉 Redis setup completed successfully!${NC}"
echo ""
echo -e "${BLUE}📄 Add these lines to your .env file:${NC}"
echo "=================================="
echo "REDIS_HOST=$REDIS_HOST"
echo "REDIS_PORT=$REDIS_PORT"
echo "REDIS_USER=$USERNAME"
echo "REDIS_PASSWORD=$PASSWORD"
echo "REDIS_DB=$DB_NUM"
echo "=================================="
echo ""
echo -e "${YELLOW}💡 Copy the configuration above to your .env file${NC}"
}
# Run main function
main "$@"