Compare commits

...

72 Commits

Author SHA1 Message Date
renovate[bot]
e6200f4f0e Update dependency tailwindcss to v4 2025-10-13 20:26:31 +00:00
Muhammad Ibrahim
4d5040e0e9 Merge feature/automation into main
- Resolve migration reconciliation conflicts
- Include updated migration that handles 1.2.7 upgrade scenario
- Merge automation features and Docker support
2025-10-13 21:25:11 +01:00
Muhammad Ibrahim
28c5310b99 Fix migration reconciliation to handle 1.2.7 upgrade scenario
- Add case for table exists but no migration record (1.2.7 upgrade)
- Creates migration record for existing user_sessions table
- Prevents P3018 error when table exists from 1.2.7 installation
- Handles all upgrade scenarios properly
2025-10-13 21:24:35 +01:00
Muhammad Ibrahim
a2e9743da6 Add migration reconciliation for user_sessions 1.2.7 to 1.2.8+ upgrade
- Creates migration 20251004999999_reconcile_user_sessions_migration
- Runs before 20251005000000_add_user_sessions migration
- Properly handles failed migrations by marking them as rolled back first
- Handles migration name conflicts from 1.2.7 (add_user_sessions) to 1.2.8+ (20251005000000_add_user_sessions)
- Fixes failed migration states from upgrade attempts
- Works naturally within Prisma migration system
- No changes to Docker entrypoint or setup scripts needed
2025-10-13 21:15:22 +01:00
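
For context, the reconciliation flow described in a2e9743da6 can be sanity-checked from the shell before and after an upgrade. The sketch below is not part of this changeset; the install path is a guess, and it only assumes a standard Prisma setup with DATABASE_URL exported.

```bash
#!/usr/bin/env bash
# Minimal sketch: inspect the user_sessions migration state around an upgrade.
# /opt/patchmon/backend is a hypothetical install path, not taken from this repo.
set -euo pipefail

cd /opt/patchmon/backend

# Show which migrations Prisma considers applied, failed, or pending
npx prisma migrate status

# Inspect the raw records that the reconciliation migration rewrites
psql "$DATABASE_URL" -c "
  SELECT migration_name, started_at, finished_at, rolled_back_at
  FROM _prisma_migrations
  WHERE migration_name LIKE '%add_user_sessions%';"

# Apply pending migrations (the reconciliation runs before 20251005000000_add_user_sessions)
npx prisma migrate deploy
```
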
Muhammad Ibrahim
3863d641fa Fix git update conflicts in setup.sh --update
- Add git clean -fd to remove untracked files before pull
- Add git reset --hard HEAD to ensure clean state
- Prevents merge conflicts from untracked files during updates
- Ensures smooth updates from any version to any version
2025-10-13 21:15:04 +01:00
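
As a rough illustration of the update sequence 3863d641fa describes, the steps below approximate what a clean in-place update would run; the repository path and branch are assumptions, not taken from setup.sh itself.

```bash
#!/usr/bin/env bash
# Sketch of a clean-update sequence; /opt/patchmon and "main" are assumptions.
set -euo pipefail

cd /opt/patchmon

git fetch origin
git clean -fd               # drop untracked files that would block the pull
git reset --hard HEAD       # discard local modifications for a clean working tree
git pull origin main        # fast-forward to the latest upstream state
```

Running `git clean -fd` before the pull means files introduced by a previous release cannot collide with tracked files arriving from upstream, which is the merge-conflict case the commit message calls out.
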
Muhammad Ibrahim
cc8f77a946 Deleted the JS file 2025-10-13 20:59:05 +01:00
Muhammad Ibrahim
36455e2bfd Fixed migration to be done via a Prisma migration, not a JS script 2025-10-13 20:58:08 +01:00
Muhammad Ibrahim
af65d38cad - Fixes P3009 error when upgrading from 1.2.7
- Reconciles 'add_user_sessions' to '20251005000000_add_user_sessions'
- Prevents duplicate migration attempts
- Handles fresh installs gracefully
2025-10-13 20:29:42 +01:00
Muhammad Ibrahim
29266b6d77 Added longer transaction timeout on the PostgreSQL DB 2025-10-12 21:14:52 +01:00
Muhammad Ibrahim
f96e468482 Improved patchmon-agent.sh logic to handle locked apt processes
Introduced Docker feature integration via the agent
2025-10-11 22:54:49 +01:00
Muhammad Ibrahim
9f8c88badf Remove 'coming soon' indicator from Automation menu item 2025-10-11 20:55:27 +01:00
Muhammad Ibrahim
7985a225d7 Merge main into feature/automation to align git history
Resolved conflicts:
- backend/src/server.js: Kept automation routes alongside gethomepage routes
- frontend/src/pages/Queue.jsx: Kept deleted (replaced by Automation.jsx)
- setup.sh: Kept newer version date (2025-10-11)

This merge brings in all commits from main including:
- GetHomepage integration
- Version 1.2.9 updates
- Migration file renames
- Bug fixes and improvements
2025-10-11 20:45:29 +01:00
Muhammad Ibrahim
8c538bd99c Merge changes from main: Add GetHomepage integration and update to v1.2.9
- Added gethomepageRoutes.js for GetHomepage integration
- Updated all package.json files to version 1.2.9
- Updated agent script to version 1.2.9
- Updated version fallbacks in versionRoutes.js and updateScheduler.js
- Updated setup.sh with version 1.2.9
- Merged GetHomepage integration UI (Integrations.jsx)
- Updated docker-entrypoint.sh from main
- Updated VersionUpdateTab component
- Combined automation and gethomepage routes in server.js
- Maintains both BullMQ automation and GetHomepage functionality
2025-10-11 20:35:47 +01:00
9 Technology Group LTD
623bf5e2c8 Merge pull request #161 from PatchMon/feature/gethomepage
Feature/gethomepage + new version 1.2.9
2025-10-11 20:21:44 +01:00
Muhammad Ibrahim
ed8cc81b89 Changed version from 1.2.8 to 1.2.9 in preparation for the next release 2025-10-11 20:14:08 +01:00
Muhammad Ibrahim
5c4353a688 Fixed linting errors with gethomepage area 2025-10-11 20:04:29 +01:00
Muhammad Ibrahim
6ebcdd57d5 Fixed migration order issue where users were getting the error "add_user_sessions" does not exist 2025-10-11 14:47:27 +01:00
Muhammad Ibrahim
a3d0dfd665 Fixed entrypoint to better handle updating of the agent mechanism
Updated README to show the --update flag
2025-10-10 21:52:57 +01:00
Muhammad Ibrahim
d99ded6d65 Added database backup ability when doing setup.sh --update 2025-10-10 20:16:24 +01:00
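
A backup step like the one d99ded6d65 adds to setup.sh --update could look roughly like the sketch below; the backup directory, file naming, and use of DATABASE_URL are illustrative assumptions rather than the script's actual implementation.

```bash
#!/usr/bin/env bash
# Sketch of a pre-update PostgreSQL backup; paths and naming are illustrative.
set -euo pipefail

BACKUP_DIR=/opt/patchmon/backups
mkdir -p "$BACKUP_DIR"

STAMP=$(date +%Y%m%d_%H%M%S)
pg_dump "$DATABASE_URL" > "$BACKUP_DIR/patchmon_${STAMP}.sql"

echo "Database backed up to $BACKUP_DIR/patchmon_${STAMP}.sql"
```
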
Muhammad Ibrahim
1ea96b6172 Merge branch 'main' of github.com:9technologygroup/patchmon.net 2025-10-10 19:37:46 +01:00
Muhammad Ibrahim
1e5ee66825 Fixed version update checking mechanism
Updated the setup.sh script to have the --update flag
2025-10-10 19:32:44 +01:00
Muhammad Ibrahim
88130797e4 Updated Version to 1.2.8 2025-10-10 12:39:17 +01:00
Muhammad Ibrahim
0ad1a96871 Started building the Automation page and implemented the BullMQ module 2025-10-10 12:24:23 +01:00
9 Technology Group LTD
566c415471 Merge pull request #152 from PatchMon/feature/queue
Feature/Agent
2025-10-08 18:52:02 +01:00
Muhammad Ibrahim
cfc91243eb Fixed issues with RHEL-based systems not sending their repos to PatchMon 2025-10-08 18:46:39 +01:00
Muhammad Ibrahim
84cf31869b Fixed spacing in the header for the buttons 2025-10-08 17:57:56 +01:00
Muhammad Ibrahim
18c9d241eb Fixed RockyLinux 10 Support 2025-10-08 17:53:08 +01:00
Muhammad Ibrahim
86b5da3ea0 Removed titles from the top nav bar to give space to search bar 2025-10-08 17:25:24 +01:00
9 Technology Group LTD
c9b5ee63d8 Merge pull request #151 from PatchMon/fix/agentdata
Fix/agentdata
2025-10-08 16:25:56 +01:00
Muhammad Ibrahim
ac4415e1dc Added support for Oracle Linux 9 2025-10-08 16:24:35 +01:00
9 Technology Group LTD
3737a5a935 Merge pull request #145 from Maelstromeous/patch-1
Document manual result update process for PatchMon
2025-10-08 15:50:28 +01:00
9 Technology Group LTD
bcce48948a Merge pull request #148 from PatchMon/refactor/frontend_optimisations
Various optimisations/fixes - mostly frontend
2025-10-08 15:48:10 +01:00
Muhammad Ibrahim
5e4c628110 Dashboard card edit 2025-10-08 09:53:03 +01:00
Muhammad Ibrahim
a8668ee3f3 Hide Dashboard text in header to give more space to search bar 2025-10-08 09:47:10 +01:00
Muhammad Ibrahim
5487206384 Fix hamburger menu icon and separator dark mode styling 2025-10-08 09:46:04 +01:00
Muhammad Ibrahim
daa31973f9 Fix mobile menu dark mode styling for Dashboard and navigation items 2025-10-08 09:45:31 +01:00
Muhammad Ibrahim
561c78fb08 Remove coming soon items from mobile menu navigation 2025-10-08 09:44:26 +01:00
Muhammad Ibrahim
6d3f2d94ba Add dark mode support and logout functionality to mobile menu 2025-10-08 09:43:41 +01:00
Muhammad Ibrahim
93534ebe52 Add dark mode support to BulkAssignModal 2025-10-08 09:40:38 +01:00
Muhammad Ibrahim
5cf2811bfd Fix BulkAssignModal: add missing bulkHostGroupId variable 2025-10-08 09:40:02 +01:00
tigattack
8fd91eae1a fix(frontend): use updateUserMutation in EditUserModal
Makes it more consistent with the other user mutations and resolves a lint error for the formerly unused `updateUserMutation`
2025-10-08 02:18:20 +01:00
tigattack
da8c661d20 refactor: fix lint errors 2025-10-08 02:12:51 +01:00
tigattack
2bf639e315 chore: update gitignore for docker dev 2025-10-08 02:10:40 +01:00
tigattack
c02ac4bd6f fix(frontend): don't query settings before auth 2025-10-08 02:10:40 +01:00
tigattack
4e0eaf7323 feat(frontend): add lazy loading for routes with Suspense fallback 2025-10-08 02:10:40 +01:00
tigattack
ef9ef58bcb feat(vite): add manual chunking for optimized build output 2025-10-08 02:08:46 +01:00
9 Technology Group LTD
29afe3da1f Merge pull request #147 from PatchMon/fix/agentdata
Add Line Chart
2025-10-08 00:47:47 +01:00
Muhammad Ibrahim
a861e4f9eb Fix linting issues: remove unused imports, add button types, fix array keys 2025-10-08 00:42:26 +01:00
9 Technology Group LTD
12ef6fd8e1 Merge pull request #146 from PatchMon/fix/agentdata
Agent improvements for Debian
Removal of 100 package limit
Modified hosts detail page with Agent history
Added Device fingerprinting for better session management (I need to improve this though)
Added Dashboard card of Package trends for all or specific hosts
Fixed filtering on the package page
2025-10-08 00:33:12 +01:00
Muhammad Ibrahim
ba9de097dc Added Dashboard card to show Package trends over time 2025-10-07 22:48:15 +01:00
Muhammad Ibrahim
8103581d17 Added Package trends over time graph XD 2025-10-07 22:46:55 +01:00
Muhammad Ibrahim
cdb24520d8 Added Total Packages in the Agent history
Added Script execution time in the Agent history tab
Added Pagination for the agent History
2025-10-07 21:46:37 +01:00
Muhammad Ibrahim
831adf3038 Fixed filtering for regular / security updates pie chart on the dashboard 2025-10-07 21:13:22 +01:00
Muhammad Ibrahim
2a1eed1354 Fixed Filtering with the OS Distribution Dashboard card 2025-10-07 21:01:44 +01:00
Muhammad Ibrahim
7819d4512e Made the coffee cup Yellow 2025-10-07 20:54:21 +01:00
Muhammad Ibrahim
a305fe23d3 Fixed issues with the agent not sending apt data properly
Added Indexing to the database for faster and efficient searching
Fixed some filtering from the hosts page relating to packages that need updating
Added buy me a coffee link (sorry and thank you <3)
2025-10-07 20:52:46 +01:00
Matt Cavanagh
2b36e88d85 Revise manual update instructions in README
Updated instructions for forcing updates after host package changes.
2025-10-07 20:25:53 +01:00
Matt Cavanagh
6624ec002d Document manual update process for PatchMon
Add instructions for manual update in README
2025-10-07 20:24:15 +01:00
Muhammad Ibrahim
840779844a Removed 100 limit 2025-10-07 18:20:41 +01:00
Muhammad Ibrahim
f91d3324ba Merge branch 'main' of github.com:9technologygroup/patchmon.net 2025-10-07 18:13:04 +01:00
Muhammad Ibrahim
8c60b5277e Update frontend: HostDetail, Hosts, and osIcons 2025-10-07 18:12:56 +01:00
9 Technology Group LTD
2ac756af84 Merge pull request #139 from stianmeyer/patch-2
Search for the absence of .sh files in the /app/agents folder to trigger copying of the agent files
2025-10-06 09:49:42 +01:00
9 Technology Group LTD
e227004d6b Merge pull request #140 from PatchMon/docs/docker
docs(docker): add description for 'edge' tag
2025-10-06 09:47:12 +01:00
Muhammad Ibrahim
d379473568 Added TFA timeout env variables
Added profile session management
Added "Remember me" to bypass TFA using device fingerprint
Fixed profile name not being persistent after logout and login
2025-10-06 00:55:23 +01:00
9 Technology Group LTD
2edc773adf Merge pull request #141 from PatchMon/ci/docker_no_push_fork 2025-10-05 23:27:44 +01:00
Stian Meyer
2db839556c Copy from agents_backup only when no .sh scripts are present 2025-10-06 00:24:07 +02:00
tigattack
aab6fc244e ci(docker): fix push conditions to prevent pushes from forks 2025-10-05 23:09:01 +01:00
tigattack
b43c9e94fd Merge pull request #117 from PatchMon/ci/tweaks 2025-10-05 22:38:29 +01:00
Stian Meyer
2e2a554aa3 Update backend.docker-entrypoint.sh 2025-10-05 23:36:46 +02:00
tigattack
eabcfd370c ci(docker): remove 'dev' branch from push trigger and update image tag handling
- Create 'edge' tag for pushes to main
- Create versioned & latest tags for new tags with `v` prefix (instead of on release)
2025-10-05 21:33:41 +01:00
tigattack
55cb07b3c8 ci(build): remove 'dev' branch from push trigger 2025-10-05 21:33:41 +01:00
tigattack
0e049ec3d5 ci: ignore changes to docker in build and code quality workflows 2025-10-05 21:33:41 +01:00
67 changed files with 10990 additions and 2622 deletions

View File

@@ -3,7 +3,9 @@ on:
push:
branches:
- main
- dev
paths-ignore:
- 'docker/**'
jobs:
deploy:
runs-on: self-hosted

View File

@@ -2,7 +2,11 @@ name: Code quality
on:
push:
paths-ignore:
- 'docker/**'
pull_request:
paths-ignore:
- 'docker/**'
jobs:
check:

View File

@@ -1,13 +1,14 @@
name: Build and Push Docker Images
on:
push:
branches:
- main
tags:
- 'v*'
pull_request:
branches:
- main
- dev
release:
types:
- published
workflow_dispatch:
inputs:
push:
@@ -56,7 +57,7 @@ jobs:
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=raw,value=latest,enable={{is_default_branch}}
type=edge,branch=main
- name: Build and push ${{ matrix.image }} image
uses: docker/build-push-action@v6
@@ -64,7 +65,11 @@ jobs:
context: .
file: docker/${{ matrix.image }}.Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'workflow_dispatch' || inputs.push == 'true' }}
# Push if:
# - Event is not workflow_dispatch OR input 'push' is true
# AND
# - Event is not pull_request OR the PR is from the same repository (to avoid pushing from forks)
push: ${{ (github.event_name != 'workflow_dispatch' || inputs.push == 'true') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha,scope=${{ matrix.image }}

.gitignore (vendored): 2 changes
View File

@@ -154,4 +154,4 @@ setup-installer-site.sh
install-server.*
notify-clients-upgrade.sh
debug-agent.sh
docker/compose_dev_data
docker/compose_dev_*

View File

@@ -43,7 +43,7 @@ PatchMon provides centralized patch management across diverse server environment
### API & Integrations
- REST API under `/api/v1` with JWT auth
- **Proxmox LXC Auto-Enrollment** - Automatically discover and enroll LXC containers from Proxmox hosts ([Documentation](PROXMOX_AUTO_ENROLLMENT.md))
- Proxmox LXC Auto-Enrollment - Automatically discover and enroll LXC containers from Proxmox hosts
### Security
- Rate limiting for general, auth, and agent endpoints
@@ -85,11 +85,16 @@ apt-get upgrade -y
apt install curl -y
```
#### Script
#### Install Script
```bash
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh
```
#### Update Script (--update flag)
```bash
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh --update
```
#### Minimum specs for building : #####
CPU : 2 vCPU
RAM : 2GB
@@ -113,6 +118,14 @@ After installation:
- Visit `http(s)://<your-domain>` and complete first-time admin setup
- See all useful info in `deployment-info.txt`
## Forcing updates after host package changes
Should you perform a manual package update on your host and wish to see the results reflected in PatchMon quicker than the usual scheduled update, you can trigger the process manually by running:
```bash
/usr/local/bin/patchmon-agent.sh update
```
This will send the results immediately to PatchMon.
## Communication Model
- Outbound-only agents: servers initiate communication to PatchMon

View File

@@ -1,12 +1,12 @@
#!/bin/bash
# PatchMon Agent Script v1.2.7
# PatchMon Agent Script v1.2.9
# This script sends package update information to the PatchMon server using API credentials
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.7"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-agent.log"
@@ -38,21 +38,21 @@ error() {
exit 1
}
# Info logging (cleaner output - only stdout, no duplicate logging)
# Info logging (cleaner output - only stderr, no duplicate logging)
info() {
echo -e "${BLUE} $1${NC}"
echo -e "${BLUE} $1${NC}" >&2
log "INFO: $1"
}
# Success logging (cleaner output - only stdout, no duplicate logging)
# Success logging (cleaner output - only stderr, no duplicate logging)
success() {
echo -e "${GREEN}$1${NC}"
echo -e "${GREEN}$1${NC}" >&2
log "SUCCESS: $1"
}
# Warning logging (cleaner output - only stdout, no duplicate logging)
# Warning logging (cleaner output - only stderr, no duplicate logging)
warning() {
echo -e "${YELLOW}⚠️ $1${NC}"
echo -e "${YELLOW}⚠️ $1${NC}" >&2
log "WARNING: $1"
}
@@ -231,9 +231,14 @@ detect_os() {
"opensuse"|"opensuse-leap"|"opensuse-tumbleweed")
OS_TYPE="suse"
;;
"rocky"|"almalinux")
"almalinux")
OS_TYPE="rhel"
;;
"ol")
# Keep Oracle Linux as 'ol' for proper frontend identification
OS_TYPE="ol"
;;
# Rocky Linux keeps its own identity for proper frontend display
esac
elif [[ -f /etc/redhat-release ]]; then
@@ -261,7 +266,7 @@ get_repository_info() {
"ubuntu"|"debian")
get_apt_repositories repos_json first
;;
"centos"|"rhel"|"fedora")
"centos"|"rhel"|"fedora"|"ol"|"rocky")
get_yum_repositories repos_json first
;;
*)
@@ -569,14 +574,118 @@ get_yum_repositories() {
local -n first_ref=$2
# Parse yum/dnf repository configuration
local repo_info=""
if command -v dnf >/dev/null 2>&1; then
local repo_info=$(dnf repolist all --verbose 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-name|^Repo-status")
repo_info=$(dnf repolist all --verbose 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-mirrors|^Repo-name|^Repo-status")
elif command -v yum >/dev/null 2>&1; then
local repo_info=$(yum repolist all -v 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-name|^Repo-status")
repo_info=$(yum repolist all -v 2>/dev/null | grep -E "^Repo-id|^Repo-baseurl|^Repo-mirrors|^Repo-name|^Repo-status")
fi
# This is a simplified implementation - would need more work for full YUM support
# For now, return empty for non-APT systems
if [[ -z "$repo_info" ]]; then
return
fi
# Parse repository information
local current_repo=""
local repo_id=""
local repo_name=""
local repo_url=""
local repo_mirrors=""
local repo_status=""
while IFS= read -r line; do
if [[ "$line" =~ ^Repo-id[[:space:]]+:[[:space:]]+(.+)$ ]]; then
# Process previous repository if we have one
if [[ -n "$current_repo" ]]; then
process_yum_repo repos_ref first_ref "$repo_id" "$repo_name" "$repo_url" "$repo_mirrors" "$repo_status"
fi
# Start new repository
repo_id="${BASH_REMATCH[1]}"
repo_name="$repo_id"
repo_url=""
repo_mirrors=""
repo_status=""
current_repo="$repo_id"
elif [[ "$line" =~ ^Repo-name[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_name="${BASH_REMATCH[1]}"
elif [[ "$line" =~ ^Repo-baseurl[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_url="${BASH_REMATCH[1]}"
elif [[ "$line" =~ ^Repo-mirrors[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_mirrors="${BASH_REMATCH[1]}"
elif [[ "$line" =~ ^Repo-status[[:space:]]+:[[:space:]]+(.+)$ ]]; then
repo_status="${BASH_REMATCH[1]}"
fi
done <<< "$repo_info"
# Process the last repository
if [[ -n "$current_repo" ]]; then
process_yum_repo repos_ref first_ref "$repo_id" "$repo_name" "$repo_url" "$repo_mirrors" "$repo_status"
fi
}
# Process a single YUM repository and add it to the JSON
process_yum_repo() {
local -n _repos_ref=$1
local -n _first_ref=$2
local repo_id="$3"
local repo_name="$4"
local repo_url="$5"
local repo_mirrors="$6"
local repo_status="$7"
# Skip if we don't have essential info
if [[ -z "$repo_id" ]]; then
return
fi
# Determine if repository is enabled
local is_enabled=false
if [[ "$repo_status" == "enabled" ]]; then
is_enabled=true
fi
# Use baseurl if available, otherwise use mirrors URL
local final_url=""
if [[ -n "$repo_url" ]]; then
# Extract first URL if multiple are listed
final_url=$(echo "$repo_url" | head -n 1 | awk '{print $1}')
elif [[ -n "$repo_mirrors" ]]; then
final_url="$repo_mirrors"
fi
# Skip if we don't have any URL
if [[ -z "$final_url" ]]; then
return
fi
# Determine if repository uses HTTPS
local is_secure=false
if [[ "$final_url" =~ ^https:// ]]; then
is_secure=true
fi
# Generate repository name if not provided
if [[ -z "$repo_name" ]]; then
repo_name="$repo_id"
fi
# Clean up repository name and URL - escape quotes and backslashes
repo_name=$(echo "$repo_name" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
final_url=$(echo "$final_url" | sed 's/\\/\\\\/g' | sed 's/"/\\"/g')
# Add to JSON
if [[ "$_first_ref" == true ]]; then
_first_ref=false
else
_repos_ref+=","
fi
_repos_ref+="{\"name\":\"$repo_name\",\"url\":\"$final_url\",\"distribution\":\"$OS_VERSION\",\"components\":\"main\",\"repoType\":\"rpm\",\"isEnabled\":$is_enabled,\"isSecure\":$is_secure}"
}
# Get package information based on OS
@@ -588,11 +697,11 @@ get_package_info() {
"ubuntu"|"debian")
get_apt_packages packages_json first
;;
"centos"|"rhel"|"fedora")
"centos"|"rhel"|"fedora"|"ol"|"rocky")
get_yum_packages packages_json first
;;
*)
error "Unsupported OS type: $OS_TYPE"
warning "Unsupported OS type: $OS_TYPE - returning empty package list"
;;
esac
@@ -600,13 +709,173 @@ get_package_info() {
echo "$packages_json"
}
# Check and handle APT locks
handle_apt_locks() {
local interactive=${1:-false} # First parameter indicates if running interactively
local lock_files=(
"/var/lib/dpkg/lock"
"/var/lib/dpkg/lock-frontend"
"/var/lib/apt/lists/lock"
"/var/cache/apt/archives/lock"
)
local processes_found=false
local hung_processes=()
# Check for running APT processes
if pgrep -x "apt-get|apt|aptitude|dpkg|unattended-upgr" > /dev/null 2>&1; then
processes_found=true
info "Found running package management processes:"
echo "" >&2
# Get process info with ACTUAL elapsed time (not CPU time)
# Using ps -eo format to get real elapsed time
while IFS= read -r line; do
[[ -z "$line" ]] && continue
local pid=$(echo "$line" | awk '{print $1}')
local elapsed=$(echo "$line" | awk '{print $2}')
local cmd=$(echo "$line" | awk '{for(i=3;i<=NF;i++) printf "%s ", $i; print ""}')
# Display process info
echo " PID $pid: $cmd (running for $elapsed)" >&2
# Parse elapsed time and convert to seconds
# Format can be: MM:SS, HH:MM:SS, DD-HH:MM:SS, or just SS
# Use 10# prefix to force base-10 (avoid octal interpretation of leading zeros)
local runtime_seconds=0
if [[ "$elapsed" =~ ^([0-9]+)-([0-9]+):([0-9]+):([0-9]+)$ ]]; then
# Format: DD-HH:MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 86400 + 10#${BASH_REMATCH[2]} * 3600 + 10#${BASH_REMATCH[3]} * 60 + 10#${BASH_REMATCH[4]} ))
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+):([0-9]+)$ ]]; then
# Format: HH:MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 3600 + 10#${BASH_REMATCH[2]} * 60 + 10#${BASH_REMATCH[3]} ))
elif [[ "$elapsed" =~ ^([0-9]+):([0-9]+)$ ]]; then
# Format: MM:SS
runtime_seconds=$(( 10#${BASH_REMATCH[1]} * 60 + 10#${BASH_REMATCH[2]} ))
elif [[ "$elapsed" =~ ^([0-9]+)$ ]]; then
# Format: just seconds
runtime_seconds=$((10#${BASH_REMATCH[1]}))
fi
# Consider process hung if running for more than 5 minutes
if [[ $runtime_seconds -gt 300 ]]; then
hung_processes+=("$pid:$elapsed:$cmd")
fi
done < <(ps -eo pid,etime,cmd | grep -E "apt-get|apt[^-]|aptitude|dpkg|unattended-upgr" | grep -v grep | grep -v "ps -eo")
echo "" >&2
info "Detected ${#hung_processes[@]} hung process(es), interactive=$interactive"
# If hung processes found and running interactively, offer to kill them
if [[ ${#hung_processes[@]} -gt 0 && "$interactive" == "true" ]]; then
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
echo "" >&2
for process_info in "${hung_processes[@]}"; do
IFS=':' read -r pid elapsed cmd <<< "$process_info"
echo " PID $pid: $cmd (hung for $elapsed)" >&2
done
echo "" >&2
read -p "$(echo -e "${YELLOW}⚠️ Do you want to kill these processes? [y/N]:${NC} ")" -n 1 -r >&2
echo "" >&2
if [[ $REPLY =~ ^[Yy]$ ]]; then
for process_info in "${hung_processes[@]}"; do
IFS=':' read -r pid elapsed cmd <<< "$process_info"
info "Killing process $pid..."
if kill "$pid" 2>/dev/null; then
success "Killed process $pid"
sleep 1
# Check if process is still running
if kill -0 "$pid" 2>/dev/null; then
warning "Process $pid still running, using SIGKILL..."
kill -9 "$pid" 2>/dev/null
success "Force killed process $pid"
fi
else
warning "Could not kill process $pid (may require sudo)"
fi
done
# Wait a moment for locks to clear
sleep 2
else
info "Skipping process termination"
fi
elif [[ ${#hung_processes[@]} -gt 0 ]]; then
warning "Found ${#hung_processes[@]} potentially hung process(es) (running > 5 minutes)"
info "Run this command with sudo and interactively to kill hung processes"
fi
fi
# Check for stale lock files (files that exist but no process is holding them)
for lock_file in "${lock_files[@]}"; do
if [[ -f "$lock_file" ]]; then
# Try to get the PID from the lock file if it exists
if lsof "$lock_file" > /dev/null 2>&1; then
info "Lock file $lock_file is held by an active process"
else
warning "Found stale lock file: $lock_file"
info "Attempting to remove stale lock..."
if rm -f "$lock_file" 2>/dev/null; then
success "Removed stale lock: $lock_file"
else
warning "Could not remove lock (insufficient permissions): $lock_file"
fi
fi
fi
done
# If processes were found, return failure so caller can wait
if [[ "$processes_found" == true ]]; then
return 1
else
return 0
fi
}
# Get package info for APT-based systems
get_apt_packages() {
local -n packages_ref=$1
local -n first_ref=$2
# Update package lists (use apt-get for older distros; quieter output)
apt-get update -qq
# Update package lists with retry logic for lock conflicts
local retry_count=0
local max_retries=3
local retry_delay=5
while [[ $retry_count -lt $max_retries ]]; do
if apt-get update -qq 2>/dev/null; then
break
else
retry_count=$((retry_count + 1))
if [[ $retry_count -lt $max_retries ]]; then
warning "APT lock detected (attempt $retry_count/$max_retries)"
# On first retry, try to handle locks
if [[ $retry_count -eq 1 ]]; then
info "Checking for stale APT locks..."
# Check if running interactively (stdin is a terminal OR stdout is a terminal)
local is_interactive=false
if [[ -t 0 ]] || [[ -t 1 ]]; then
is_interactive=true
fi
info "Interactive mode: $is_interactive"
handle_apt_locks "$is_interactive"
fi
info "Waiting ${retry_delay} seconds before retry..."
sleep $retry_delay
else
warning "APT lock persists after $max_retries attempts"
warning "Continuing without updating package lists (will use cached data)"
fi
fi
done
# Determine upgradable packages using apt-get simulation (compatible with Ubuntu 18.04)
# Example line format:
@@ -626,6 +895,11 @@ get_apt_packages() {
is_security_update=true
fi
# Escape JSON special characters in package data
package_name=$(echo "$package_name" | sed 's/"/\\"/g' | sed 's/\\/\\\\/g')
current_version=$(echo "$current_version" | sed 's/"/\\"/g' | sed 's/\\/\\\\/g')
available_version=$(echo "$available_version" | sed 's/"/\\"/g' | sed 's/\\/\\\\/g')
if [[ "$first_ref" == true ]]; then
first_ref=false
else
@@ -637,12 +911,16 @@ get_apt_packages() {
done <<< "$upgradable_sim"
# Get installed packages that are up to date
local installed=$(dpkg-query -W -f='${Package} ${Version}\n' | head -100)
local installed=$(dpkg-query -W -f='${Package} ${Version}\n')
while IFS=' ' read -r package_name version; do
if [[ -n "$package_name" && -n "$version" ]]; then
# Check if this package is not in the upgrade list
if ! echo "$upgradable" | grep -q "^$package_name/"; then
if ! echo "$upgradable_sim" | grep -q "^Inst $package_name "; then
# Escape JSON special characters in package data
package_name=$(echo "$package_name" | sed 's/"/\\"/g' | sed 's/\\/\\\\/g')
version=$(echo "$version" | sed 's/"/\\"/g' | sed 's/\\/\\\\/g')
if [[ "$first_ref" == true ]]; then
first_ref=false
else
@@ -871,6 +1149,9 @@ get_system_info() {
send_update() {
load_credentials
# Track execution start time
local start_time=$(date +%s.%N)
# Verify datetime before proceeding
if ! verify_datetime; then
warning "Datetime verification failed, but continuing with update..."
@@ -883,6 +1164,15 @@ send_update() {
local network_json=$(get_network_info)
local system_json=$(get_system_info)
# Validate JSON before sending
if ! echo "$packages_json" | jq empty 2>/dev/null; then
error "Invalid packages JSON generated: $packages_json"
fi
if ! echo "$repositories_json" | jq empty 2>/dev/null; then
error "Invalid repositories JSON generated: $repositories_json"
fi
info "Sending update to PatchMon server..."
# Merge all JSON objects into one
@@ -890,6 +1180,10 @@ send_update() {
# Get machine ID
local machine_id=$(get_machine_id)
# Calculate execution time (in seconds with decimals)
local end_time=$(date +%s.%N)
local execution_time=$(echo "$end_time - $start_time" | bc)
# Create the base payload and merge with system info
local base_payload=$(cat <<EOF
{
@@ -901,7 +1195,8 @@ send_update() {
"ip": "$IP_ADDRESS",
"architecture": "$ARCHITECTURE",
"agentVersion": "$AGENT_VERSION",
"machineId": "$machine_id"
"machineId": "$machine_id",
"executionTime": $execution_time
}
EOF
)
@@ -909,15 +1204,27 @@ EOF
# Merge the base payload with the system information
local payload=$(echo "$base_payload $merged_json" | jq -s '.[0] * .[1]')
# Write payload to temporary file to avoid "Argument list too long" error
local temp_payload_file=$(mktemp)
echo "$payload" > "$temp_payload_file"
# Debug: Show payload size
local payload_size=$(wc -c < "$temp_payload_file")
echo -e "${BLUE} 📊 Payload size: $payload_size bytes${NC}"
local response=$(curl $CURL_FLAGS -X POST \
-H "Content-Type: application/json" \
-H "X-API-ID: $API_ID" \
-H "X-API-KEY: $API_KEY" \
-d "$payload" \
"$PATCHMON_SERVER/api/$API_VERSION/hosts/update")
-d @"$temp_payload_file" \
"$PATCHMON_SERVER/api/$API_VERSION/hosts/update" 2>&1)
if [[ $? -eq 0 ]]; then
local curl_exit_code=$?
# Clean up temporary file
rm -f "$temp_payload_file"
if [[ $curl_exit_code -eq 0 ]]; then
if echo "$response" | grep -q "success"; then
local packages_count=$(echo "$response" | grep -o '"packagesProcessed":[0-9]*' | cut -d':' -f2)
success "Update sent successfully (${packages_count} packages processed)"
@@ -953,7 +1260,7 @@ EOF
error "Update failed: $response"
fi
else
error "Failed to send update"
error "Failed to send update (curl exit code: $curl_exit_code): $response"
fi
}
@@ -1401,9 +1708,21 @@ main() {
"diagnostics")
show_diagnostics
;;
"clear-locks"|"unlock")
check_root
info "Checking APT locks and hung processes..."
echo ""
handle_apt_locks true
echo ""
if [[ $? -eq 0 ]]; then
success "No APT locks or processes blocking package management"
else
info "APT processes are still running - they may be legitimate operations"
fi
;;
*)
echo "PatchMon Agent v$AGENT_VERSION - API Credential Based"
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|diagnostics}"
echo "Usage: $0 {configure|test|update|ping|config|check-version|check-agent-update|update-agent|update-crontab|clear-locks|diagnostics}"
echo ""
echo "Commands:"
echo " configure <API_ID> <API_KEY> [SERVER_URL] - Configure API credentials for this host"
@@ -1415,6 +1734,7 @@ main() {
echo " check-agent-update - Check for agent updates using timestamp comparison"
echo " update-agent - Update agent to latest version"
echo " update-crontab - Update crontab with current policy"
echo " clear-locks - Check and clear APT locks (interactive)"
echo " diagnostics - Show detailed system diagnostics"
echo ""
echo "Setup Process:"

agents/patchmon-docker-agent.sh (executable file): 496 additions
View File

@@ -0,0 +1,496 @@
#!/bin/bash
# PatchMon Docker Agent Script v1.2.9
# This script collects Docker container and image information and sends it to PatchMon
# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-docker-agent.log"
# Curl flags placeholder (replaced by server based on SSL settings)
CURL_FLAGS=""
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging function
log() {
if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE" 2>/dev/null
fi
}
# Error handling
error() {
echo -e "${RED}ERROR: $1${NC}" >&2
log "ERROR: $1"
exit 1
}
# Info logging
info() {
echo -e "${BLUE} $1${NC}" >&2
log "INFO: $1"
}
# Success logging
success() {
echo -e "${GREEN}$1${NC}" >&2
log "SUCCESS: $1"
}
# Warning logging
warning() {
echo -e "${YELLOW}⚠️ $1${NC}" >&2
log "WARNING: $1"
}
# Check if Docker is installed and running
check_docker() {
if ! command -v docker &> /dev/null; then
error "Docker is not installed on this system"
fi
if ! docker info &> /dev/null; then
error "Docker daemon is not running or you don't have permission to access it. Try running with sudo."
fi
}
# Load credentials
load_credentials() {
if [[ ! -f "$CREDENTIALS_FILE" ]]; then
error "Credentials file not found at $CREDENTIALS_FILE. Please configure the main PatchMon agent first."
fi
source "$CREDENTIALS_FILE"
if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
error "API credentials not found in $CREDENTIALS_FILE"
fi
# Use PATCHMON_URL from credentials if available, otherwise use default
if [[ -n "$PATCHMON_URL" ]]; then
PATCHMON_SERVER="$PATCHMON_URL"
fi
}
# Load configuration
load_config() {
if [[ -f "$CONFIG_FILE" ]]; then
source "$CONFIG_FILE"
if [[ -n "$SERVER_URL" ]]; then
PATCHMON_SERVER="$SERVER_URL"
fi
fi
}
# Collect Docker containers
collect_containers() {
info "Collecting Docker container information..."
local containers_json="["
local first=true
# Get all containers (running and stopped)
while IFS='|' read -r container_id name image status state created started ports; do
if [[ -z "$container_id" ]]; then
continue
fi
# Parse image name and tag
local image_name="${image%%:*}"
local image_tag="${image##*:}"
if [[ "$image_tag" == "$image_name" ]]; then
image_tag="latest"
fi
# Determine image source based on registry
local image_source="docker-hub"
if [[ "$image_name" == ghcr.io/* ]]; then
image_source="github"
elif [[ "$image_name" == registry.gitlab.com/* ]]; then
image_source="gitlab"
elif [[ "$image_name" == *"/"*"/"* ]]; then
image_source="private"
fi
# Get repository name (without registry prefix for common registries)
local image_repository="$image_name"
image_repository="${image_repository#ghcr.io/}"
image_repository="${image_repository#registry.gitlab.com/}"
# Get image ID
local full_image_id=$(docker inspect --format='{{.Image}}' "$container_id" 2>/dev/null || echo "unknown")
full_image_id="${full_image_id#sha256:}"
# Normalize status (extract just the status keyword)
local normalized_status="unknown"
if [[ "$status" =~ ^Up ]]; then
normalized_status="running"
elif [[ "$status" =~ ^Exited ]]; then
normalized_status="exited"
elif [[ "$status" =~ ^Created ]]; then
normalized_status="created"
elif [[ "$status" =~ ^Restarting ]]; then
normalized_status="restarting"
elif [[ "$status" =~ ^Paused ]]; then
normalized_status="paused"
elif [[ "$status" =~ ^Dead ]]; then
normalized_status="dead"
fi
# Parse ports
local ports_json="null"
if [[ -n "$ports" && "$ports" != "null" ]]; then
# Convert Docker port format to JSON
ports_json=$(echo "$ports" | jq -R -s -c 'split(",") | map(select(length > 0)) | map(split("->") | {(.[0]): .[1]}) | add // {}')
fi
# Convert dates to ISO 8601 format
# If date conversion fails, use null instead of invalid date string
local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
local started_iso="null"
if [[ -n "$started" && "$started" != "null" ]]; then
started_iso=$(date -d "$started" -Iseconds 2>/dev/null || echo "null")
fi
# Add comma for JSON array
if [[ "$first" == false ]]; then
containers_json+=","
fi
first=false
# Build JSON object for this container
containers_json+="{\"container_id\":\"$container_id\","
containers_json+="\"name\":\"$name\","
containers_json+="\"image_name\":\"$image_name\","
containers_json+="\"image_tag\":\"$image_tag\","
containers_json+="\"image_repository\":\"$image_repository\","
containers_json+="\"image_source\":\"$image_source\","
containers_json+="\"image_id\":\"$full_image_id\","
containers_json+="\"status\":\"$normalized_status\","
containers_json+="\"state\":\"$state\","
containers_json+="\"ports\":$ports_json"
# Only add created_at if we have a valid date
if [[ "$created_iso" != "null" ]]; then
containers_json+=",\"created_at\":\"$created_iso\""
fi
# Only add started_at if we have a valid date
if [[ "$started_iso" != "null" ]]; then
containers_json+=",\"started_at\":\"$started_iso\""
fi
containers_json+="}"
done < <(docker ps -a --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.State}}|{{.CreatedAt}}|{{.RunningFor}}|{{.Ports}}' 2>/dev/null)
containers_json+="]"
echo "$containers_json"
}
# Collect Docker images
collect_images() {
info "Collecting Docker image information..."
local images_json="["
local first=true
while IFS='|' read -r repository tag image_id created size digest; do
if [[ -z "$repository" || "$repository" == "<none>" ]]; then
continue
fi
# Clean up tag
if [[ -z "$tag" || "$tag" == "<none>" ]]; then
tag="latest"
fi
# Clean image ID
image_id="${image_id#sha256:}"
# Determine source
local source="docker-hub"
if [[ "$repository" == ghcr.io/* ]]; then
source="github"
elif [[ "$repository" == registry.gitlab.com/* ]]; then
source="gitlab"
elif [[ "$repository" == *"/"*"/"* ]]; then
source="private"
fi
# Convert size to bytes (approximate)
local size_bytes=0
if [[ "$size" =~ ([0-9.]+)([KMGT]?B) ]]; then
local num="${BASH_REMATCH[1]}"
local unit="${BASH_REMATCH[2]}"
case "$unit" in
KB) size_bytes=$(echo "$num * 1024" | bc | cut -d. -f1) ;;
MB) size_bytes=$(echo "$num * 1024 * 1024" | bc | cut -d. -f1) ;;
GB) size_bytes=$(echo "$num * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
TB) size_bytes=$(echo "$num * 1024 * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
B) size_bytes=$(echo "$num" | cut -d. -f1) ;;
esac
fi
# Convert created date to ISO 8601
# If date conversion fails, use null instead of invalid date string
local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
# Add comma for JSON array
if [[ "$first" == false ]]; then
images_json+=","
fi
first=false
# Build JSON object for this image
images_json+="{\"repository\":\"$repository\","
images_json+="\"tag\":\"$tag\","
images_json+="\"image_id\":\"$image_id\","
images_json+="\"source\":\"$source\","
images_json+="\"size_bytes\":$size_bytes"
# Only add created_at if we have a valid date
if [[ "$created_iso" != "null" ]]; then
images_json+=",\"created_at\":\"$created_iso\""
fi
# Only add digest if present
if [[ -n "$digest" && "$digest" != "<none>" ]]; then
images_json+=",\"digest\":\"$digest\""
fi
images_json+="}"
done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.CreatedAt}}|{{.Size}}|{{.Digest}}' --no-trunc 2>/dev/null)
images_json+="]"
echo "$images_json"
}
# Check for image updates
check_image_updates() {
info "Checking for image updates..."
local updates_json="["
local first=true
local update_count=0
# Get all images
while IFS='|' read -r repository tag image_id digest; do
if [[ -z "$repository" || "$repository" == "<none>" || "$tag" == "<none>" ]]; then
continue
fi
# Skip checking 'latest' tag as it's always considered current by name
# We'll still check digest though
local full_image="${repository}:${tag}"
# Try to get remote digest from registry
# Use docker manifest inspect to avoid pulling the image
local remote_digest=$(docker manifest inspect "$full_image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty' 2>/dev/null)
if [[ -z "$remote_digest" ]]; then
# If manifest inspect fails, try buildx imagetools inspect (works for more registries)
remote_digest=$(docker buildx imagetools inspect "$full_image" 2>/dev/null | grep -oP 'Digest:\s*\K\S+' | head -1)
fi
# Clean up digests for comparison
local local_digest="${digest#sha256:}"
remote_digest="${remote_digest#sha256:}"
# If we got a remote digest and it's different from local, there's an update
if [[ -n "$remote_digest" && -n "$local_digest" && "$remote_digest" != "$local_digest" ]]; then
if [[ "$first" == false ]]; then
updates_json+=","
fi
first=false
# Build update JSON object
updates_json+="{\"repository\":\"$repository\","
updates_json+="\"current_tag\":\"$tag\","
updates_json+="\"available_tag\":\"$tag\","
updates_json+="\"current_digest\":\"$local_digest\","
updates_json+="\"available_digest\":\"$remote_digest\","
updates_json+="\"image_id\":\"${image_id#sha256:}\""
updates_json+="}"
((update_count++))
fi
done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.Digest}}' --no-trunc 2>/dev/null)
updates_json+="]"
info "Found $update_count image update(s) available"
echo "$updates_json"
}
# Send Docker data to server
send_docker_data() {
load_credentials
info "Collecting Docker data..."
local containers=$(collect_containers)
local images=$(collect_images)
local updates=$(check_image_updates)
# Count collected items
local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")
info "Found $container_count containers, $image_count images, and $update_count update(s) available"
# Build payload
local payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":$images,\"updates\":$updates}"
# Send to server
info "Sending Docker data to PatchMon server..."
local response=$(curl $CURL_FLAGS -s -w "\n%{http_code}" -X POST \
-H "Content-Type: application/json" \
-d "$payload" \
"${PATCHMON_SERVER}/api/${API_VERSION}/docker/collect" 2>&1)
local http_code=$(echo "$response" | tail -n1)
local response_body=$(echo "$response" | head -n-1)
if [[ "$http_code" == "200" ]]; then
success "Docker data sent successfully!"
log "Docker data sent: $container_count containers, $image_count images"
return 0
else
error "Failed to send Docker data. HTTP Status: $http_code\nResponse: $response_body"
fi
}
# Test Docker data collection without sending
test_collection() {
check_docker
info "Testing Docker data collection (dry run)..."
echo ""
local containers=$(collect_containers)
local images=$(collect_images)
local updates=$(check_image_updates)
local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo -e "${GREEN}Docker Data Collection Results${NC}"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo -e "Containers found: ${GREEN}$container_count${NC}"
echo -e "Images found: ${GREEN}$image_count${NC}"
echo -e "Updates available: ${YELLOW}$update_count${NC}"
echo ""
if command -v jq &> /dev/null; then
echo "━━━ Containers ━━━"
echo "$containers" | jq -r '.[] | "\(.name) (\(.status)) - \(.image_name):\(.image_tag)"' | head -10
if [[ $container_count -gt 10 ]]; then
echo "... and $((container_count - 10)) more"
fi
echo ""
echo "━━━ Images ━━━"
echo "$images" | jq -r '.[] | "\(.repository):\(.tag) (\(.size_bytes / 1024 / 1024 | floor)MB)"' | head -10
if [[ $image_count -gt 10 ]]; then
echo "... and $((image_count - 10)) more"
fi
if [[ $update_count -gt 0 ]]; then
echo ""
echo "━━━ Available Updates ━━━"
echo "$updates" | jq -r '.[] | "\(.repository):\(.current_tag) → \(.available_tag)"'
fi
fi
echo ""
success "Test collection completed successfully!"
}
# Show help
show_help() {
cat << EOF
PatchMon Docker Agent v${AGENT_VERSION}
This agent collects Docker container and image information and sends it to PatchMon.
USAGE:
$0 <command>
COMMANDS:
collect Collect and send Docker data to PatchMon server
test Test Docker data collection without sending (dry run)
help Show this help message
REQUIREMENTS:
- Docker must be installed and running
- Main PatchMon agent must be configured first
- Credentials file must exist at $CREDENTIALS_FILE
EXAMPLES:
# Test collection (dry run)
sudo $0 test
# Collect and send Docker data
sudo $0 collect
SCHEDULING:
To run this agent automatically, add a cron job:
# Run every 5 minutes
*/5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect
# Run every hour
0 * * * * /usr/local/bin/patchmon-docker-agent.sh collect
FILES:
Config: $CONFIG_FILE
Credentials: $CREDENTIALS_FILE
Log: $LOG_FILE
EOF
}
# Main function
main() {
case "$1" in
"collect")
check_docker
load_config
send_docker_data
;;
"test")
check_docker
load_config
test_collection
;;
"help"|"--help"|"-h"|"")
show_help
;;
*)
error "Unknown command: $1\n\nRun '$0 help' for usage information."
;;
esac
}
# Run main function
main "$@"

View File

@@ -31,3 +31,8 @@ JWT_SECRET=your-secure-random-secret-key-change-this-in-production
JWT_EXPIRES_IN=1h
JWT_REFRESH_EXPIRES_IN=7d
SESSION_INACTIVITY_TIMEOUT_MINUTES=30
# TFA Configuration
TFA_REMEMBER_ME_EXPIRES_IN=30d
TFA_MAX_REMEMBER_SESSIONS=5
TFA_SUSPICIOUS_ACTIVITY_THRESHOLD=3

View File

@@ -1,6 +1,6 @@
{
"name": "patchmon-backend",
"version": "1.2.7",
"version": "1.2.9",
"description": "Backend API for Linux Patch Monitoring System",
"license": "AGPL-3.0",
"main": "src/server.js",
@@ -14,14 +14,18 @@
"db:studio": "prisma studio"
},
"dependencies": {
"@bull-board/api": "^6.13.0",
"@bull-board/express": "^6.13.0",
"@prisma/client": "^6.1.0",
"bcryptjs": "^2.4.3",
"bullmq": "^5.61.0",
"cors": "^2.8.5",
"dotenv": "^16.4.7",
"express": "^4.21.2",
"express-rate-limit": "^7.5.0",
"express-validator": "^7.2.0",
"helmet": "^8.0.0",
"ioredis": "^5.8.1",
"jsonwebtoken": "^9.0.2",
"moment": "^2.30.1",
"qrcode": "^1.5.4",

View File

@@ -0,0 +1,119 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp
DO $$
DECLARE
old_migration_exists boolean := false;
table_exists boolean := false;
failed_migration_exists boolean := false;
new_migration_exists boolean := false;
BEGIN
-- Check if the old migration name exists
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = 'add_user_sessions'
) INTO old_migration_exists;
-- Check if user_sessions table exists
SELECT EXISTS (
SELECT 1 FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'user_sessions'
) INTO table_exists;
-- Check if there's a failed migration attempt
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL
) INTO failed_migration_exists;
-- Check if the new migration already exists and is successful
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NOT NULL
) INTO new_migration_exists;
-- FIRST: Handle failed migration (must be marked as rolled back)
IF failed_migration_exists THEN
RAISE NOTICE 'Found failed migration attempt - marking as rolled back';
-- Mark the failed migration as rolled back (required by Prisma)
UPDATE _prisma_migrations
SET rolled_back_at = NOW()
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL;
RAISE NOTICE 'Failed migration marked as rolled back';
-- If table exists, it means the migration partially succeeded
IF table_exists THEN
RAISE NOTICE 'Table exists - migration was partially successful, will be handled by next migration';
ELSE
RAISE NOTICE 'Table does not exist - migration will retry after rollback';
END IF;
END IF;
-- SECOND: Handle old migration name (1.2.7 -> 1.2.8+ upgrade)
IF old_migration_exists AND table_exists THEN
RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';
-- Update the old migration name to the new timestamped version
UPDATE _prisma_migrations
SET migration_name = '20251005000000_add_user_sessions'
WHERE migration_name = 'add_user_sessions';
RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
END IF;
-- THIRD: Handle case where table exists but no migration record exists (1.2.7 upgrade scenario)
IF table_exists AND NOT old_migration_exists AND NOT new_migration_exists THEN
RAISE NOTICE 'Table exists but no migration record found - creating migration record for 1.2.7 upgrade';
-- Insert a successful migration record for the existing table
INSERT INTO _prisma_migrations (
id,
checksum,
finished_at,
migration_name,
logs,
rolled_back_at,
started_at,
applied_steps_count
) VALUES (
gen_random_uuid()::text,
'', -- Empty checksum since we're reconciling
NOW(),
'20251005000000_add_user_sessions',
'Reconciled from 1.2.7 - table already exists',
NULL,
NOW(),
1
);
RAISE NOTICE 'Migration record created for existing table';
END IF;
-- FOURTH: If we have a rolled back migration and table exists, mark it as applied
IF failed_migration_exists AND table_exists THEN
RAISE NOTICE 'Migration was rolled back but table exists - marking as successfully applied';
-- Update the rolled back migration to be successful
UPDATE _prisma_migrations
SET
finished_at = NOW(),
rolled_back_at = NULL,
logs = 'Reconciled from failed state - table already exists'
WHERE migration_name = '20251005000000_add_user_sessions';
RAISE NOTICE 'Migration marked as successfully applied';
END IF;
-- If no issues found
IF NOT old_migration_exists AND NOT failed_migration_exists AND NOT (table_exists AND NOT new_migration_exists) THEN
RAISE NOTICE 'No migration reconciliation needed';
END IF;
END $$;

View File

@@ -0,0 +1,96 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp
DO $$
DECLARE
old_migration_exists boolean := false;
table_exists boolean := false;
failed_migration_exists boolean := false;
BEGIN
-- Check if the old migration name exists
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = 'add_user_sessions'
) INTO old_migration_exists;
-- Check if user_sessions table exists
SELECT EXISTS (
SELECT 1 FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'user_sessions'
) INTO table_exists;
-- Check if there's a failed migration attempt
SELECT EXISTS (
SELECT 1 FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL
) INTO failed_migration_exists;
-- Scenario 1: Old migration exists, table exists, no failed migration
-- This means 1.2.7 was installed and we need to update the migration name
IF old_migration_exists AND table_exists AND NOT failed_migration_exists THEN
RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';
-- Update the old migration name to the new timestamped version
UPDATE _prisma_migrations
SET migration_name = '20251005000000_add_user_sessions'
WHERE migration_name = 'add_user_sessions';
RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
END IF;
-- Scenario 2: Failed migration exists (upgrade attempt gone wrong)
IF failed_migration_exists THEN
RAISE NOTICE 'Found failed migration attempt - cleaning up';
-- If table exists, it means the migration partially succeeded
IF table_exists THEN
RAISE NOTICE 'Table exists - marking migration as applied';
-- Delete the failed migration record
DELETE FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL;
-- Insert a successful migration record
INSERT INTO _prisma_migrations (
id,
checksum,
finished_at,
migration_name,
logs,
rolled_back_at,
started_at,
applied_steps_count
) VALUES (
gen_random_uuid()::text,
'', -- Empty checksum since we're reconciling
NOW(),
'20251005000000_add_user_sessions',
NULL,
NULL,
NOW(),
1
);
RAISE NOTICE 'Migration marked as successfully applied';
ELSE
RAISE NOTICE 'Table does not exist - removing failed migration to allow retry';
-- Just delete the failed migration to allow it to retry
DELETE FROM _prisma_migrations
WHERE migration_name = '20251005000000_add_user_sessions'
AND finished_at IS NULL;
RAISE NOTICE 'Failed migration removed - will retry on next migration run';
END IF;
END IF;
-- Scenario 3: Everything is clean (fresh install or already reconciled)
IF NOT old_migration_exists AND NOT failed_migration_exists THEN
RAISE NOTICE 'No migration reconciliation needed';
END IF;
END $$;

View File

@@ -0,0 +1,6 @@
-- Add TFA remember me fields to user_sessions table
ALTER TABLE "user_sessions" ADD COLUMN "tfa_remember_me" BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE "user_sessions" ADD COLUMN "tfa_bypass_until" TIMESTAMP(3);
-- Create index for TFA bypass until field for efficient querying
CREATE INDEX "user_sessions_tfa_bypass_until_idx" ON "user_sessions"("tfa_bypass_until");

View File

@@ -0,0 +1,7 @@
-- Add security fields to user_sessions table for production-ready remember me
ALTER TABLE "user_sessions" ADD COLUMN "device_fingerprint" TEXT;
ALTER TABLE "user_sessions" ADD COLUMN "login_count" INTEGER NOT NULL DEFAULT 1;
ALTER TABLE "user_sessions" ADD COLUMN "last_login_ip" TEXT;
-- Create index for device fingerprint for efficient querying
CREATE INDEX "user_sessions_device_fingerprint_idx" ON "user_sessions"("device_fingerprint");

View File

@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "update_history" ADD COLUMN "total_packages" INTEGER;

View File

@@ -0,0 +1,4 @@
-- AlterTable
ALTER TABLE "update_history" ADD COLUMN "payload_size_kb" DOUBLE PRECISION;
ALTER TABLE "update_history" ADD COLUMN "execution_time" DOUBLE PRECISION;

View File

@@ -0,0 +1,30 @@
-- Add indexes to host_packages table for performance optimization
-- These indexes will dramatically speed up queries filtering by host_id, package_id, needs_update, and is_security_update
-- Index for queries filtering by host_id (very common - used when viewing packages for a specific host)
CREATE INDEX IF NOT EXISTS "host_packages_host_id_idx" ON "host_packages"("host_id");
-- Index for queries filtering by package_id (used when finding hosts for a specific package)
CREATE INDEX IF NOT EXISTS "host_packages_package_id_idx" ON "host_packages"("package_id");
-- Index for queries filtering by needs_update (used when finding outdated packages)
CREATE INDEX IF NOT EXISTS "host_packages_needs_update_idx" ON "host_packages"("needs_update");
-- Index for queries filtering by is_security_update (used when finding security updates)
CREATE INDEX IF NOT EXISTS "host_packages_is_security_update_idx" ON "host_packages"("is_security_update");
-- Composite index for the most common query pattern: host_id + needs_update
-- This is optimal for "show me outdated packages for this host"
CREATE INDEX IF NOT EXISTS "host_packages_host_id_needs_update_idx" ON "host_packages"("host_id", "needs_update");
-- Composite index for host_id + needs_update + is_security_update
-- This is optimal for "show me security updates for this host"
CREATE INDEX IF NOT EXISTS "host_packages_host_id_needs_update_security_idx" ON "host_packages"("host_id", "needs_update", "is_security_update");
-- Index for queries filtering by package_id + needs_update
-- This is optimal for "show me hosts where this package needs updates"
CREATE INDEX IF NOT EXISTS "host_packages_package_id_needs_update_idx" ON "host_packages"("package_id", "needs_update");
-- Index on last_checked for cleanup/maintenance queries
CREATE INDEX IF NOT EXISTS "host_packages_last_checked_idx" ON "host_packages"("last_checked");

View File

@@ -0,0 +1,94 @@
-- CreateTable
CREATE TABLE "docker_images" (
"id" TEXT NOT NULL,
"repository" TEXT NOT NULL,
"tag" TEXT NOT NULL DEFAULT 'latest',
"image_id" TEXT NOT NULL,
"digest" TEXT,
"size_bytes" BIGINT,
"source" TEXT NOT NULL DEFAULT 'docker-hub',
"created_at" TIMESTAMP(3) NOT NULL,
"last_pulled" TIMESTAMP(3),
"last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "docker_images_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "docker_containers" (
"id" TEXT NOT NULL,
"host_id" TEXT NOT NULL,
"container_id" TEXT NOT NULL,
"name" TEXT NOT NULL,
"image_id" TEXT,
"image_name" TEXT NOT NULL,
"image_tag" TEXT NOT NULL DEFAULT 'latest',
"status" TEXT NOT NULL,
"state" TEXT,
"ports" JSONB,
"created_at" TIMESTAMP(3) NOT NULL,
"started_at" TIMESTAMP(3),
"updated_at" TIMESTAMP(3) NOT NULL,
"last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "docker_containers_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "docker_image_updates" (
"id" TEXT NOT NULL,
"image_id" TEXT NOT NULL,
"current_tag" TEXT NOT NULL,
"available_tag" TEXT NOT NULL,
"is_security_update" BOOLEAN NOT NULL DEFAULT false,
"severity" TEXT,
"changelog_url" TEXT,
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP(3) NOT NULL,
CONSTRAINT "docker_image_updates_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "docker_images_repository_idx" ON "docker_images"("repository");
-- CreateIndex
CREATE INDEX "docker_images_source_idx" ON "docker_images"("source");
-- CreateIndex
CREATE INDEX "docker_images_repository_tag_idx" ON "docker_images"("repository", "tag");
-- CreateIndex
CREATE UNIQUE INDEX "docker_images_repository_tag_image_id_key" ON "docker_images"("repository", "tag", "image_id");
-- CreateIndex
CREATE INDEX "docker_containers_host_id_idx" ON "docker_containers"("host_id");
-- CreateIndex
CREATE INDEX "docker_containers_image_id_idx" ON "docker_containers"("image_id");
-- CreateIndex
CREATE INDEX "docker_containers_status_idx" ON "docker_containers"("status");
-- CreateIndex
CREATE INDEX "docker_containers_name_idx" ON "docker_containers"("name");
-- CreateIndex
CREATE UNIQUE INDEX "docker_containers_host_id_container_id_key" ON "docker_containers"("host_id", "container_id");
-- CreateIndex
CREATE INDEX "docker_image_updates_image_id_idx" ON "docker_image_updates"("image_id");
-- CreateIndex
CREATE INDEX "docker_image_updates_is_security_update_idx" ON "docker_image_updates"("is_security_update");
-- CreateIndex
CREATE UNIQUE INDEX "docker_image_updates_image_id_available_tag_key" ON "docker_image_updates"("image_id", "available_tag");
-- AddForeignKey
ALTER TABLE "docker_containers" ADD CONSTRAINT "docker_containers_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "docker_image_updates" ADD CONSTRAINT "docker_image_updates_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -44,6 +44,14 @@ model host_packages {
packages packages @relation(fields: [package_id], references: [id], onDelete: Cascade)
@@unique([host_id, package_id])
@@index([host_id])
@@index([package_id])
@@index([needs_update])
@@index([is_security_update])
@@index([host_id, needs_update])
@@index([host_id, needs_update, is_security_update])
@@index([package_id, needs_update])
@@index([last_checked])
}
model host_repositories {
@@ -108,6 +116,9 @@ model packages {
created_at DateTime @default(now())
updated_at DateTime
host_packages host_packages[]
@@index([name])
@@index([category])
}
model repositories {
@@ -170,14 +181,17 @@ model settings {
}
model update_history {
id String @id
host_id String
packages_count Int
security_count Int
timestamp DateTime @default(now())
status String @default("success")
error_message String?
hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade)
id String @id
host_id String
packages_count Int
security_count Int
total_packages Int?
payload_size_kb Float?
execution_time Float?
timestamp DateTime @default(now())
status String @default("success")
error_message String?
hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade)
}
model users {
@@ -207,15 +221,22 @@ model user_sessions {
access_token_hash String?
ip_address String?
user_agent String?
device_fingerprint String?
last_activity DateTime @default(now())
expires_at DateTime
created_at DateTime @default(now())
is_revoked Boolean @default(false)
tfa_remember_me Boolean @default(false)
tfa_bypass_until DateTime?
login_count Int @default(1)
last_login_ip String?
users users @relation(fields: [user_id], references: [id], onDelete: Cascade)
@@index([user_id])
@@index([refresh_token])
@@index([expires_at])
@@index([tfa_bypass_until])
@@index([device_fingerprint])
}
model auto_enrollment_tokens {
@@ -241,3 +262,65 @@ model auto_enrollment_tokens {
@@index([token_key])
@@index([is_active])
}
model docker_containers {
id String @id
host_id String
container_id String
name String
image_id String?
image_name String
image_tag String @default("latest")
status String
state String?
ports Json?
created_at DateTime
started_at DateTime?
updated_at DateTime
last_checked DateTime @default(now())
docker_images docker_images? @relation(fields: [image_id], references: [id], onDelete: SetNull)
@@unique([host_id, container_id])
@@index([host_id])
@@index([image_id])
@@index([status])
@@index([name])
}
model docker_images {
id String @id
repository String
tag String @default("latest")
image_id String
digest String?
size_bytes BigInt?
source String @default("docker-hub")
created_at DateTime
last_pulled DateTime?
last_checked DateTime @default(now())
updated_at DateTime
docker_containers docker_containers[]
docker_image_updates docker_image_updates[]
@@unique([repository, tag, image_id])
@@index([repository])
@@index([source])
@@index([repository, tag])
}
model docker_image_updates {
id String @id
image_id String
current_tag String
available_tag String
is_security_update Boolean @default(false)
severity String?
changelog_url String?
created_at DateTime @default(now())
updated_at DateTime
docker_images docker_images @relation(fields: [image_id], references: [id], onDelete: Cascade)
@@unique([image_id, available_tag])
@@index([image_id])
@@index([is_security_update])
}
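One note on the schema (editor's aside): size_bytes is declared as BigInt, and Node's JSON.stringify throws on BigInt values, which is why the Docker routes further down wrap every response in a convertBigIntToString helper. Plain Node behaviour, shown for illustration:

// Demonstrates the serialization failure the helper works around.
JSON.stringify({ size_bytes: 1234567890n });
// -> TypeError: Do not know how to serialize a BigInt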

View File

@@ -3,6 +3,7 @@ const { PrismaClient } = require("@prisma/client");
const {
validate_session,
update_session_activity,
is_tfa_bypassed,
} = require("../utils/session_manager");
const prisma = new PrismaClient();
@@ -46,6 +47,9 @@ const authenticateToken = async (req, res, next) => {
// Update session activity timestamp
await update_session_activity(decoded.sessionId);
// Check if TFA is bypassed for this session
const tfa_bypassed = await is_tfa_bypassed(decoded.sessionId);
// Update last login (only on successful authentication)
await prisma.users.update({
where: { id: validation.user.id },
@@ -57,6 +61,7 @@ const authenticateToken = async (req, res, next) => {
req.user = validation.user;
req.session_id = decoded.sessionId;
req.tfa_bypassed = tfa_bypassed;
next();
} catch (error) {
if (error.name === "JsonWebTokenError") {
@@ -114,8 +119,33 @@ const optionalAuth = async (req, _res, next) => {
}
};
// Middleware to check if TFA is required for sensitive operations
const requireTfaIfEnabled = async (req, res, next) => {
try {
// Check if user has TFA enabled
const user = await prisma.users.findUnique({
where: { id: req.user.id },
select: { tfa_enabled: true },
});
// If TFA is enabled and not bypassed, require TFA verification
if (user?.tfa_enabled && !req.tfa_bypassed) {
return res.status(403).json({
error: "Two-factor authentication required for this operation",
requires_tfa: true,
});
}
next();
} catch (error) {
console.error("TFA requirement check error:", error);
return res.status(500).json({ error: "Authentication check failed" });
}
};
module.exports = {
authenticateToken,
requireAdmin,
optionalAuth,
requireTfaIfEnabled,
};
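A hypothetical usage sketch (the route path is illustrative, not from this diff) showing how requireTfaIfEnabled is meant to be chained after authenticateToken, which sets req.tfa_bypassed:

const { authenticateToken, requireTfaIfEnabled } = require("../middleware/auth");

// Users with TFA enabled must have a verified (or bypassed) TFA session
// before this handler runs; otherwise the middleware responds 403 with
// requires_tfa: true.
router.post(
  "/sensitive-action",
  authenticateToken,
  requireTfaIfEnabled,
  async (req, res) => {
    res.json({ ok: true });
  },
);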

View File

@@ -17,12 +17,65 @@ const {
refresh_access_token,
revoke_session,
revoke_all_user_sessions,
get_user_sessions,
} = require("../utils/session_manager");
const router = express.Router();
const prisma = new PrismaClient();
/**
* Parse user agent string to extract browser and OS info
*/
function parse_user_agent(user_agent) {
if (!user_agent)
return { browser: "Unknown", os: "Unknown", device: "Unknown" };
const ua = user_agent.toLowerCase();
// Browser detection
let browser = "Unknown";
if (ua.includes("chrome") && !ua.includes("edg")) browser = "Chrome";
else if (ua.includes("firefox")) browser = "Firefox";
else if (ua.includes("safari") && !ua.includes("chrome")) browser = "Safari";
else if (ua.includes("edg")) browser = "Edge";
else if (ua.includes("opera")) browser = "Opera";
// OS detection
let os = "Unknown";
if (ua.includes("windows")) os = "Windows";
else if (ua.includes("macintosh") || ua.includes("mac os")) os = "macOS";
else if (ua.includes("linux")) os = "Linux";
else if (ua.includes("android")) os = "Android";
else if (ua.includes("iphone") || ua.includes("ipad")) os = "iOS";
// Device type
let device = "Desktop";
if (ua.includes("mobile")) device = "Mobile";
else if (ua.includes("tablet") || ua.includes("ipad")) device = "Tablet";
return { browser, os, device };
}
/**
* Get basic location info from IP (simplified - in production you'd use a service)
*/
function get_location_from_ip(ip) {
if (!ip) return { country: "Unknown", city: "Unknown" };
// For localhost/private IPs
if (
ip === "127.0.0.1" ||
ip === "::1" ||
ip.startsWith("192.168.") ||
ip.startsWith("10.")
) {
return { country: "Local", city: "Local Network" };
}
// In a real implementation, you'd use a service like MaxMind GeoIP2
// For now, return unknown for external IPs
return { country: "Unknown", city: "Unknown" };
}
// Check if any admin users exist (for first-time setup)
router.get("/check-admin-users", async (_req, res) => {
try {
@@ -765,6 +818,8 @@ router.post(
id: user.id,
username: user.username,
email: user.email,
first_name: user.first_name,
last_name: user.last_name,
role: user.role,
is_active: user.is_active,
last_login: user.last_login,
@@ -788,6 +843,10 @@ router.post(
.isLength({ min: 6, max: 6 })
.withMessage("Token must be 6 digits"),
body("token").isNumeric().withMessage("Token must contain only numbers"),
body("remember_me")
.optional()
.isBoolean()
.withMessage("Remember me must be a boolean"),
],
async (req, res) => {
try {
@@ -796,7 +855,7 @@ router.post(
return res.status(400).json({ errors: errors.array() });
}
const { username, token } = req.body;
const { username, token, remember_me = false } = req.body;
// Find user
const user = await prisma.users.findFirst({
@@ -865,13 +924,20 @@ router.post(
// Create session with access and refresh tokens
const ip_address = req.ip || req.connection.remoteAddress;
const user_agent = req.get("user-agent");
const session = await create_session(user.id, ip_address, user_agent);
const session = await create_session(
user.id,
ip_address,
user_agent,
remember_me,
req,
);
res.json({
message: "Login successful",
token: session.access_token,
refresh_token: session.refresh_token,
expires_at: session.expires_at,
tfa_bypass_until: session.tfa_bypass_until,
user: {
id: user.id,
username: user.username,
@@ -1109,10 +1175,43 @@ router.post(
// Get user's active sessions
router.get("/sessions", authenticateToken, async (req, res) => {
try {
const sessions = await get_user_sessions(req.user.id);
const sessions = await prisma.user_sessions.findMany({
where: {
user_id: req.user.id,
is_revoked: false,
expires_at: { gt: new Date() },
},
select: {
id: true,
ip_address: true,
user_agent: true,
device_fingerprint: true,
last_activity: true,
created_at: true,
expires_at: true,
tfa_remember_me: true,
tfa_bypass_until: true,
login_count: true,
last_login_ip: true,
},
orderBy: { last_activity: "desc" },
});
// Enhance sessions with device info
const enhanced_sessions = sessions.map((session) => {
const is_current_session = session.id === req.session_id;
const device_info = parse_user_agent(session.user_agent);
return {
...session,
is_current_session,
device_info,
location_info: get_location_from_ip(session.ip_address),
};
});
res.json({
sessions: sessions,
sessions: enhanced_sessions,
});
} catch (error) {
console.error("Get sessions error:", error);
@@ -1134,6 +1233,11 @@ router.delete("/sessions/:session_id", authenticateToken, async (req, res) => {
return res.status(404).json({ error: "Session not found" });
}
// Don't allow revoking the current session
if (session_id === req.session_id) {
return res.status(400).json({ error: "Cannot revoke current session" });
}
await revoke_session(session_id);
res.json({
@@ -1145,4 +1249,25 @@ router.delete("/sessions/:session_id", authenticateToken, async (req, res) => {
}
});
// Revoke all sessions except current one
router.delete("/sessions", authenticateToken, async (req, res) => {
try {
// Revoke all sessions except the current one
await prisma.user_sessions.updateMany({
where: {
user_id: req.user.id,
id: { not: req.session_id },
},
data: { is_revoked: true },
});
res.json({
message: "All other sessions revoked successfully",
});
} catch (error) {
console.error("Revoke all sessions error:", error);
res.status(500).json({ error: "Failed to revoke sessions" });
}
});
module.exports = router;
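For illustration, parse_user_agent is a simple substring matcher; evaluating it inside this module against a typical desktop Chrome user agent (the UA string is an example, not from the diff) gives:

parse_user_agent(
  "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 " +
    "(KHTML, like Gecko) Chrome/120.0 Safari/537.36",
);
// => { browser: "Chrome", os: "Windows", device: "Desktop" }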

View File

@@ -0,0 +1,362 @@
const express = require("express");
const { queueManager, QUEUE_NAMES } = require("../services/automation");
const { authenticateToken } = require("../middleware/auth");
const router = express.Router();
// Get all queue statistics
router.get("/stats", authenticateToken, async (req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
res.json({
success: true,
data: stats,
});
} catch (error) {
console.error("Error fetching queue stats:", error);
res.status(500).json({
success: false,
error: "Failed to fetch queue statistics",
});
}
});
// Get specific queue statistics
router.get("/stats/:queueName", authenticateToken, async (req, res) => {
try {
const { queueName } = req.params;
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
return res.status(400).json({
success: false,
error: "Invalid queue name",
});
}
const stats = await queueManager.getQueueStats(queueName);
res.json({
success: true,
data: stats,
});
} catch (error) {
console.error("Error fetching queue stats:", error);
res.status(500).json({
success: false,
error: "Failed to fetch queue statistics",
});
}
});
// Get recent jobs for a queue
router.get("/jobs/:queueName", authenticateToken, async (req, res) => {
try {
const { queueName } = req.params;
const { limit = 10 } = req.query;
if (!Object.values(QUEUE_NAMES).includes(queueName)) {
return res.status(400).json({
success: false,
error: "Invalid queue name",
});
}
const jobs = await queueManager.getRecentJobs(queueName, parseInt(limit));
// Format jobs for frontend
const formattedJobs = jobs.map((job) => ({
id: job.id,
name: job.name,
status: job.finishedOn
? job.failedReason
? "failed"
: "completed"
: "active",
progress: job.progress,
data: job.data,
returnvalue: job.returnvalue,
failedReason: job.failedReason,
processedOn: job.processedOn,
finishedOn: job.finishedOn,
createdAt: new Date(job.timestamp),
attemptsMade: job.attemptsMade,
delay: job.delay,
}));
res.json({
success: true,
data: formattedJobs,
});
} catch (error) {
console.error("Error fetching recent jobs:", error);
res.status(500).json({
success: false,
error: "Failed to fetch recent jobs",
});
}
});
// Trigger manual GitHub update check
router.post("/trigger/github-update", authenticateToken, async (req, res) => {
try {
const job = await queueManager.triggerGitHubUpdateCheck();
res.json({
success: true,
data: {
jobId: job.id,
message: "GitHub update check triggered successfully",
},
});
} catch (error) {
console.error("Error triggering GitHub update check:", error);
res.status(500).json({
success: false,
error: "Failed to trigger GitHub update check",
});
}
});
// Trigger manual session cleanup
router.post("/trigger/session-cleanup", authenticateToken, async (req, res) => {
try {
const job = await queueManager.triggerSessionCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Session cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering session cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger session cleanup",
});
}
});
// Trigger manual echo hello
router.post("/trigger/echo-hello", authenticateToken, async (req, res) => {
try {
const { message } = req.body;
const job = await queueManager.triggerEchoHello(message);
res.json({
success: true,
data: {
jobId: job.id,
message: "Echo hello triggered successfully",
},
});
} catch (error) {
console.error("Error triggering echo hello:", error);
res.status(500).json({
success: false,
error: "Failed to trigger echo hello",
});
}
});
// Trigger manual orphaned repo cleanup
router.post(
"/trigger/orphaned-repo-cleanup",
authenticateToken,
async (req, res) => {
try {
const job = await queueManager.triggerOrphanedRepoCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Orphaned repository cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering orphaned repository cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger orphaned repository cleanup",
});
}
},
);
// Get queue health status
router.get("/health", authenticateToken, async (req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
const totalJobs = Object.values(stats).reduce((sum, queueStats) => {
return sum + queueStats.waiting + queueStats.active + queueStats.failed;
}, 0);
const health = {
status: "healthy",
totalJobs,
queues: Object.keys(stats).length,
timestamp: new Date().toISOString(),
};
// Check for unhealthy conditions
if (totalJobs > 1000) {
health.status = "warning";
health.message = "High number of queued jobs";
}
const failedJobs = Object.values(stats).reduce((sum, queueStats) => {
return sum + queueStats.failed;
}, 0);
if (failedJobs > 10) {
health.status = "error";
health.message = "High number of failed jobs";
}
res.json({
success: true,
data: health,
});
} catch (error) {
console.error("Error checking queue health:", error);
res.status(500).json({
success: false,
error: "Failed to check queue health",
});
}
});
// Get automation overview (for dashboard cards)
router.get("/overview", authenticateToken, async (req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
// Get recent jobs for each queue to show last run times
const recentJobs = await Promise.all([
queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ECHO_HELLO, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
]);
// Calculate overview metrics
const overview = {
scheduledTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].delayed +
stats[QUEUE_NAMES.ECHO_HELLO].delayed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed,
runningTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
stats[QUEUE_NAMES.SESSION_CLEANUP].active +
stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].active +
stats[QUEUE_NAMES.ECHO_HELLO].active +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active,
failedTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
stats[QUEUE_NAMES.SYSTEM_MAINTENANCE].failed +
stats[QUEUE_NAMES.ECHO_HELLO].failed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed,
totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
return (
sum +
queueStats.completed +
queueStats.failed +
queueStats.active +
queueStats.waiting +
queueStats.delayed
);
}, 0),
// Automation details with last run times
automations: [
{
name: "GitHub Update Check",
queue: QUEUE_NAMES.GITHUB_UPDATE_CHECK,
description: "Checks for new PatchMon releases",
schedule: "Daily at midnight",
lastRun: recentJobs[0][0]?.finishedOn
? new Date(recentJobs[0][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[0][0]?.finishedOn || 0,
status: recentJobs[0][0]?.failedReason
? "Failed"
: recentJobs[0][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
},
{
name: "Session Cleanup",
queue: QUEUE_NAMES.SESSION_CLEANUP,
description: "Cleans up expired user sessions",
schedule: "Every hour",
lastRun: recentJobs[1][0]?.finishedOn
? new Date(recentJobs[1][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[1][0]?.finishedOn || 0,
status: recentJobs[1][0]?.failedReason
? "Failed"
: recentJobs[1][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.SESSION_CLEANUP],
},
{
name: "Echo Hello",
queue: QUEUE_NAMES.ECHO_HELLO,
description: "Simple test automation task",
schedule: "Manual only",
lastRun: recentJobs[2][0]?.finishedOn
? new Date(recentJobs[2][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[2][0]?.finishedOn || 0,
status: recentJobs[2][0]?.failedReason
? "Failed"
: recentJobs[2][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ECHO_HELLO],
},
{
name: "Orphaned Repo Cleanup",
queue: QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
description: "Removes repositories with no associated hosts",
schedule: "Daily at 2 AM",
lastRun: recentJobs[3][0]?.finishedOn
? new Date(recentJobs[3][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[3][0]?.finishedOn || 0,
status: recentJobs[3][0]?.failedReason
? "Failed"
: recentJobs[3][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
},
].sort((a, b) => {
// Sort by last run timestamp (most recent first)
// If both have never run (timestamp 0), maintain original order
if (a.lastRunTimestamp === 0 && b.lastRunTimestamp === 0) return 0;
if (a.lastRunTimestamp === 0) return 1; // Never run goes to bottom
if (b.lastRunTimestamp === 0) return -1; // Never run goes to bottom
return b.lastRunTimestamp - a.lastRunTimestamp; // Most recent first
}),
};
res.json({
success: true,
data: overview,
});
} catch (error) {
console.error("Error fetching automation overview:", error);
res.status(500).json({
success: false,
error: "Failed to fetch automation overview",
});
}
});
module.exports = router;
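An illustrative client call (the /api/v1/automation mount path and the token variable are assumptions, not shown in this diff) that triggers the echo-hello job and reads the documented response shape:

const res = await fetch("/api/v1/automation/trigger/echo-hello", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${token}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({ message: "hello from the queue" }),
});
const { success, data } = await res.json();
// success === true; data.jobId identifies the queued BullMQ job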

View File

@@ -130,15 +130,20 @@ async function createDefaultDashboardPreferences(userId, userRole = "user") {
requiredPermission: "can_view_packages",
order: 13,
},
{
cardId: "packageTrends",
requiredPermission: "can_view_packages",
order: 14,
},
{
cardId: "recentUsers",
requiredPermission: "can_view_users",
order: 14,
order: 15,
},
{
cardId: "quickStats",
requiredPermission: "can_view_dashboard",
order: 15,
order: 16,
},
];
@@ -341,19 +346,26 @@ router.get("/defaults", authenticateToken, async (_req, res) => {
enabled: true,
order: 13,
},
{
cardId: "packageTrends",
title: "Package Trends",
icon: "TrendingUp",
enabled: true,
order: 14,
},
{
cardId: "recentUsers",
title: "Recent Users Logged in",
icon: "Users",
enabled: true,
order: 14,
order: 15,
},
{
cardId: "quickStats",
title: "Quick Stats",
icon: "TrendingUp",
enabled: true,
order: 15,
order: 16,
},
];

View File

@@ -145,9 +145,13 @@ router.get(
];
// Package update priority distribution
const regularUpdates = Math.max(
0,
totalOutdatedPackages - securityUpdates,
);
const packageUpdateDistribution = [
{ name: "Security", count: securityUpdates },
{ name: "Regular", count: totalOutdatedPackages - securityUpdates },
{ name: "Regular", count: regularUpdates },
];
res.json({
@@ -343,32 +347,41 @@ router.get(
try {
const { hostId } = req.params;
const host = await prisma.hosts.findUnique({
where: { id: hostId },
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
const limit = parseInt(req.query.limit, 10) || 10;
const offset = parseInt(req.query.offset, 10) || 0;
const [host, totalHistoryCount] = await Promise.all([
prisma.hosts.findUnique({
where: { id: hostId },
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
host_packages: {
include: {
packages: true,
},
orderBy: {
needs_update: "desc",
},
},
update_history: {
orderBy: {
timestamp: "desc",
},
take: limit,
skip: offset,
},
},
host_packages: {
include: {
packages: true,
},
orderBy: {
needs_update: "desc",
},
},
update_history: {
orderBy: {
timestamp: "desc",
},
take: 10,
},
},
});
}),
prisma.update_history.count({
where: { host_id: hostId },
}),
]);
if (!host) {
return res.status(404).json({ error: "Host not found" });
@@ -384,6 +397,12 @@ router.get(
(hp) => hp.needs_update && hp.is_security_update,
).length,
},
pagination: {
total: totalHistoryCount,
limit,
offset,
hasMore: offset + limit < totalHistoryCount,
},
};
res.json(hostWithStats);
@@ -456,4 +475,132 @@ router.get(
},
);
// Get package trends over time
router.get(
"/package-trends",
authenticateToken,
requireViewHosts,
async (req, res) => {
try {
const { days = 30, hostId } = req.query;
const daysInt = parseInt(days, 10);
// Calculate date range
const endDate = new Date();
const startDate = new Date();
startDate.setDate(endDate.getDate() - daysInt);
// Build where clause
const whereClause = {
timestamp: {
gte: startDate,
lte: endDate,
},
};
// Add host filter if specified
if (hostId && hostId !== "all" && hostId !== "undefined") {
whereClause.host_id = hostId;
}
// Get all update history records in the date range
const trendsData = await prisma.update_history.findMany({
where: whereClause,
select: {
timestamp: true,
packages_count: true,
security_count: true,
total_packages: true,
},
orderBy: {
timestamp: "asc",
},
});
// Process data to show actual values (no averaging)
const processedData = trendsData
.filter((record) => record.total_packages !== null) // Only include records with valid data
.map((record) => {
const date = new Date(record.timestamp);
let timeKey;
if (daysInt <= 1) {
// For hourly view, use exact timestamp
timeKey = date.toISOString().substring(0, 16); // YYYY-MM-DDTHH:MM
} else {
// For daily view, group by day
timeKey = date.toISOString().split("T")[0]; // YYYY-MM-DD
}
return {
timeKey,
total_packages: record.total_packages,
packages_count: record.packages_count || 0,
security_count: record.security_count || 0,
};
})
.sort((a, b) => a.timeKey.localeCompare(b.timeKey)); // Sort by time
// Get hosts list for the host-filter dropdown (always fetched, regardless of hostId)
const hostsList = await prisma.hosts.findMany({
select: {
id: true,
friendly_name: true,
hostname: true,
},
orderBy: {
friendly_name: "asc",
},
});
// Format data for chart
const chartData = {
labels: [],
datasets: [
{
label: "Total Packages",
data: [],
borderColor: "#3B82F6", // Blue
backgroundColor: "rgba(59, 130, 246, 0.1)",
tension: 0.4,
hidden: true, // Hidden by default
},
{
label: "Outdated Packages",
data: [],
borderColor: "#F59E0B", // Orange
backgroundColor: "rgba(245, 158, 11, 0.1)",
tension: 0.4,
},
{
label: "Security Packages",
data: [],
borderColor: "#EF4444", // Red
backgroundColor: "rgba(239, 68, 68, 0.1)",
tension: 0.4,
},
],
};
// Process aggregated data
processedData.forEach((item) => {
chartData.labels.push(item.timeKey);
chartData.datasets[0].data.push(item.total_packages);
chartData.datasets[1].data.push(item.packages_count);
chartData.datasets[2].data.push(item.security_count);
});
res.json({
chartData,
hosts: hostsList,
period: daysInt,
hostId: hostId || "all",
});
} catch (error) {
console.error("Error fetching package trends:", error);
res.status(500).json({ error: "Failed to fetch package trends" });
}
},
);
module.exports = router;
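A sketch of how a client would call the new trends endpoint (the route prefix and token variable are assumptions). With days <= 1 the points are keyed per minute (YYYY-MM-DDTHH:MM); larger windows are keyed by calendar day (YYYY-MM-DD).

const res = await fetch("/api/v1/dashboard/package-trends?days=30&hostId=all", {
  headers: { Authorization: `Bearer ${token}` },
});
const { chartData, hosts, period } = await res.json();
// chartData.labels   -> ISO date/time keys, e.g. "2025-10-01"
// chartData.datasets -> "Total Packages" (hidden by default), "Outdated Packages", "Security Packages"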

View File

@@ -0,0 +1,779 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { PrismaClient } = require("@prisma/client");
const { v4: uuidv4 } = require("uuid");
const prisma = new PrismaClient();
const router = express.Router();
// Helper function to convert BigInt fields to strings for JSON serialization
const convertBigIntToString = (obj) => {
if (obj === null || obj === undefined) return obj;
if (typeof obj === "bigint") {
return obj.toString();
}
if (Array.isArray(obj)) {
return obj.map(convertBigIntToString);
}
if (typeof obj === "object") {
const converted = {};
for (const key in obj) {
converted[key] = convertBigIntToString(obj[key]);
}
return converted;
}
return obj;
};
// GET /api/v1/docker/dashboard - Get Docker dashboard statistics
router.get("/dashboard", authenticateToken, async (_req, res) => {
try {
// Get total hosts with Docker containers
const hostsWithDocker = await prisma.docker_containers.groupBy({
by: ["host_id"],
_count: true,
});
// Get total containers
const totalContainers = await prisma.docker_containers.count();
// Get running containers
const runningContainers = await prisma.docker_containers.count({
where: { status: "running" },
});
// Get total images
const totalImages = await prisma.docker_images.count();
// Get available updates
const availableUpdates = await prisma.docker_image_updates.count();
// Get containers by status
const containersByStatus = await prisma.docker_containers.groupBy({
by: ["status"],
_count: true,
});
// Get images by source
const imagesBySource = await prisma.docker_images.groupBy({
by: ["source"],
_count: true,
});
res.json({
stats: {
totalHostsWithDocker: hostsWithDocker.length,
totalContainers,
runningContainers,
totalImages,
availableUpdates,
},
containersByStatus,
imagesBySource,
});
} catch (error) {
console.error("Error fetching Docker dashboard:", error);
res.status(500).json({ error: "Failed to fetch Docker dashboard" });
}
});
// GET /api/v1/docker/containers - Get all containers with filters
router.get("/containers", authenticateToken, async (req, res) => {
try {
const { status, hostId, imageId, search, page = 1, limit = 50 } = req.query;
const where = {};
if (status) where.status = status;
if (hostId) where.host_id = hostId;
if (imageId) where.image_id = imageId;
if (search) {
where.OR = [
{ name: { contains: search, mode: "insensitive" } },
{ image_name: { contains: search, mode: "insensitive" } },
];
}
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const [containers, total] = await Promise.all([
prisma.docker_containers.findMany({
where,
include: {
docker_images: true,
},
orderBy: { updated_at: "desc" },
skip,
take,
}),
prisma.docker_containers.count({ where }),
]);
// Get host information for each container
const hostIds = [...new Set(containers.map((c) => c.host_id))];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true, ip: true },
});
const hostsMap = hosts.reduce((acc, host) => {
acc[host.id] = host;
return acc;
}, {});
const containersWithHosts = containers.map((container) => ({
...container,
host: hostsMap[container.host_id],
}));
res.json(
convertBigIntToString({
containers: containersWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching containers:", error);
res.status(500).json({ error: "Failed to fetch containers" });
}
});
// GET /api/v1/docker/containers/:id - Get container detail
router.get("/containers/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;
const container = await prisma.docker_containers.findUnique({
where: { id },
include: {
docker_images: {
include: {
docker_image_updates: true,
},
},
},
});
if (!container) {
return res.status(404).json({ error: "Container not found" });
}
// Get host information
const host = await prisma.hosts.findUnique({
where: { id: container.host_id },
select: {
id: true,
friendly_name: true,
hostname: true,
ip: true,
os_type: true,
os_version: true,
},
});
// Get other containers using the same image
const similarContainers = await prisma.docker_containers.findMany({
where: {
image_id: container.image_id,
id: { not: id },
},
take: 10,
});
res.json(
convertBigIntToString({
container: {
...container,
host,
},
similarContainers,
}),
);
} catch (error) {
console.error("Error fetching container detail:", error);
res.status(500).json({ error: "Failed to fetch container detail" });
}
});
// GET /api/v1/docker/images - Get all images with filters
router.get("/images", authenticateToken, async (req, res) => {
try {
const { source, search, page = 1, limit = 50 } = req.query;
const where = {};
if (source) where.source = source;
if (search) {
where.OR = [
{ repository: { contains: search, mode: "insensitive" } },
{ tag: { contains: search, mode: "insensitive" } },
];
}
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const [images, total] = await Promise.all([
prisma.docker_images.findMany({
where,
include: {
_count: {
select: {
docker_containers: true,
docker_image_updates: true,
},
},
docker_image_updates: {
take: 1,
orderBy: { created_at: "desc" },
},
},
orderBy: { updated_at: "desc" },
skip,
take,
}),
prisma.docker_images.count({ where }),
]);
// Get unique hosts using each image
const imagesWithHosts = await Promise.all(
images.map(async (image) => {
const containers = await prisma.docker_containers.findMany({
where: { image_id: image.id },
select: { host_id: true },
distinct: ["host_id"],
});
return {
...image,
hostsCount: containers.length,
hasUpdates: image._count.docker_image_updates > 0,
};
}),
);
res.json(
convertBigIntToString({
images: imagesWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching images:", error);
res.status(500).json({ error: "Failed to fetch images" });
}
});
// GET /api/v1/docker/images/:id - Get image detail
router.get("/images/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;
const image = await prisma.docker_images.findUnique({
where: { id },
include: {
docker_containers: {
take: 100,
},
docker_image_updates: {
orderBy: { created_at: "desc" },
},
},
});
if (!image) {
return res.status(404).json({ error: "Image not found" });
}
// Get unique hosts using this image
const hostIds = [...new Set(image.docker_containers.map((c) => c.host_id))];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true, ip: true },
});
res.json(
convertBigIntToString({
image,
hosts,
totalContainers: image.docker_containers.length,
totalHosts: hosts.length,
}),
);
} catch (error) {
console.error("Error fetching image detail:", error);
res.status(500).json({ error: "Failed to fetch image detail" });
}
});
// GET /api/v1/docker/hosts - Get all hosts with Docker
router.get("/hosts", authenticateToken, async (req, res) => {
try {
const { page = 1, limit = 50 } = req.query;
// Get hosts that have Docker containers
const hostsWithContainers = await prisma.docker_containers.groupBy({
by: ["host_id"],
_count: true,
});
const hostIds = hostsWithContainers.map((h) => h.host_id);
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
skip,
take,
orderBy: { friendly_name: "asc" },
});
// Get container counts and statuses for each host
const hostsWithStats = await Promise.all(
hosts.map(async (host) => {
const [totalContainers, runningContainers, totalImages] =
await Promise.all([
prisma.docker_containers.count({
where: { host_id: host.id },
}),
prisma.docker_containers.count({
where: { host_id: host.id, status: "running" },
}),
prisma.docker_containers.findMany({
where: { host_id: host.id },
select: { image_id: true },
distinct: ["image_id"],
}),
]);
return {
...host,
dockerStats: {
totalContainers,
runningContainers,
totalImages: totalImages.length,
},
};
}),
);
res.json(
convertBigIntToString({
hosts: hostsWithStats,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total: hostIds.length,
totalPages: Math.ceil(hostIds.length / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching Docker hosts:", error);
res.status(500).json({ error: "Failed to fetch Docker hosts" });
}
});
// GET /api/v1/docker/hosts/:id - Get host Docker detail
router.get("/hosts/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;
const host = await prisma.hosts.findUnique({
where: { id },
});
if (!host) {
return res.status(404).json({ error: "Host not found" });
}
// Get containers on this host
const containers = await prisma.docker_containers.findMany({
where: { host_id: id },
include: {
docker_images: {
include: {
docker_image_updates: true,
},
},
},
orderBy: { name: "asc" },
});
// Get unique images on this host
const imageIds = [...new Set(containers.map((c) => c.image_id))].filter(
Boolean,
);
const images = await prisma.docker_images.findMany({
where: { id: { in: imageIds } },
});
// Get container statistics
const runningContainers = containers.filter(
(c) => c.status === "running",
).length;
const stoppedContainers = containers.filter(
(c) => c.status === "exited" || c.status === "stopped",
).length;
res.json(
convertBigIntToString({
host,
containers,
images,
stats: {
totalContainers: containers.length,
runningContainers,
stoppedContainers,
totalImages: images.length,
},
}),
);
} catch (error) {
console.error("Error fetching host Docker detail:", error);
res.status(500).json({ error: "Failed to fetch host Docker detail" });
}
});
// GET /api/v1/docker/updates - Get available updates
router.get("/updates", authenticateToken, async (req, res) => {
try {
const { page = 1, limit = 50, securityOnly = false } = req.query;
const where = {};
if (securityOnly === "true") {
where.is_security_update = true;
}
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);
const [updates, total] = await Promise.all([
prisma.docker_image_updates.findMany({
where,
include: {
docker_images: {
include: {
docker_containers: {
select: {
id: true,
host_id: true,
name: true,
},
},
},
},
},
orderBy: [{ is_security_update: "desc" }, { created_at: "desc" }],
skip,
take,
}),
prisma.docker_image_updates.count({ where }),
]);
// Get affected hosts for each update
const updatesWithHosts = await Promise.all(
updates.map(async (update) => {
const hostIds = [
...new Set(
update.docker_images.docker_containers.map((c) => c.host_id),
),
];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true },
});
return {
...update,
affectedHosts: hosts,
affectedContainersCount:
update.docker_images.docker_containers.length,
};
}),
);
res.json(
convertBigIntToString({
updates: updatesWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching Docker updates:", error);
res.status(500).json({ error: "Failed to fetch Docker updates" });
}
});
// POST /api/v1/docker/collect - Collect Docker data from agent
router.post("/collect", async (req, res) => {
try {
const { apiId, apiKey, containers, images, updates } = req.body;
// Validate API credentials
const host = await prisma.hosts.findFirst({
where: { api_id: apiId, api_key: apiKey },
});
if (!host) {
return res.status(401).json({ error: "Invalid API credentials" });
}
const now = new Date();
// Helper function to validate and parse dates
const parseDate = (dateString) => {
if (!dateString) return now;
const date = new Date(dateString);
return Number.isNaN(date.getTime()) ? now : date;
};
// Process containers
if (containers && Array.isArray(containers)) {
for (const containerData of containers) {
const containerId = uuidv4();
// Find or create image
let imageId = null;
if (containerData.image_repository && containerData.image_tag) {
const image = await prisma.docker_images.upsert({
where: {
repository_tag_image_id: {
repository: containerData.image_repository,
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
},
},
update: {
last_checked: now,
updated_at: now,
},
create: {
id: uuidv4(),
repository: containerData.image_repository,
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
source: containerData.image_source || "docker-hub",
created_at: parseDate(containerData.created_at),
updated_at: now,
},
});
imageId = image.id;
}
// Upsert container
await prisma.docker_containers.upsert({
where: {
host_id_container_id: {
host_id: host.id,
container_id: containerData.container_id,
},
},
update: {
name: containerData.name,
image_id: imageId,
image_name: containerData.image_name,
image_tag: containerData.image_tag || "latest",
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
started_at: containerData.started_at
? parseDate(containerData.started_at)
: null,
updated_at: now,
last_checked: now,
},
create: {
id: containerId,
host_id: host.id,
container_id: containerData.container_id,
name: containerData.name,
image_id: imageId,
image_name: containerData.image_name,
image_tag: containerData.image_tag || "latest",
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
created_at: parseDate(containerData.created_at),
started_at: containerData.started_at
? parseDate(containerData.started_at)
: null,
updated_at: now,
},
});
}
}
// Process standalone images
if (images && Array.isArray(images)) {
for (const imageData of images) {
await prisma.docker_images.upsert({
where: {
repository_tag_image_id: {
repository: imageData.repository,
tag: imageData.tag,
image_id: imageData.image_id,
},
},
update: {
size_bytes: imageData.size_bytes
? BigInt(imageData.size_bytes)
: null,
last_checked: now,
updated_at: now,
},
create: {
id: uuidv4(),
repository: imageData.repository,
tag: imageData.tag,
image_id: imageData.image_id,
digest: imageData.digest,
size_bytes: imageData.size_bytes
? BigInt(imageData.size_bytes)
: null,
source: imageData.source || "docker-hub",
created_at: parseDate(imageData.created_at),
updated_at: now,
},
});
}
}
// Process updates
// First, get all images for this host to clean up old updates
const hostImageIds = await prisma.docker_containers
.findMany({
where: { host_id: host.id },
select: { image_id: true },
distinct: ["image_id"],
})
.then((results) => results.map((r) => r.image_id).filter(Boolean));
// Delete old updates for images on this host that are no longer reported
if (hostImageIds.length > 0) {
const reportedImageIds = [];
// Process new updates
if (updates && Array.isArray(updates)) {
for (const updateData of updates) {
// Find the image by repository, tag, and image_id
const image = await prisma.docker_images.findFirst({
where: {
repository: updateData.repository,
tag: updateData.current_tag,
image_id: updateData.image_id,
},
});
if (image) {
reportedImageIds.push(image.id);
// Store digest info in changelog_url field as JSON for now
const digestInfo = JSON.stringify({
method: "digest_comparison",
current_digest: updateData.current_digest,
available_digest: updateData.available_digest,
});
// Upsert the update record
await prisma.docker_image_updates.upsert({
where: {
image_id_available_tag: {
image_id: image.id,
available_tag: updateData.available_tag,
},
},
update: {
updated_at: now,
changelog_url: digestInfo,
severity: "digest_changed",
},
create: {
id: uuidv4(),
image_id: image.id,
current_tag: updateData.current_tag,
available_tag: updateData.available_tag,
severity: "digest_changed",
changelog_url: digestInfo,
updated_at: now,
},
});
}
}
}
// Remove stale updates for images on this host that are no longer in the updates list
const imageIdsToCleanup = hostImageIds.filter(
(id) => !reportedImageIds.includes(id),
);
if (imageIdsToCleanup.length > 0) {
await prisma.docker_image_updates.deleteMany({
where: {
image_id: { in: imageIdsToCleanup },
},
});
}
}
res.json({ success: true, message: "Docker data collected successfully" });
} catch (error) {
console.error("Error collecting Docker data:", error);
console.error("Error stack:", error.stack);
console.error("Request body:", JSON.stringify(req.body, null, 2));
res.status(500).json({
error: "Failed to collect Docker data",
message: error.message,
details: process.env.NODE_ENV === "development" ? error.stack : undefined,
});
}
});
// GET /api/v1/docker/agent - Serve the Docker agent installation script
router.get("/agent", async (_req, res) => {
try {
const fs = require("node:fs");
const path = require("node:path");
const agentPath = path.join(
__dirname,
"../../..",
"agents",
"patchmon-docker-agent.sh",
);
// Check if file exists
if (!fs.existsSync(agentPath)) {
return res.status(404).json({ error: "Docker agent script not found" });
}
// Read and serve the file
const agentScript = fs.readFileSync(agentPath, "utf8");
res.setHeader("Content-Type", "text/x-shellscript");
res.setHeader(
"Content-Disposition",
'inline; filename="patchmon-docker-agent.sh"',
);
res.send(agentScript);
} catch (error) {
console.error("Error serving Docker agent:", error);
res.status(500).json({ error: "Failed to serve Docker agent script" });
}
});
module.exports = router;
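A hedged sketch of the report the Docker agent POSTs to the collect endpoint; the path matches the route comment above, the field names come from the handler, and all values are placeholders.

await fetch("https://patchmon.example.com/api/v1/docker/collect", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    apiId: "host-api-id",
    apiKey: "host-api-key",
    containers: [
      {
        container_id: "3f2a1b9c0d4e",
        name: "nginx",
        image_name: "nginx:1.27",
        image_repository: "nginx",
        image_tag: "1.27",
        image_id: "sha256:abc123",
        image_source: "docker-hub",
        status: "running",
        state: "running",
        created_at: "2025-10-10T12:00:00Z",
        started_at: "2025-10-10T12:00:05Z",
      },
    ],
    images: [],
    updates: [],
  }),
});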

View File

@@ -0,0 +1,236 @@
const express = require("express");
const { createPrismaClient } = require("../config/database");
const bcrypt = require("bcryptjs");
const router = express.Router();
const prisma = createPrismaClient();
// Middleware to authenticate API key
const authenticateApiKey = async (req, res, next) => {
try {
const authHeader = req.headers.authorization;
if (!authHeader || !authHeader.startsWith("Basic ")) {
return res
.status(401)
.json({ error: "Missing or invalid authorization header" });
}
// Decode base64 credentials
const base64Credentials = authHeader.split(" ")[1];
const credentials = Buffer.from(base64Credentials, "base64").toString(
"ascii",
);
const [apiKey, apiSecret] = credentials.split(":");
if (!apiKey || !apiSecret) {
return res.status(401).json({ error: "Invalid credentials format" });
}
// Find the token in database
const token = await prisma.auto_enrollment_tokens.findUnique({
where: { token_key: apiKey },
include: {
users: {
select: {
id: true,
username: true,
role: true,
},
},
},
});
if (!token) {
console.log(`API key not found: ${apiKey}`);
return res.status(401).json({ error: "Invalid API key" });
}
// Check if token is active
if (!token.is_active) {
return res.status(401).json({ error: "API key is disabled" });
}
// Check if token has expired
if (token.expires_at && new Date(token.expires_at) < new Date()) {
return res.status(401).json({ error: "API key has expired" });
}
// Check if token is for gethomepage integration
if (token.metadata?.integration_type !== "gethomepage") {
return res.status(401).json({ error: "Invalid API key type" });
}
// Verify the secret
const isValidSecret = await bcrypt.compare(apiSecret, token.token_secret);
if (!isValidSecret) {
return res.status(401).json({ error: "Invalid API secret" });
}
// Check IP restrictions if any
if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
const clientIp = req.ip || req.connection.remoteAddress;
const forwardedFor = req.headers["x-forwarded-for"];
const realIp = req.headers["x-real-ip"];
// Get the actual client IP (considering proxies)
const actualClientIp = forwardedFor
? forwardedFor.split(",")[0].trim()
: realIp || clientIp;
const isAllowedIp = token.allowed_ip_ranges.some((range) => {
// Simple IP range check (can be enhanced for CIDR support)
return actualClientIp.startsWith(range) || actualClientIp === range;
});
if (!isAllowedIp) {
console.log(
`IP validation failed. Client IP: ${actualClientIp}, Allowed ranges: ${token.allowed_ip_ranges.join(", ")}`,
);
return res.status(403).json({ error: "IP address not allowed" });
}
}
// Update last used timestamp
await prisma.auto_enrollment_tokens.update({
where: { id: token.id },
data: { last_used_at: new Date() },
});
// Attach token info to request
req.apiToken = token;
next();
} catch (error) {
console.error("API key authentication error:", error);
res.status(500).json({ error: "Authentication failed" });
}
};
// Get homepage widget statistics
router.get("/stats", authenticateApiKey, async (_req, res) => {
try {
// Get total hosts count
const totalHosts = await prisma.hosts.count({
where: { status: "active" },
});
// Get total outdated packages count
const totalOutdatedPackages = await prisma.host_packages.count({
where: { needs_update: true },
});
// Get total repositories count
const totalRepos = await prisma.repositories.count({
where: { is_active: true },
});
// Get hosts that need updates (have outdated packages)
const hostsNeedingUpdates = await prisma.hosts.count({
where: {
status: "active",
host_packages: {
some: {
needs_update: true,
},
},
},
});
// Get security updates count
const securityUpdates = await prisma.host_packages.count({
where: {
needs_update: true,
is_security_update: true,
},
});
// Get hosts with security updates
const hostsWithSecurityUpdates = await prisma.hosts.count({
where: {
status: "active",
host_packages: {
some: {
needs_update: true,
is_security_update: true,
},
},
},
});
// Get up-to-date hosts count
const upToDateHosts = totalHosts - hostsNeedingUpdates;
// Get recent update activity (last 24 hours)
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const recentUpdates = await prisma.update_history.count({
where: {
timestamp: {
gte: oneDayAgo,
},
status: "success",
},
});
// Get OS distribution
const osDistribution = await prisma.hosts.groupBy({
by: ["os_type"],
where: { status: "active" },
_count: {
id: true,
},
orderBy: {
_count: {
id: "desc",
},
},
});
// Format OS distribution data
const osDistributionFormatted = osDistribution.map((os) => ({
name: os.os_type,
count: os._count.id,
}));
// Extract top 3 OS types for flat display in widgets
const top_os_1 = osDistributionFormatted[0] || { name: "None", count: 0 };
const top_os_2 = osDistributionFormatted[1] || { name: "None", count: 0 };
const top_os_3 = osDistributionFormatted[2] || { name: "None", count: 0 };
// Prepare response data
const stats = {
total_hosts: totalHosts,
total_outdated_packages: totalOutdatedPackages,
total_repos: totalRepos,
hosts_needing_updates: hostsNeedingUpdates,
up_to_date_hosts: upToDateHosts,
security_updates: securityUpdates,
hosts_with_security_updates: hostsWithSecurityUpdates,
recent_updates_24h: recentUpdates,
os_distribution: osDistributionFormatted,
// Flattened OS data for easy widget display
top_os_1_name: top_os_1.name,
top_os_1_count: top_os_1.count,
top_os_2_name: top_os_2.name,
top_os_2_count: top_os_2.count,
top_os_3_name: top_os_3.name,
top_os_3_count: top_os_3.count,
last_updated: new Date().toISOString(),
};
res.json(stats);
} catch (error) {
console.error("Error fetching homepage stats:", error);
res.status(500).json({ error: "Failed to fetch statistics" });
}
});
// Health check endpoint for the API
router.get("/health", authenticateApiKey, async (req, res) => {
res.json({
status: "ok",
timestamp: new Date().toISOString(),
api_key: req.apiToken.token_name,
});
});
module.exports = router;
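An illustrative widget-side call (hostname, credentials, and the /api/v1/gethomepage mount path are placeholders or assumptions): the token key is sent as the Basic-auth username and the raw secret as the password, matching the authenticateApiKey middleware above.

const auth = Buffer.from("TOKEN_KEY:TOKEN_SECRET").toString("base64");
const res = await fetch("https://patchmon.example.com/api/v1/gethomepage/stats", {
  headers: { Authorization: `Basic ${auth}` },
});
const stats = await res.json();
// stats.total_hosts, stats.security_updates, stats.top_os_1_name, ...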

View File

@@ -325,9 +325,13 @@ router.post(
return res.status(400).json({ errors: errors.array() });
}
const { packages, repositories } = req.body;
const { packages, repositories, executionTime } = req.body;
const host = req.hostRecord;
// Calculate payload size in KB
const payloadSizeBytes = JSON.stringify(req.body).length;
const payloadSizeKb = payloadSizeBytes / 1024;
// Update host last update timestamp and system info if provided
const updateData = {
last_update: new Date(),
@@ -383,152 +387,193 @@ router.post(
(pkg) => pkg.isSecurityUpdate,
).length;
const updatesCount = packages.filter((pkg) => pkg.needsUpdate).length;
const totalPackages = packages.length;
// Process everything in a single transaction to avoid race conditions
await prisma.$transaction(async (tx) => {
// Update host data
await tx.hosts.update({
where: { id: host.id },
data: updateData,
});
// Clear existing host packages to avoid duplicates
await tx.host_packages.deleteMany({
where: { host_id: host.id },
});
// Process each package
for (const packageData of packages) {
// Find or create package
let pkg = await tx.packages.findUnique({
where: { name: packageData.name },
await prisma.$transaction(
async (tx) => {
// Update host data
await tx.hosts.update({
where: { id: host.id },
data: updateData,
});
if (!pkg) {
pkg = await tx.packages.create({
data: {
// Clear existing host packages to avoid duplicates
await tx.host_packages.deleteMany({
where: { host_id: host.id },
});
// Process packages in batches using createMany/updateMany
const packagesToCreate = [];
const packagesToUpdate = [];
const hostPackagesToUpsert = [];
// First pass: identify what needs to be created/updated
const existingPackages = await tx.packages.findMany({
where: {
name: { in: packages.map((p) => p.name) },
},
});
const existingPackageMap = new Map(
existingPackages.map((p) => [p.name, p]),
);
for (const packageData of packages) {
const existingPkg = existingPackageMap.get(packageData.name);
if (!existingPkg) {
// Package doesn't exist, create it
const newPkg = {
id: uuidv4(),
name: packageData.name,
description: packageData.description || null,
category: packageData.category || null,
latest_version:
packageData.availableVersion || packageData.currentVersion,
created_at: new Date(),
updated_at: new Date(),
};
packagesToCreate.push(newPkg);
existingPackageMap.set(packageData.name, newPkg);
} else if (
packageData.availableVersion &&
packageData.availableVersion !== existingPkg.latest_version
) {
// Package exists but needs version update
packagesToUpdate.push({
id: existingPkg.id,
latest_version: packageData.availableVersion,
});
}
}
// Batch create new packages
if (packagesToCreate.length > 0) {
await tx.packages.createMany({
data: packagesToCreate,
skipDuplicates: true,
});
}
// Batch update existing packages
for (const update of packagesToUpdate) {
await tx.packages.update({
where: { id: update.id },
data: {
latest_version: update.latest_version,
updated_at: new Date(),
},
});
} else {
// Update package latest version if newer
if (
packageData.availableVersion &&
packageData.availableVersion !== pkg.latest_version
) {
await tx.packages.update({
where: { id: pkg.id },
data: {
latest_version: packageData.availableVersion,
updated_at: new Date(),
},
});
}
}
// Create host package relationship
// Use upsert to handle potential duplicates gracefully
await tx.host_packages.upsert({
where: {
host_id_package_id: {
host_id: host.id,
package_id: pkg.id,
},
},
update: {
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
create: {
id: uuidv4(),
host_id: host.id,
package_id: pkg.id,
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
});
}
// Now process host_packages
for (const packageData of packages) {
const pkg = existingPackageMap.get(packageData.name);
// Process repositories if provided
if (repositories && Array.isArray(repositories)) {
// Clear existing host repositories
await tx.host_repositories.deleteMany({
where: { host_id: host.id },
});
// Deduplicate repositories by URL+distribution+components to avoid constraint violations
const uniqueRepos = new Map();
for (const repoData of repositories) {
const key = `${repoData.url}|${repoData.distribution}|${repoData.components}`;
if (!uniqueRepos.has(key)) {
uniqueRepos.set(key, repoData);
}
}
// Process each unique repository
for (const repoData of uniqueRepos.values()) {
// Find or create repository
let repo = await tx.repositories.findFirst({
await tx.host_packages.upsert({
where: {
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
},
});
if (!repo) {
repo = await tx.repositories.create({
data: {
id: uuidv4(),
name: repoData.name,
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
repo_type: repoData.repoType,
is_active: true,
is_secure: repoData.isSecure || false,
description: `${repoData.repoType} repository for ${repoData.distribution}`,
updated_at: new Date(),
host_id_package_id: {
host_id: host.id,
package_id: pkg.id,
},
});
}
// Create host repository relationship
await tx.host_repositories.create({
data: {
},
update: {
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
create: {
id: uuidv4(),
host_id: host.id,
repository_id: repo.id,
is_enabled: repoData.isEnabled !== false, // Default to enabled
package_id: pkg.id,
current_version: packageData.currentVersion,
available_version: packageData.availableVersion || null,
needs_update: packageData.needsUpdate,
is_security_update: packageData.isSecurityUpdate || false,
last_checked: new Date(),
},
});
}
}
// Create update history record
await tx.update_history.create({
data: {
id: uuidv4(),
host_id: host.id,
packages_count: updatesCount,
security_count: securityCount,
status: "success",
},
});
});
// Process repositories if provided
if (repositories && Array.isArray(repositories)) {
// Clear existing host repositories
await tx.host_repositories.deleteMany({
where: { host_id: host.id },
});
// Deduplicate repositories by URL+distribution+components to avoid constraint violations
const uniqueRepos = new Map();
for (const repoData of repositories) {
const key = `${repoData.url}|${repoData.distribution}|${repoData.components}`;
if (!uniqueRepos.has(key)) {
uniqueRepos.set(key, repoData);
}
}
// Process each unique repository
for (const repoData of uniqueRepos.values()) {
// Find or create repository
let repo = await tx.repositories.findFirst({
where: {
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
},
});
if (!repo) {
repo = await tx.repositories.create({
data: {
id: uuidv4(),
name: repoData.name,
url: repoData.url,
distribution: repoData.distribution,
components: repoData.components,
repo_type: repoData.repoType,
is_active: true,
is_secure: repoData.isSecure || false,
description: `${repoData.repoType} repository for ${repoData.distribution}`,
updated_at: new Date(),
},
});
}
// Create host repository relationship
await tx.host_repositories.create({
data: {
id: uuidv4(),
host_id: host.id,
repository_id: repo.id,
is_enabled: repoData.isEnabled !== false, // Default to enabled
last_checked: new Date(),
},
});
}
}
// Create update history record
await tx.update_history.create({
data: {
id: uuidv4(),
host_id: host.id,
packages_count: updatesCount,
security_count: securityCount,
total_packages: totalPackages,
payload_size_kb: payloadSizeKb,
execution_time: executionTime ? parseFloat(executionTime) : null,
status: "success",
},
});
},
{
maxWait: 30000, // Wait up to 30s for a transaction slot
timeout: 60000, // Allow transaction to run for up to 60s
},
);
// Agent auto-update is now handled client-side by the agent itself
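As context for the maxWait/timeout options above, a minimal sketch of how an interactive Prisma transaction takes these options (the `saveReport`/`writeFn` names are illustrative, not from the codebase):

// Minimal sketch, assuming a Prisma client is already configured:
// wrap the report writes in one interactive transaction with a longer
// timeout for slower PostgreSQL instances.
const { PrismaClient } = require("@prisma/client");
const prisma = new PrismaClient();

async function saveReport(writeFn) {
return prisma.$transaction(
async (tx) => {
// all host_packages / host_repositories / update_history writes happen here
await writeFn(tx);
},
{
maxWait: 30000, // wait up to 30s for a transaction slot
timeout: 60000, // allow the transaction itself to run for up to 60s
},
);
}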

View File

@@ -14,6 +14,7 @@ router.get("/", async (req, res) => {
category = "",
needsUpdate = "",
isSecurityUpdate = "",
host = "",
} = req.query;
const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
@@ -33,8 +34,27 @@ router.get("/", async (req, res) => {
: {},
// Category filter
category ? { category: { equals: category } } : {},
// Update status filters
needsUpdate
// Host filter - only return packages installed on the specified host
// Combined with update status filters if both are present
host
? {
host_packages: {
some: {
host_id: host,
// If needsUpdate or isSecurityUpdate filters are present, apply them here
...(needsUpdate
? { needs_update: needsUpdate === "true" }
: {}),
...(isSecurityUpdate
? { is_security_update: isSecurityUpdate === "true" }
: {}),
},
},
}
: {},
// Update status filters (only applied if no host filter)
// If host filter is present, these are already applied above
!host && needsUpdate
? {
host_packages: {
some: {
@@ -43,7 +63,7 @@ router.get("/", async (req, res) => {
},
}
: {},
isSecurityUpdate
!host && isSecurityUpdate
? {
host_packages: {
some: {
@@ -84,24 +104,32 @@ router.get("/", async (req, res) => {
// Get additional stats for each package
const packagesWithStats = await Promise.all(
packages.map(async (pkg) => {
// Build base where clause for this package
const baseWhere = { package_id: pkg.id };
// If host filter is specified, add host filter to all queries
const hostWhere = host ? { ...baseWhere, host_id: host } : baseWhere;
const [updatesCount, securityCount, packageHosts] = await Promise.all([
prisma.host_packages.count({
where: {
package_id: pkg.id,
...hostWhere,
needs_update: true,
},
}),
prisma.host_packages.count({
where: {
package_id: pkg.id,
...hostWhere,
needs_update: true,
is_security_update: true,
},
}),
prisma.host_packages.findMany({
where: {
package_id: pkg.id,
needs_update: true,
...hostWhere,
// If host filter is specified, include all packages for that host
// Otherwise, only include packages that need updates
...(host ? {} : { needs_update: true }),
},
select: {
hosts: {
@@ -112,6 +140,10 @@ router.get("/", async (req, res) => {
os_type: true,
},
},
current_version: true,
available_version: true,
needs_update: true,
is_security_update: true,
},
take: 10, // Limit to first 10 for performance
}),
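To illustrate the intent of this hunk: when a `host` query parameter is present, the update-status filters are folded into the same `host_packages.some` clause instead of being applied separately. A hypothetical sketch of the resulting `where` object for `GET /packages?host=<hostId>&needsUpdate=true` (the `AND` wrapper and variable names are assumptions):

// Hypothetical illustration only
const where = {
AND: [
{
host_packages: {
some: {
host_id: hostId, // host filter
needs_update: true, // update-status filter folded in
},
},
},
// no separate needs_update clause is added when `host` is present
],
};
const packages = await prisma.packages.findMany({ where, skip, take: limit });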

View File

@@ -14,13 +14,13 @@ const router = express.Router();
function getCurrentVersion() {
try {
const packageJson = require("../../package.json");
return packageJson?.version || "1.2.7";
return packageJson?.version || "1.2.9";
} catch (packageError) {
console.warn(
"Could not read version from package.json, using fallback:",
packageError.message,
);
return "1.2.7";
return "1.2.9";
}
}
@@ -126,43 +126,61 @@ async function getLatestCommit(owner, repo) {
// Helper function to get commit count difference
async function getCommitDifference(owner, repo, currentVersion) {
try {
const currentVersionTag = `v${currentVersion}`;
// Compare main branch with the released version tag
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${currentVersionTag}...main`;
// Try both with and without 'v' prefix for compatibility
const versionTags = [
currentVersion, // Try without 'v' first (new format)
`v${currentVersion}`, // Try with 'v' prefix (old format)
];
const response = await fetch(apiUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
},
});
for (const versionTag of versionTags) {
try {
// Compare main branch with the released version tag
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${versionTag}...main`;
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
throw new Error("GitHub API rate limit exceeded");
const response = await fetch(apiUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
},
});
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
throw new Error("GitHub API rate limit exceeded");
}
// If 404, try next tag format
if (response.status === 404) {
continue;
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
const compareData = await response.json();
return {
commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
totalCommits: compareData.total_commits || 0,
branchInfo: "main branch vs release",
};
} catch (error) {
console.error("Error fetching commit difference:", error.message);
throw error;
const compareData = await response.json();
return {
commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
totalCommits: compareData.total_commits || 0,
branchInfo: "main branch vs release",
};
} catch (error) {
// If rate limit, throw immediately
if (error.message.includes("rate limit")) {
throw error;
}
}
}
// If all attempts failed, throw error
throw new Error(
`Could not find tag '${currentVersion}' or 'v${currentVersion}' in repository`,
);
}
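A short usage sketch of the tag-fallback helper above, assuming it is called with the repository owner/name resolved elsewhere:

// Usage sketch (owner/repo values are placeholders)
try {
const diff = await getCommitDifference("PatchMon", "PatchMon", getCurrentVersion());
console.log(`main is ${diff.commitsAhead} commit(s) ahead of the release tag`);
} catch (err) {
// Thrown when neither "<version>" nor "v<version>" exists as a tag, or on rate limiting
console.warn("Commit difference unavailable:", err.message);
}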
// Helper function to compare version strings (semantic versioning)
@@ -274,11 +292,11 @@ router.get(
) {
console.log("GitHub API rate limited, providing fallback data");
latestRelease = {
tagName: "v1.2.7",
version: "1.2.7",
tagName: "v1.2.8",
version: "1.2.8",
publishedAt: "2025-10-02T17:12:53Z",
htmlUrl:
"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.7",
"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.8",
};
latestCommit = {
sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
@@ -296,10 +314,13 @@ router.get(
};
} else {
// Fall back to cached data for other errors
const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
latestRelease = settings.latest_version
? {
version: settings.latest_version,
tagName: `v${settings.latest_version}`,
publishedAt: null, // Only use date from GitHub API, not cached data
htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/v${settings.latest_version}`,
}
: null;
}

View File

@@ -62,9 +62,13 @@ const versionRoutes = require("./routes/versionRoutes");
const tfaRoutes = require("./routes/tfaRoutes");
const searchRoutes = require("./routes/searchRoutes");
const autoEnrollmentRoutes = require("./routes/autoEnrollmentRoutes");
const gethomepageRoutes = require("./routes/gethomepageRoutes");
const automationRoutes = require("./routes/automationRoutes");
const dockerRoutes = require("./routes/dockerRoutes");
const updateScheduler = require("./services/updateScheduler");
const { initSettings } = require("./services/settingsService");
const { cleanup_expired_sessions } = require("./utils/session_manager");
const { queueManager } = require("./services/automation");
// Initialize Prisma client with optimized connection pooling for multiple instances
const prisma = createPrismaClient();
@@ -422,6 +426,9 @@ app.use(
authLimiter,
autoEnrollmentRoutes,
);
app.use(`/api/${apiVersion}/gethomepage`, gethomepageRoutes);
app.use(`/api/${apiVersion}/automation`, automationRoutes);
app.use(`/api/${apiVersion}/docker`, dockerRoutes);
// Error handling middleware
app.use((err, _req, res, _next) => {
@@ -448,6 +455,7 @@ process.on("SIGINT", async () => {
clearInterval(app.locals.session_cleanup_interval);
}
updateScheduler.stop();
await queueManager.shutdown();
await disconnectPrisma(prisma);
process.exit(0);
});
@@ -460,6 +468,7 @@ process.on("SIGTERM", async () => {
clearInterval(app.locals.session_cleanup_interval);
}
updateScheduler.stop();
await queueManager.shutdown();
await disconnectPrisma(prisma);
process.exit(0);
});
@@ -674,11 +683,16 @@ async function getPermissionBasedPreferences(userRole) {
requiredPermission: "can_view_packages",
order: 13,
},
{ cardId: "recentUsers", requiredPermission: "can_view_users", order: 14 },
{
cardId: "packageTrends",
requiredPermission: "can_view_packages",
order: 14,
},
{ cardId: "recentUsers", requiredPermission: "can_view_users", order: 15 },
{
cardId: "quickStats",
requiredPermission: "can_view_dashboard",
order: 15,
order: 16,
},
];
@@ -723,6 +737,12 @@ async function startServer() {
// Initialize dashboard preferences for all users
await initializeDashboardPreferences();
// Initialize BullMQ queue manager
await queueManager.initialize();
// Schedule recurring jobs
await queueManager.scheduleAllJobs();
// Initial session cleanup
await cleanup_expired_sessions();

View File

@@ -0,0 +1,67 @@
/**
* Echo Hello Automation
* Simple test automation task
*/
class EchoHello {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "echo-hello";
}
/**
* Process echo hello job
*/
async process(job) {
const startTime = Date.now();
console.log("👋 Starting echo hello task...");
try {
// Simple echo task
const message = job.data.message || "Hello from BullMQ!";
const timestamp = new Date().toISOString();
// Simulate some work
await new Promise((resolve) => setTimeout(resolve, 100));
const executionTime = Date.now() - startTime;
console.log(`✅ Echo hello completed in ${executionTime}ms: ${message}`);
return {
success: true,
message,
timestamp,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Echo hello failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Echo hello is manual only - no scheduling
*/
async schedule() {
console.log(" Echo hello is manual only - no scheduling needed");
return null;
}
/**
* Trigger manual echo hello
*/
async triggerManual(message = "Hello from BullMQ!") {
const job = await this.queueManager.queues[this.queueName].add(
"echo-hello-manual",
{ message },
{ priority: 1 },
);
console.log("✅ Manual echo hello triggered");
return job;
}
}
module.exports = EchoHello;
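A brief usage sketch for the test automation, assuming the queue manager is imported from the automation index module shown later in this diff:

// Usage sketch (require path assumed)
const { queueManager } = require("./index");

async function smokeTest() {
await queueManager.initialize();
const job = await queueManager.triggerEchoHello("ping from smoke test");
console.log(`Echo job queued with id ${job.id}`);
}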

View File

@@ -0,0 +1,153 @@
const { prisma } = require("./shared/prisma");
const { compareVersions, checkPublicRepo } = require("./shared/utils");
/**
* GitHub Update Check Automation
* Checks for new releases on GitHub using HTTPS API
*/
class GitHubUpdateCheck {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "github-update-check";
}
/**
* Process GitHub update check job
*/
async process(job) {
const startTime = Date.now();
console.log("🔍 Starting GitHub update check...");
try {
// Get settings
const settings = await prisma.settings.findFirst();
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
let owner, repo;
// Parse GitHub repository URL (supports both HTTPS and SSH formats)
if (repoUrl.includes("git@github.com:")) {
const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
if (match) {
[, owner, repo] = match;
}
} else if (repoUrl.includes("github.com/")) {
const match = repoUrl.match(
/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
);
if (match) {
[, owner, repo] = match;
}
}
if (!owner || !repo) {
throw new Error("Could not parse GitHub repository URL");
}
// Always use HTTPS GitHub API (simpler and more reliable)
const latestVersion = await checkPublicRepo(owner, repo);
if (!latestVersion) {
throw new Error("Could not determine latest version");
}
// Read version from package.json
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {
currentVersion = packageJson.version;
}
} catch (packageError) {
console.warn(
"Could not read version from package.json:",
packageError.message,
);
}
const isUpdateAvailable =
compareVersions(latestVersion, currentVersion) > 0;
// Update settings with check results
await prisma.settings.update({
where: { id: settings.id },
data: {
last_update_check: new Date(),
update_available: isUpdateAvailable,
latest_version: latestVersion,
},
});
const executionTime = Date.now() - startTime;
console.log(
`✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
);
return {
success: true,
currentVersion,
latestVersion,
isUpdateAvailable,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ GitHub update check failed after ${executionTime}ms:`,
error.message,
);
// Update last check time even on error
try {
const settings = await prisma.settings.findFirst();
if (settings) {
await prisma.settings.update({
where: { id: settings.id },
data: {
last_update_check: new Date(),
update_available: false,
},
});
}
} catch (updateError) {
console.error(
"❌ Error updating last check time:",
updateError.message,
);
}
throw error;
}
}
/**
* Schedule recurring GitHub update check (daily at midnight)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"github-update-check",
{},
{
repeat: { cron: "0 0 * * *" }, // Daily at midnight
jobId: "github-update-check-recurring",
},
);
console.log("✅ GitHub update check scheduled");
return job;
}
/**
* Trigger manual GitHub update check
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"github-update-check-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual GitHub update check triggered");
return job;
}
}
module.exports = GitHubUpdateCheck;

View File

@@ -0,0 +1,283 @@
const { Queue, Worker } = require("bullmq");
const { redis, redisConnection } = require("./shared/redis");
const { prisma } = require("./shared/prisma");
// Import automation classes
const GitHubUpdateCheck = require("./githubUpdateCheck");
const SessionCleanup = require("./sessionCleanup");
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
const EchoHello = require("./echoHello");
// Queue names
const QUEUE_NAMES = {
GITHUB_UPDATE_CHECK: "github-update-check",
SESSION_CLEANUP: "session-cleanup",
SYSTEM_MAINTENANCE: "system-maintenance",
ECHO_HELLO: "echo-hello",
ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
};
/**
* Main Queue Manager
* Manages all BullMQ queues and workers
*/
class QueueManager {
constructor() {
this.queues = {};
this.workers = {};
this.automations = {};
this.isInitialized = false;
}
/**
* Initialize all queues, workers, and automations
*/
async initialize() {
try {
console.log("✅ Redis connection successful");
// Initialize queues
await this.initializeQueues();
// Initialize automation classes
await this.initializeAutomations();
// Initialize workers
await this.initializeWorkers();
// Setup event listeners
this.setupEventListeners();
this.isInitialized = true;
console.log("✅ Queue manager initialized successfully");
} catch (error) {
console.error("❌ Failed to initialize queue manager:", error.message);
throw error;
}
}
/**
* Initialize all queues
*/
async initializeQueues() {
for (const [key, queueName] of Object.entries(QUEUE_NAMES)) {
this.queues[queueName] = new Queue(queueName, {
connection: redisConnection,
defaultJobOptions: {
removeOnComplete: 50, // Keep last 50 completed jobs
removeOnFail: 20, // Keep last 20 failed jobs
attempts: 3, // Retry failed jobs 3 times
backoff: {
type: "exponential",
delay: 2000,
},
},
});
console.log(`✅ Queue '${queueName}' initialized`);
}
}
/**
* Initialize automation classes
*/
async initializeAutomations() {
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
this,
);
this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
new OrphanedRepoCleanup(this);
this.automations[QUEUE_NAMES.ECHO_HELLO] = new EchoHello(this);
console.log("✅ All automation classes initialized");
}
/**
* Initialize all workers
*/
async initializeWorkers() {
// GitHub Update Check Worker
this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
QUEUE_NAMES.GITHUB_UPDATE_CHECK,
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Session Cleanup Worker
this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
QUEUE_NAMES.SESSION_CLEANUP,
this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.SESSION_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Orphaned Repo Cleanup Worker
this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Echo Hello Worker
this.workers[QUEUE_NAMES.ECHO_HELLO] = new Worker(
QUEUE_NAMES.ECHO_HELLO,
this.automations[QUEUE_NAMES.ECHO_HELLO].process.bind(
this.automations[QUEUE_NAMES.ECHO_HELLO],
),
{
connection: redisConnection,
concurrency: 1,
},
);
// Add error handling for all workers
Object.values(this.workers).forEach((worker) => {
worker.on("error", (error) => {
console.error("Worker error:", error);
});
});
console.log("✅ All workers initialized");
}
/**
* Setup event listeners for all queues
*/
setupEventListeners() {
for (const queueName of Object.values(QUEUE_NAMES)) {
const queue = this.queues[queueName];
queue.on("error", (error) => {
console.error(`❌ Queue '${queueName}' experienced an error:`, error);
});
queue.on("failed", (job, err) => {
console.error(
`❌ Job '${job.id}' in queue '${queueName}' failed:`,
err,
);
});
queue.on("completed", (job) => {
console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
});
}
console.log("✅ Queue events initialized");
}
/**
* Schedule all recurring jobs
*/
async scheduleAllJobs() {
await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
await this.automations[QUEUE_NAMES.ECHO_HELLO].schedule();
}
/**
* Manual job triggers
*/
async triggerGitHubUpdateCheck() {
return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
}
async triggerSessionCleanup() {
return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
}
async triggerOrphanedRepoCleanup() {
return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
}
async triggerEchoHello(message = "Hello from BullMQ!") {
return this.automations[QUEUE_NAMES.ECHO_HELLO].triggerManual(message);
}
/**
* Get queue statistics
*/
async getQueueStats(queueName) {
const queue = this.queues[queueName];
if (!queue) {
throw new Error(`Queue ${queueName} not found`);
}
const [waiting, active, completed, failed, delayed] = await Promise.all([
queue.getWaiting(),
queue.getActive(),
queue.getCompleted(),
queue.getFailed(),
queue.getDelayed(),
]);
return {
waiting: waiting.length,
active: active.length,
completed: completed.length,
failed: failed.length,
delayed: delayed.length,
};
}
/**
* Get all queue statistics
*/
async getAllQueueStats() {
const stats = {};
for (const queueName of Object.values(QUEUE_NAMES)) {
stats[queueName] = await this.getQueueStats(queueName);
}
return stats;
}
/**
* Get recent jobs for a queue
*/
async getRecentJobs(queueName, limit = 10) {
const queue = this.queues[queueName];
if (!queue) {
throw new Error(`Queue ${queueName} not found`);
}
const [completed, failed] = await Promise.all([
queue.getCompleted(0, limit - 1),
queue.getFailed(0, limit - 1),
]);
return [...completed, ...failed]
.sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
.slice(0, limit);
}
/**
* Graceful shutdown
*/
async shutdown() {
console.log("🛑 Shutting down queue manager...");
for (const queueName of Object.keys(this.queues)) {
await this.queues[queueName].close();
await this.workers[queueName].close();
}
await redis.quit();
console.log("✅ Queue manager shutdown complete");
}
}
const queueManager = new QueueManager();
module.exports = { queueManager, QUEUE_NAMES };
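A bootstrap sketch mirroring how server.js is expected to wire this manager in (paths assumed; all methods shown exist in the class above):

// Bootstrap sketch (assumed require path)
const { queueManager, QUEUE_NAMES } = require("./services/automation");

async function startAutomation() {
await queueManager.initialize(); // queues, automations, workers, listeners
await queueManager.scheduleAllJobs(); // recurring cron-style jobs
const stats = await queueManager.getQueueStats(QUEUE_NAMES.GITHUB_UPDATE_CHECK);
console.log("github-update-check queue:", stats);
}

process.on("SIGTERM", async () => {
await queueManager.shutdown();
process.exit(0);
});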

View File

@@ -0,0 +1,114 @@
const { prisma } = require("./shared/prisma");
/**
* Orphaned Repository Cleanup Automation
* Removes repositories with no associated hosts
*/
class OrphanedRepoCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "orphaned-repo-cleanup";
}
/**
* Process orphaned repository cleanup job
*/
async process(job) {
const startTime = Date.now();
console.log("🧹 Starting orphaned repository cleanup...");
try {
// Find repositories with 0 hosts
const orphanedRepos = await prisma.repositories.findMany({
where: {
host_repositories: {
none: {},
},
},
include: {
_count: {
select: {
host_repositories: true,
},
},
},
});
let deletedCount = 0;
const deletedRepos = [];
// Delete orphaned repositories
for (const repo of orphanedRepos) {
try {
await prisma.repositories.delete({
where: { id: repo.id },
});
deletedCount++;
deletedRepos.push({
id: repo.id,
name: repo.name,
url: repo.url,
});
console.log(
`🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
);
} catch (deleteError) {
console.error(
`❌ Failed to delete repository ${repo.id}:`,
deleteError.message,
);
}
}
const executionTime = Date.now() - startTime;
console.log(
`✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
);
return {
success: true,
deletedCount,
deletedRepos,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Schedule recurring orphaned repository cleanup (daily at 2 AM)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-repo-cleanup",
{},
{
repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
jobId: "orphaned-repo-cleanup-recurring",
},
);
console.log("✅ Orphaned repository cleanup scheduled");
return job;
}
/**
* Trigger manual orphaned repository cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"orphaned-repo-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual orphaned repository cleanup triggered");
return job;
}
}
module.exports = OrphanedRepoCleanup;

View File

@@ -0,0 +1,78 @@
const { prisma } = require("./shared/prisma");
const { cleanup_expired_sessions } = require("../../utils/session_manager");
/**
* Session Cleanup Automation
* Cleans up expired user sessions
*/
class SessionCleanup {
constructor(queueManager) {
this.queueManager = queueManager;
this.queueName = "session-cleanup";
}
/**
* Process session cleanup job
*/
async process(job) {
const startTime = Date.now();
console.log("🧹 Starting session cleanup...");
try {
const result = await prisma.user_sessions.deleteMany({
where: {
OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
},
});
const executionTime = Date.now() - startTime;
console.log(
`✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
);
return {
success: true,
sessionsCleaned: result.count,
executionTime,
};
} catch (error) {
const executionTime = Date.now() - startTime;
console.error(
`❌ Session cleanup failed after ${executionTime}ms:`,
error.message,
);
throw error;
}
}
/**
* Schedule recurring session cleanup (every hour)
*/
async schedule() {
const job = await this.queueManager.queues[this.queueName].add(
"session-cleanup",
{},
{
repeat: { cron: "0 * * * *" }, // Every hour
jobId: "session-cleanup-recurring",
},
);
console.log("✅ Session cleanup scheduled");
return job;
}
/**
* Trigger manual session cleanup
*/
async triggerManual() {
const job = await this.queueManager.queues[this.queueName].add(
"session-cleanup-manual",
{},
{ priority: 1 },
);
console.log("✅ Manual session cleanup triggered");
return job;
}
}
module.exports = SessionCleanup;

View File

@@ -0,0 +1,5 @@
const { PrismaClient } = require("@prisma/client");
const prisma = new PrismaClient();
module.exports = { prisma };

View File

@@ -0,0 +1,16 @@
const IORedis = require("ioredis");
// Redis connection configuration
const redisConnection = {
host: process.env.REDIS_HOST || "localhost",
port: parseInt(process.env.REDIS_PORT) || 6379,
password: process.env.REDIS_PASSWORD || undefined,
db: parseInt(process.env.REDIS_DB) || 0,
retryDelayOnFailover: 100,
maxRetriesPerRequest: null, // BullMQ requires this to be null
};
// Create Redis connection
const redis = new IORedis(redisConnection);
module.exports = { redis, redisConnection };

View File

@@ -0,0 +1,82 @@
// Common utilities for automation jobs
/**
* Compare two semantic versions
* @param {string} version1 - First version
* @param {string} version2 - Second version
* @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
*/
function compareVersions(version1, version2) {
const v1parts = version1.split(".").map(Number);
const v2parts = version2.split(".").map(Number);
const maxLength = Math.max(v1parts.length, v2parts.length);
for (let i = 0; i < maxLength; i++) {
const v1part = v1parts[i] || 0;
const v2part = v2parts[i] || 0;
if (v1part > v2part) return 1;
if (v1part < v2part) return -1;
}
return 0;
}
/**
* Check public GitHub repository for latest release
* @param {string} owner - Repository owner
* @param {string} repo - Repository name
* @returns {Promise<string|null>} - Latest version or null
*/
async function checkPublicRepo(owner, repo) {
try {
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
let currentVersion = "1.2.7"; // fallback
try {
const packageJson = require("../../../package.json");
if (packageJson?.version) {
currentVersion = packageJson.version;
}
} catch (packageError) {
console.warn(
"Could not read version from package.json for User-Agent, using fallback:",
packageError.message,
);
}
const response = await fetch(httpsRepoUrl, {
method: "GET",
headers: {
Accept: "application/vnd.github.v3+json",
"User-Agent": `PatchMon-Server/${currentVersion}`,
},
});
if (!response.ok) {
const errorText = await response.text();
if (
errorText.includes("rate limit") ||
errorText.includes("API rate limit")
) {
console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
return null;
}
throw new Error(
`GitHub API error: ${response.status} ${response.statusText}`,
);
}
const releaseData = await response.json();
return releaseData.tag_name.replace("v", "");
} catch (error) {
console.error("GitHub API error:", error.message);
throw error;
}
}
module.exports = {
compareVersions,
checkPublicRepo,
};
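Quick checks of the comparison semantics implemented above (values chosen purely for illustration):

console.log(compareVersions("1.2.9", "1.2.7")); //  1 -> first argument is newer
console.log(compareVersions("1.2.7", "1.2.9")); // -1 -> first argument is older
console.log(compareVersions("1.3", "1.3.0"));   //  0 -> missing parts are treated as 0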

View File

@@ -104,7 +104,7 @@ class UpdateScheduler {
}
// Read version from package.json dynamically
let currentVersion = "1.2.7"; // fallback
let currentVersion = "1.2.9"; // fallback
try {
const packageJson = require("../../package.json");
if (packageJson?.version) {
@@ -214,7 +214,7 @@ class UpdateScheduler {
const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
// Get current version for User-Agent
let currentVersion = "1.2.7"; // fallback
let currentVersion = "1.2.9"; // fallback
try {
const packageJson = require("../../package.json");
if (packageJson?.version) {

View File

@@ -15,6 +15,16 @@ if (!process.env.JWT_SECRET) {
const JWT_SECRET = process.env.JWT_SECRET;
const JWT_EXPIRES_IN = process.env.JWT_EXPIRES_IN || "1h";
const JWT_REFRESH_EXPIRES_IN = process.env.JWT_REFRESH_EXPIRES_IN || "7d";
const TFA_REMEMBER_ME_EXPIRES_IN =
process.env.TFA_REMEMBER_ME_EXPIRES_IN || "30d";
const TFA_MAX_REMEMBER_SESSIONS = parseInt(
process.env.TFA_MAX_REMEMBER_SESSIONS || "5",
10,
);
const TFA_SUSPICIOUS_ACTIVITY_THRESHOLD = parseInt(
process.env.TFA_SUSPICIOUS_ACTIVITY_THRESHOLD || "3",
10,
);
const INACTIVITY_TIMEOUT_MINUTES = parseInt(
process.env.SESSION_INACTIVITY_TIMEOUT_MINUTES || "30",
10,
@@ -70,16 +80,136 @@ function parse_expiration(expiration_string) {
}
}
/**
* Generate device fingerprint from request data
*/
function generate_device_fingerprint(req) {
const components = [
req.get("user-agent") || "",
req.get("accept-language") || "",
req.get("accept-encoding") || "",
req.ip || "",
];
// Create a simple hash of device characteristics
const fingerprint = crypto
.createHash("sha256")
.update(components.join("|"))
.digest("hex")
.substring(0, 32); // Use first 32 chars for storage efficiency
return fingerprint;
}
/**
* Check for suspicious activity patterns
*/
async function check_suspicious_activity(
user_id,
_ip_address,
_device_fingerprint,
) {
try {
// Check for multiple sessions from different IPs in short time
const recent_sessions = await prisma.user_sessions.findMany({
where: {
user_id: user_id,
created_at: {
gte: new Date(Date.now() - 24 * 60 * 60 * 1000), // Last 24 hours
},
is_revoked: false,
},
select: {
ip_address: true,
device_fingerprint: true,
created_at: true,
},
});
// Count unique IPs and devices
const unique_ips = new Set(recent_sessions.map((s) => s.ip_address));
const unique_devices = new Set(
recent_sessions.map((s) => s.device_fingerprint),
);
// Flag as suspicious if more than threshold different IPs or devices in 24h
if (
unique_ips.size > TFA_SUSPICIOUS_ACTIVITY_THRESHOLD ||
unique_devices.size > TFA_SUSPICIOUS_ACTIVITY_THRESHOLD
) {
console.warn(
`Suspicious activity detected for user ${user_id}: ${unique_ips.size} IPs, ${unique_devices.size} devices`,
);
return true;
}
return false;
} catch (error) {
console.error("Error checking suspicious activity:", error);
return false;
}
}
/**
* Create a new session for user
*/
async function create_session(user_id, ip_address, user_agent) {
async function create_session(
user_id,
ip_address,
user_agent,
remember_me = false,
req = null,
) {
try {
const session_id = crypto.randomUUID();
const refresh_token = generate_refresh_token();
const access_token = generate_access_token(user_id, session_id);
const expires_at = parse_expiration(JWT_REFRESH_EXPIRES_IN);
// Generate device fingerprint if request is available
const device_fingerprint = req ? generate_device_fingerprint(req) : null;
// Check for suspicious activity
if (device_fingerprint) {
const is_suspicious = await check_suspicious_activity(
user_id,
ip_address,
device_fingerprint,
);
if (is_suspicious) {
console.warn(
`Suspicious activity detected for user ${user_id}, session creation may be restricted`,
);
}
}
// Check session limits for remember me
if (remember_me) {
const existing_remember_sessions = await prisma.user_sessions.count({
where: {
user_id: user_id,
tfa_remember_me: true,
is_revoked: false,
expires_at: { gt: new Date() },
},
});
// Limit remember me sessions per user
if (existing_remember_sessions >= TFA_MAX_REMEMBER_SESSIONS) {
throw new Error(
"Maximum number of remembered devices reached. Please revoke an existing session first.",
);
}
}
// Use longer expiration for remember me sessions
const expires_at = remember_me
? parse_expiration(TFA_REMEMBER_ME_EXPIRES_IN)
: parse_expiration(JWT_REFRESH_EXPIRES_IN);
// Calculate TFA bypass until date for remember me sessions
const tfa_bypass_until = remember_me
? parse_expiration(TFA_REMEMBER_ME_EXPIRES_IN)
: null;
// Store session in database
await prisma.user_sessions.create({
@@ -90,8 +220,13 @@ async function create_session(user_id, ip_address, user_agent) {
access_token_hash: hash_token(access_token),
ip_address: ip_address || null,
user_agent: user_agent || null,
device_fingerprint: device_fingerprint,
last_login_ip: ip_address || null,
last_activity: new Date(),
expires_at: expires_at,
tfa_remember_me: remember_me,
tfa_bypass_until: tfa_bypass_until,
login_count: 1,
},
});
@@ -100,6 +235,7 @@ async function create_session(user_id, ip_address, user_agent) {
access_token,
refresh_token,
expires_at,
tfa_bypass_until,
};
} catch (error) {
console.error("Error creating session:", error);
@@ -299,6 +435,8 @@ async function get_user_sessions(user_id) {
last_activity: true,
created_at: true,
expires_at: true,
tfa_remember_me: true,
tfa_bypass_until: true,
},
orderBy: { last_activity: "desc" },
});
@@ -308,6 +446,42 @@ async function get_user_sessions(user_id) {
}
}
/**
* Check if TFA is bypassed for a session
*/
async function is_tfa_bypassed(session_id) {
try {
const session = await prisma.user_sessions.findUnique({
where: { id: session_id },
select: {
tfa_remember_me: true,
tfa_bypass_until: true,
is_revoked: true,
expires_at: true,
},
});
if (!session) {
return false;
}
// Check if session is still valid
if (session.is_revoked || new Date() > session.expires_at) {
return false;
}
// Check if TFA is bypassed and still within bypass period
if (session.tfa_remember_me && session.tfa_bypass_until) {
return new Date() < session.tfa_bypass_until;
}
return false;
} catch (error) {
console.error("Error checking TFA bypass:", error);
return false;
}
}
module.exports = {
create_session,
validate_session,
@@ -317,6 +491,9 @@ module.exports = {
revoke_all_user_sessions,
cleanup_expired_sessions,
get_user_sessions,
is_tfa_bypassed,
generate_device_fingerprint,
check_suspicious_activity,
generate_access_token,
INACTIVITY_TIMEOUT_MINUTES,
};
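A middleware sketch showing how `is_tfa_bypassed` might be consulted before prompting for a TFA code. The route shape and the `req.session_id` field are assumptions, not part of this diff:

// Sketch only, assuming auth middleware has already attached session_id to req
const { is_tfa_bypassed } = require("../utils/session_manager");

async function requireTfa(req, res, next) {
const session_id = req.session_id;
if (session_id && (await is_tfa_bypassed(session_id))) {
return next(); // "remember this device" window still active, skip TFA prompt
}
return res.status(401).json({ error: "Two-factor code required" });
}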

View File

@@ -8,19 +8,94 @@ log() {
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
}
# Copy files from agents_backup to agents if agents directory is empty
if [ -d "/app/agents" ] && [ -z "$(ls -A /app/agents 2>/dev/null)" ]; then
if [ -d "/app/agents_backup" ]; then
log "Agents directory is empty, copying from backup..."
cp -r /app/agents_backup/* /app/agents/
# Function to extract version from agent script
get_agent_version() {
local file="$1"
if [ -f "$file" ]; then
grep -m 1 '^AGENT_VERSION=' "$file" | cut -d'"' -f2 2>/dev/null || echo "0.0.0"
else
log "Warning: agents_backup directory not found"
echo "0.0.0"
fi
else
log "Agents directory already contains files, skipping copy"
fi
}
log "Starting PatchMon Backend (${NODE_ENV:-production})..."
# Function to compare versions (returns 0 if $1 > $2)
version_greater() {
# Use sort -V for version comparison
test "$(printf '%s\n' "$1" "$2" | sort -V | tail -n1)" = "$1" && test "$1" != "$2"
}
# Check and update agent files if necessary
update_agents() {
local backup_agent="/app/agents_backup/patchmon-agent.sh"
local current_agent="/app/agents/patchmon-agent.sh"
# Check if agents directory exists
if [ ! -d "/app/agents" ]; then
log "ERROR: /app/agents directory not found"
return 1
fi
# Check if backup exists
if [ ! -d "/app/agents_backup" ]; then
log "WARNING: agents_backup directory not found, skipping agent update"
return 0
fi
# Get versions
local backup_version=$(get_agent_version "$backup_agent")
local current_version=$(get_agent_version "$current_agent")
log "Agent version check:"
log " Image version: ${backup_version}"
log " Volume version: ${current_version}"
# Determine if update is needed
local needs_update=0
# Case 1: No agents in volume (first time setup)
if [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' 2>/dev/null | head -n 1)" ]; then
log "Agents directory is empty - performing initial copy"
needs_update=1
# Case 2: Backup version is newer
elif version_greater "$backup_version" "$current_version"; then
log "Newer agent version available (${backup_version} > ${current_version})"
needs_update=1
else
log "Agents are up to date"
needs_update=0
fi
# Perform update if needed
if [ $needs_update -eq 1 ]; then
log "Updating agents to version ${backup_version}..."
# Create backup of existing agents if they exist
if [ -f "$current_agent" ]; then
local backup_timestamp=$(date +%Y%m%d_%H%M%S)
local backup_name="/app/agents/patchmon-agent.sh.backup.${backup_timestamp}"
cp "$current_agent" "$backup_name" 2>/dev/null || true
log "Previous agent backed up to: $(basename $backup_name)"
fi
# Copy new agents
cp -r /app/agents_backup/* /app/agents/
# Verify update
local new_version=$(get_agent_version "$current_agent")
if [ "$new_version" = "$backup_version" ]; then
log "✅ Agents successfully updated to version ${new_version}"
else
log "⚠️ Warning: Agent update may have failed (expected: ${backup_version}, got: ${new_version})"
fi
fi
}
# Main execution
log "PatchMon Backend Container Starting..."
log "Environment: ${NODE_ENV:-production}"
# Update agents (version-aware)
update_agents
log "Running database migrations..."
npx prisma migrate deploy

View File

@@ -1,7 +1,7 @@
{
"name": "patchmon-frontend",
"private": true,
"version": "1.2.7",
"version": "1.2.9",
"license": "AGPL-3.0",
"type": "module",
"scripts": {
@@ -35,7 +35,7 @@
"@vitejs/plugin-react": "^4.3.4",
"autoprefixer": "^10.4.20",
"postcss": "^8.5.6",
"tailwindcss": "^3.4.17",
"tailwindcss": "^4.0.0",
"vite": "^7.1.5"
},
"overrides": {

View File

@@ -1,3 +1,4 @@
import { lazy, Suspense } from "react";
import { Route, Routes } from "react-router-dom";
import FirstTimeAdminSetup from "./components/FirstTimeAdminSetup";
import Layout from "./components/Layout";
@@ -8,23 +9,48 @@ import { isAuthPhase } from "./constants/authPhases";
import { AuthProvider, useAuth } from "./contexts/AuthContext";
import { ThemeProvider } from "./contexts/ThemeContext";
import { UpdateNotificationProvider } from "./contexts/UpdateNotificationContext";
import Dashboard from "./pages/Dashboard";
import HostDetail from "./pages/HostDetail";
import Hosts from "./pages/Hosts";
import Login from "./pages/Login";
import PackageDetail from "./pages/PackageDetail";
import Packages from "./pages/Packages";
import Profile from "./pages/Profile";
import Repositories from "./pages/Repositories";
import RepositoryDetail from "./pages/RepositoryDetail";
import AlertChannels from "./pages/settings/AlertChannels";
import Integrations from "./pages/settings/Integrations";
import Notifications from "./pages/settings/Notifications";
import PatchManagement from "./pages/settings/PatchManagement";
import SettingsAgentConfig from "./pages/settings/SettingsAgentConfig";
import SettingsHostGroups from "./pages/settings/SettingsHostGroups";
import SettingsServerConfig from "./pages/settings/SettingsServerConfig";
import SettingsUsers from "./pages/settings/SettingsUsers";
// Lazy load pages
const Dashboard = lazy(() => import("./pages/Dashboard"));
const HostDetail = lazy(() => import("./pages/HostDetail"));
const Hosts = lazy(() => import("./pages/Hosts"));
const Login = lazy(() => import("./pages/Login"));
const PackageDetail = lazy(() => import("./pages/PackageDetail"));
const Packages = lazy(() => import("./pages/Packages"));
const Profile = lazy(() => import("./pages/Profile"));
const Automation = lazy(() => import("./pages/Automation"));
const Repositories = lazy(() => import("./pages/Repositories"));
const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
const Docker = lazy(() => import("./pages/Docker"));
const DockerContainerDetail = lazy(
() => import("./pages/docker/ContainerDetail"),
);
const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
const Integrations = lazy(() => import("./pages/settings/Integrations"));
const Notifications = lazy(() => import("./pages/settings/Notifications"));
const PatchManagement = lazy(() => import("./pages/settings/PatchManagement"));
const SettingsAgentConfig = lazy(
() => import("./pages/settings/SettingsAgentConfig"),
);
const SettingsHostGroups = lazy(
() => import("./pages/settings/SettingsHostGroups"),
);
const SettingsServerConfig = lazy(
() => import("./pages/settings/SettingsServerConfig"),
);
const SettingsUsers = lazy(() => import("./pages/settings/SettingsUsers"));
// Loading fallback component
const LoadingFallback = () => (
<div className="min-h-screen bg-gradient-to-br from-primary-50 to-secondary-50 dark:from-secondary-900 dark:to-secondary-800 flex items-center justify-center">
<div className="text-center">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary-600 mx-auto mb-4"></div>
<p className="text-secondary-600 dark:text-secondary-300">Loading...</p>
</div>
</div>
);
function AppRoutes() {
const { needsFirstTimeSetup, authPhase, isAuthenticated } = useAuth();
@@ -53,285 +79,337 @@ function AppRoutes() {
}
return (
<Routes>
<Route path="/login" element={<Login />} />
<Route
path="/"
element={
<ProtectedRoute requirePermission="can_view_dashboard">
<Layout>
<Dashboard />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Hosts />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts/:hostId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<HostDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<Packages />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Repositories />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories/:repositoryId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<RepositoryDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/permissions"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/roles"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/profile"
element={
<ProtectedRoute>
<Layout>
<SettingsLayout>
<Profile />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/host-groups"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/notifications"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<Notifications />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config/management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config/version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/alert-channels"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<AlertChannels />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/integrations"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<Integrations />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/patch-management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<PatchManagement />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-url"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/branding"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/options"
element={
<ProtectedRoute requirePermission="can_manage_hosts">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages/:packageId"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<PackageDetail />
</Layout>
</ProtectedRoute>
}
/>
</Routes>
<Suspense fallback={<LoadingFallback />}>
<Routes>
<Route path="/login" element={<Login />} />
<Route
path="/"
element={
<ProtectedRoute requirePermission="can_view_dashboard">
<Layout>
<Dashboard />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Hosts />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/hosts/:hostId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<HostDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<Packages />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Repositories />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/repositories/:repositoryId"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<RepositoryDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/automation"
element={
<ProtectedRoute requirePermission="can_view_hosts">
<Layout>
<Automation />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<Docker />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker/containers/:id"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<DockerContainerDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker/images/:id"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<DockerImageDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/docker/hosts/:id"
element={
<ProtectedRoute requirePermission="can_view_reports">
<Layout>
<DockerHostDetail />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/permissions"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/roles"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/profile"
element={
<ProtectedRoute>
<Layout>
<SettingsLayout>
<Profile />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/host-groups"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/notifications"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<Notifications />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config/management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config/version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/alert-channels"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<AlertChannels />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/integrations"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<Integrations />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/patch-management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<PatchManagement />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-url"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/branding"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/options"
element={
<ProtectedRoute requirePermission="can_manage_hosts">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages/:packageId"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<PackageDetail />
</Layout>
</ProtectedRoute>
}
/>
</Routes>
</Suspense>
);
}

File diff suppressed because one or more lines are too long

View File

@@ -1,11 +1,16 @@
import { useQuery } from "@tanstack/react-query";
import { useEffect } from "react";
import { isAuthReady } from "../constants/authPhases";
import { useAuth } from "../contexts/AuthContext";
import { settingsAPI } from "../utils/api";
const LogoProvider = ({ children }) => {
const { authPhase, isAuthenticated } = useAuth();
const { data: settings } = useQuery({
queryKey: ["settings"],
queryFn: () => settingsAPI.get().then((res) => res.data),
enabled: isAuthReady(authPhase, isAuthenticated()),
});
useEffect(() => {

View File

@@ -82,6 +82,7 @@ const SettingsLayout = ({ children }) => {
name: "Alert Channels",
href: "/settings/alert-channels",
icon: Bell,
comingSoon: true,
},
{
name: "Notifications",
@@ -118,7 +119,6 @@ const SettingsLayout = ({ children }) => {
name: "Integrations",
href: "/settings/integrations",
icon: Wrench,
comingSoon: true,
},
],
});

View File

@@ -54,7 +54,7 @@ const UsersTab = () => {
});
// Update user mutation
const updateUserMutation = useMutation({
const _updateUserMutation = useMutation({
mutationFn: ({ id, data }) => adminUsersAPI.update(id, data),
onSuccess: () => {
queryClient.invalidateQueries(["users"]);
@@ -319,9 +319,8 @@ const UsersTab = () => {
user={editingUser}
isOpen={!!editingUser}
onClose={() => setEditingUser(null)}
onUserUpdated={() => {
queryClient.invalidateQueries(["users"]);
}}
onUpdateUser={updateUserMutation.mutate}
isLoading={updateUserMutation.isPending}
roles={roles}
/>
)}
@@ -591,7 +590,14 @@ const AddUserModal = ({ isOpen, onClose, onUserCreated, roles }) => {
};
// Edit User Modal Component
const EditUserModal = ({ user, isOpen, onClose, onUserUpdated, roles }) => {
const EditUserModal = ({
user,
isOpen,
onClose,
onUpdateUser,
isLoading,
roles,
}) => {
const editUsernameId = useId();
const editEmailId = useId();
const editFirstNameId = useId();
@@ -607,7 +613,6 @@ const EditUserModal = ({ user, isOpen, onClose, onUserUpdated, roles }) => {
role: user?.role || "user",
is_active: user?.is_active ?? true,
});
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState("");
const [success, setSuccess] = useState(false);
@@ -635,22 +640,18 @@ const EditUserModal = ({ user, isOpen, onClose, onUserUpdated, roles }) => {
const handleSubmit = async (e) => {
e.preventDefault();
setIsLoading(true);
setError("");
setSuccess(false);
try {
await adminUsersAPI.update(user.id, formData);
await onUpdateUser({ id: user.id, data: formData });
setSuccess(true);
onUserUpdated();
// Auto-close after 1.5 seconds
setTimeout(() => {
onClose();
}, 1500);
} catch (err) {
setError(err.response?.data?.error || "Failed to update user");
} finally {
setIsLoading(false);
}
};

View File

@@ -128,12 +128,14 @@ const VersionUpdateTab = () => {
<span className="text-lg font-mono text-secondary-900 dark:text-white">
{versionInfo.github.latestRelease.tagName}
</span>
<div className="text-xs text-secondary-500 dark:text-secondary-400">
Published:{" "}
{new Date(
versionInfo.github.latestRelease.publishedAt,
).toLocaleDateString()}
</div>
{versionInfo.github.latestRelease.publishedAt && (
<div className="text-xs text-secondary-500 dark:text-secondary-400">
Published:{" "}
{new Date(
versionInfo.github.latestRelease.publishedAt,
).toLocaleDateString()}
</div>
)}
</div>
</div>
)}

View File

@@ -0,0 +1,581 @@
import { useQuery } from "@tanstack/react-query";
import {
Activity,
AlertCircle,
ArrowDown,
ArrowUp,
ArrowUpDown,
Bot,
CheckCircle,
Clock,
Play,
RefreshCw,
Settings,
XCircle,
Zap,
} from "lucide-react";
import { useEffect, useState } from "react";
import api from "../utils/api";
const Automation = () => {
const [activeTab, setActiveTab] = useState("overview");
const [sortField, setSortField] = useState("nextRunTimestamp");
const [sortDirection, setSortDirection] = useState("asc");
// Fetch automation overview data
const { data: overview, isLoading: overviewLoading } = useQuery({
queryKey: ["automation-overview"],
queryFn: async () => {
const response = await api.get("/automation/overview");
return response.data.data;
},
refetchInterval: 30000, // Refresh every 30 seconds
});
// Fetch queue statistics
const { data: queueStats, isLoading: statsLoading } = useQuery({
queryKey: ["automation-stats"],
queryFn: async () => {
const response = await api.get("/automation/stats");
return response.data.data;
},
refetchInterval: 30000,
});
// Fetch recent jobs
const { data: recentJobs, isLoading: jobsLoading } = useQuery({
queryKey: ["automation-jobs"],
queryFn: async () => {
const jobs = await Promise.all([
api
.get("/automation/jobs/github-update-check?limit=5")
.then((r) => r.data.data || []),
api
.get("/automation/jobs/session-cleanup?limit=5")
.then((r) => r.data.data || []),
]);
return {
githubUpdate: jobs[0],
sessionCleanup: jobs[1],
};
},
refetchInterval: 30000,
});
const getStatusIcon = (status) => {
switch (status) {
case "completed":
return <CheckCircle className="h-4 w-4 text-green-500" />;
case "failed":
return <XCircle className="h-4 w-4 text-red-500" />;
case "active":
return <Activity className="h-4 w-4 text-blue-500 animate-pulse" />;
default:
return <Clock className="h-4 w-4 text-gray-500" />;
}
};
const getStatusColor = (status) => {
switch (status) {
case "completed":
return "bg-green-100 text-green-800";
case "failed":
return "bg-red-100 text-red-800";
case "active":
return "bg-blue-100 text-blue-800";
default:
return "bg-gray-100 text-gray-800";
}
};
const formatDate = (dateString) => {
if (!dateString) return "N/A";
return new Date(dateString).toLocaleString();
};
const formatDuration = (ms) => {
if (!ms) return "N/A";
return `${ms}ms`;
};
const getStatusBadge = (status) => {
switch (status) {
case "Success":
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-green-100 text-green-800">
Success
</span>
);
case "Failed":
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-red-100 text-red-800">
Failed
</span>
);
case "Never run":
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
Never run
</span>
);
default:
return (
<span className="px-2 py-1 text-xs font-medium rounded-full bg-gray-100 text-gray-800">
{status}
</span>
);
}
};
const getNextRunTime = (schedule, lastRun) => {
if (schedule === "Manual only") return "Manual trigger only";
if (schedule === "Daily at midnight") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(0, 0, 0, 0);
return tomorrow.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
if (schedule === "Daily at 2 AM") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(2, 0, 0, 0);
return tomorrow.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
if (schedule === "Every hour") {
const now = new Date();
const nextHour = new Date(now);
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
return nextHour.toLocaleString([], {
hour12: true,
hour: "numeric",
minute: "2-digit",
day: "numeric",
month: "numeric",
year: "numeric",
});
}
return "Unknown";
};
const getNextRunTimestamp = (schedule) => {
if (schedule === "Manual only") return Number.MAX_SAFE_INTEGER; // Manual tasks go to bottom
if (schedule === "Daily at midnight") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(0, 0, 0, 0);
return tomorrow.getTime();
}
if (schedule === "Daily at 2 AM") {
const now = new Date();
const tomorrow = new Date(now);
tomorrow.setDate(tomorrow.getDate() + 1);
tomorrow.setHours(2, 0, 0, 0);
return tomorrow.getTime();
}
if (schedule === "Every hour") {
const now = new Date();
const nextHour = new Date(now);
nextHour.setHours(nextHour.getHours() + 1, 0, 0, 0);
return nextHour.getTime();
}
return Number.MAX_SAFE_INTEGER; // Unknown schedules go to bottom
};
const triggerManualJob = async (jobType, data = {}) => {
try {
let endpoint;
if (jobType === "github") {
endpoint = "/automation/trigger/github-update";
} else if (jobType === "sessions") {
endpoint = "/automation/trigger/session-cleanup";
} else if (jobType === "echo") {
endpoint = "/automation/trigger/echo-hello";
} else if (jobType === "orphaned-repos") {
endpoint = "/automation/trigger/orphaned-repo-cleanup";
}
if (!endpoint) {
console.warn("Unknown job type:", jobType);
return;
}
await api.post(endpoint, data);
// Refresh data
window.location.reload();
} catch (error) {
console.error("Error triggering job:", error);
alert(
"Failed to trigger job: " +
(error.response?.data?.error || error.message),
);
}
};
const handleSort = (field) => {
if (sortField === field) {
setSortDirection(sortDirection === "asc" ? "desc" : "asc");
} else {
setSortField(field);
setSortDirection("asc");
}
};
const getSortIcon = (field) => {
if (sortField !== field) return <ArrowUpDown className="h-4 w-4" />;
return sortDirection === "asc" ? (
<ArrowUp className="h-4 w-4" />
) : (
<ArrowDown className="h-4 w-4" />
);
};
// Sort automations based on current sort settings
const sortedAutomations = overview?.automations
? [...overview.automations].sort((a, b) => {
let aValue, bValue;
switch (sortField) {
case "name":
aValue = a.name.toLowerCase();
bValue = b.name.toLowerCase();
break;
case "schedule":
aValue = a.schedule.toLowerCase();
bValue = b.schedule.toLowerCase();
break;
case "lastRun":
// Convert "Never" to empty string for proper sorting
aValue = a.lastRun === "Never" ? "" : a.lastRun;
bValue = b.lastRun === "Never" ? "" : b.lastRun;
break;
case "lastRunTimestamp":
aValue = a.lastRunTimestamp || 0;
bValue = b.lastRunTimestamp || 0;
break;
case "nextRunTimestamp":
aValue = getNextRunTimestamp(a.schedule);
bValue = getNextRunTimestamp(b.schedule);
break;
case "status":
aValue = a.status.toLowerCase();
bValue = b.status.toLowerCase();
break;
default:
aValue = a[sortField];
bValue = b[sortField];
}
if (aValue < bValue) return sortDirection === "asc" ? -1 : 1;
if (aValue > bValue) return sortDirection === "asc" ? 1 : -1;
return 0;
})
: [];
const tabs = [{ id: "overview", name: "Overview", icon: Settings }];
return (
<div className="space-y-6">
{/* Page Header */}
<div className="flex items-center justify-between">
<div>
<h1 className="text-2xl font-semibold text-secondary-900 dark:text-white">
Automation Management
</h1>
<p className="text-sm text-secondary-600 dark:text-secondary-400 mt-1">
Monitor and manage automated server operations, agent
communications, and patch deployments
</p>
</div>
<div className="flex items-center gap-3">
<button
type="button"
onClick={() => triggerManualJob("github")}
className="btn-outline flex items-center gap-2"
title="Trigger manual GitHub update check"
>
<RefreshCw className="h-4 w-4" />
Check Updates
</button>
<button
type="button"
onClick={() => triggerManualJob("sessions")}
className="btn-outline flex items-center gap-2"
title="Trigger manual session cleanup"
>
<RefreshCw className="h-4 w-4" />
Clean Sessions
</button>
<button
type="button"
onClick={() =>
triggerManualJob("echo", {
message: "Hello from Automation Page!",
})
}
className="btn-outline flex items-center gap-2"
title="Trigger echo hello task"
>
<RefreshCw className="h-4 w-4" />
Echo Hello
</button>
</div>
</div>
{/* Stats Cards */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
{/* Scheduled Tasks Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Clock className="h-5 w-5 text-warning-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Scheduled Tasks
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.scheduledTasks || 0}
</p>
</div>
</div>
</div>
{/* Running Tasks Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Play className="h-5 w-5 text-success-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Running Tasks
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.runningTasks || 0}
</p>
</div>
</div>
</div>
{/* Failed Tasks Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<XCircle className="h-5 w-5 text-red-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Failed Tasks
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.failedTasks || 0}
</p>
</div>
</div>
</div>
{/* Total Task Runs Card */}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Zap className="h-5 w-5 text-secondary-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Total Task Runs
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{overviewLoading ? "..." : overview?.totalAutomations || 0}
</p>
</div>
</div>
</div>
</div>
{/* Tabs */}
<div className="mb-6">
<div className="border-b border-gray-200 dark:border-gray-700">
<nav className="-mb-px flex space-x-8">
{tabs.map((tab) => (
<button
type="button"
key={tab.id}
onClick={() => setActiveTab(tab.id)}
className={`py-2 px-1 border-b-2 font-medium text-sm flex items-center gap-2 ${
activeTab === tab.id
? "border-blue-500 text-blue-600 dark:text-blue-400"
: "border-transparent text-gray-500 hover:text-gray-700 hover:border-gray-300 dark:text-gray-400 dark:hover:text-gray-300"
}`}
>
<tab.icon className="h-4 w-4" />
{tab.name}
</button>
))}
</nav>
</div>
</div>
{/* Tab Content */}
{activeTab === "overview" && (
<div className="card p-6">
{overviewLoading ? (
<div className="text-center py-8">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600 mx-auto"></div>
<p className="mt-2 text-sm text-secondary-500">
Loading automations...
</p>
</div>
) : (
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-600">
<thead className="bg-secondary-50 dark:bg-secondary-700">
<tr>
<th className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider">
Run
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("name")}
>
<div className="flex items-center gap-1">
Task
{getSortIcon("name")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("schedule")}
>
<div className="flex items-center gap-1">
Frequency
{getSortIcon("schedule")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("lastRunTimestamp")}
>
<div className="flex items-center gap-1">
Last Run
{getSortIcon("lastRunTimestamp")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("nextRunTimestamp")}
>
<div className="flex items-center gap-1">
Next Run
{getSortIcon("nextRunTimestamp")}
</div>
</th>
<th
className="px-4 py-2 text-left text-xs font-medium text-secondary-500 dark:text-secondary-300 uppercase tracking-wider cursor-pointer hover:bg-secondary-100 dark:hover:bg-secondary-600"
onClick={() => handleSort("status")}
>
<div className="flex items-center gap-1">
Status
{getSortIcon("status")}
</div>
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-600">
{sortedAutomations.map((automation) => (
<tr
key={automation.queue}
className="hover:bg-secondary-50 dark:hover:bg-secondary-700"
>
<td className="px-4 py-2 whitespace-nowrap">
{automation.schedule !== "Manual only" ? (
<button
type="button"
onClick={() => {
if (automation.queue.includes("github")) {
triggerManualJob("github");
} else if (automation.queue.includes("session")) {
triggerManualJob("sessions");
} else if (automation.queue.includes("echo")) {
triggerManualJob("echo", {
message: "Manual trigger from table",
});
} else if (
automation.queue.includes("orphaned-repo")
) {
triggerManualJob("orphaned-repos");
}
}}
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
title="Run Now"
>
<Play className="h-3 w-3" />
</button>
) : (
<button
type="button"
onClick={() => {
if (automation.queue.includes("echo")) {
triggerManualJob("echo", {
message: "Manual trigger from table",
});
}
}}
className="inline-flex items-center justify-center w-6 h-6 border border-transparent rounded text-white bg-green-600 hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-green-500 transition-colors duration-200"
title="Trigger"
>
<Play className="h-3 w-3" />
</button>
)}
</td>
<td className="px-4 py-2 whitespace-nowrap">
<div>
<div className="text-sm font-medium text-secondary-900 dark:text-white">
{automation.name}
</div>
<div className="text-xs text-secondary-500 dark:text-secondary-400">
{automation.description}
</div>
</div>
</td>
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
{automation.schedule}
</td>
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
{automation.lastRun}
</td>
<td className="px-4 py-2 whitespace-nowrap text-sm text-secondary-900 dark:text-white">
{getNextRunTime(
automation.schedule,
automation.lastRun,
)}
</td>
<td className="px-4 py-2 whitespace-nowrap">
{getStatusBadge(automation.status)}
</td>
</tr>
))}
</tbody>
</table>
</div>
)}
</div>
)}
</div>
);
};
export default Automation;
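
For reference, here is a minimal sketch of the payload this page assumes from GET /automation/overview, inferred only from the fields the component reads (the stat cards and the automations table). The values, and the exact backend shape, are illustrative assumptions rather than something taken from the server code in this diff.

// Hedged sketch — assumed /automation/overview body (response.data.data);
// field names come from this component, values are made up for illustration.
const exampleOverview = {
  scheduledTasks: 3,
  runningTasks: 0,
  failedTasks: 1,
  totalAutomations: 42,
  automations: [
    {
      queue: "github-update-check",
      name: "GitHub Update Check",
      description: "Checks GitHub for a newer release",
      schedule: "Daily at midnight",
      lastRun: "Never",
      lastRunTimestamp: 0,
      status: "Never run",
    },
  ],
};

With this shape, rows whose schedule is "Manual only" sort to the bottom of the table because getNextRunTimestamp returns Number.MAX_SAFE_INTEGER for them.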

View File

@@ -6,6 +6,8 @@ import {
Chart as ChartJS,
Legend,
LinearScale,
LineElement,
PointElement,
Title,
Tooltip,
} from "chart.js";
@@ -23,7 +25,7 @@ import {
WifiOff,
} from "lucide-react";
import { useEffect, useState } from "react";
import { Bar, Doughnut, Pie } from "react-chartjs-2";
import { Bar, Doughnut, Line, Pie } from "react-chartjs-2";
import { useNavigate } from "react-router-dom";
import DashboardSettingsModal from "../components/DashboardSettingsModal";
import { useAuth } from "../contexts/AuthContext";
@@ -43,12 +45,16 @@ ChartJS.register(
CategoryScale,
LinearScale,
BarElement,
LineElement,
PointElement,
Title,
);
const Dashboard = () => {
const [showSettingsModal, setShowSettingsModal] = useState(false);
const [cardPreferences, setCardPreferences] = useState([]);
const [packageTrendsPeriod, setPackageTrendsPeriod] = useState("1"); // days
const [packageTrendsHost, setPackageTrendsHost] = useState("all"); // host filter
const navigate = useNavigate();
const { isDark } = useTheme();
const { user } = useAuth();
@@ -91,7 +97,7 @@ const Dashboard = () => {
navigate("/repositories");
};
const handleOSDistributionClick = () => {
const _handleOSDistributionClick = () => {
navigate("/hosts?showFilters=true", { replace: true });
};
@@ -99,7 +105,7 @@ const Dashboard = () => {
navigate("/hosts?filter=needsUpdates", { replace: true });
};
const handlePackagePriorityClick = () => {
const _handlePackagePriorityClick = () => {
navigate("/packages?filter=security");
};
@@ -144,8 +150,8 @@ const Dashboard = () => {
// Map priority names to filter parameters
if (priorityName.toLowerCase().includes("security")) {
navigate("/packages?filter=security", { replace: true });
} else if (priorityName.toLowerCase().includes("outdated")) {
navigate("/packages?filter=outdated", { replace: true });
} else if (priorityName.toLowerCase().includes("regular")) {
navigate("/packages?filter=regular", { replace: true });
}
}
};
@@ -189,6 +195,26 @@ const Dashboard = () => {
refetchOnWindowFocus: false, // Don't refetch when window regains focus
});
// Package trends data query
const {
data: packageTrendsData,
isLoading: packageTrendsLoading,
error: _packageTrendsError,
} = useQuery({
queryKey: ["packageTrends", packageTrendsPeriod, packageTrendsHost],
queryFn: () => {
const params = {
days: packageTrendsPeriod,
};
if (packageTrendsHost !== "all") {
params.hostId = packageTrendsHost;
}
return dashboardAPI.getPackageTrends(params).then((res) => res.data);
},
staleTime: 5 * 60 * 1000, // 5 minutes
refetchOnWindowFocus: false,
});
// Fetch recent users (permission protected server-side)
const { data: recentUsers } = useQuery({
queryKey: ["dashboardRecentUsers"],
@@ -299,6 +325,8 @@ const Dashboard = () => {
].includes(cardId)
) {
return "charts";
} else if (["packageTrends"].includes(cardId)) {
return "charts";
} else if (["erroredHosts", "quickStats"].includes(cardId)) {
return "fullwidth";
}
@@ -312,6 +340,8 @@ const Dashboard = () => {
return "grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4";
case "charts":
return "grid grid-cols-1 lg:grid-cols-3 gap-6";
case "widecharts":
return "grid grid-cols-1 lg:grid-cols-3 gap-6";
case "fullwidth":
return "space-y-6";
default:
@@ -651,17 +681,7 @@ const Dashboard = () => {
case "osDistribution":
return (
<button
type="button"
className="card p-6 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 w-full text-left"
onClick={handleOSDistributionClick}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
handleOSDistributionClick();
}
}}
>
<div className="card p-6 w-full">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-4">
OS Distribution
</h3>
@@ -670,22 +690,12 @@ const Dashboard = () => {
<Pie data={osChartData} options={chartOptions} />
</div>
</div>
</button>
</div>
);
case "osDistributionDoughnut":
return (
<button
type="button"
className="card p-6 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 w-full text-left"
onClick={handleOSDistributionClick}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
handleOSDistributionClick();
}
}}
>
<div className="card p-6 w-full">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-4">
OS Distribution
</h3>
@@ -694,29 +704,19 @@ const Dashboard = () => {
<Doughnut data={osChartData} options={doughnutChartOptions} />
</div>
</div>
</button>
</div>
);
case "osDistributionBar":
return (
<button
type="button"
className="card p-6 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 w-full text-left"
onClick={handleOSDistributionClick}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
handleOSDistributionClick();
}
}}
>
<div className="card p-6 w-full">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-4">
OS Distribution
</h3>
<div className="h-64">
<Bar data={osBarChartData} options={barChartOptions} />
</div>
</button>
</div>
);
case "updateStatus":
@@ -748,19 +748,9 @@ const Dashboard = () => {
case "packagePriority":
return (
<button
type="button"
className="card p-6 cursor-pointer hover:shadow-card-hover dark:hover:shadow-card-hover-dark transition-shadow duration-200 w-full text-left"
onClick={handlePackagePriorityClick}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
handlePackagePriorityClick();
}
}}
>
<div className="card p-6 w-full">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white mb-4">
Package Priority
Outdated Packages by Priority
</h3>
<div className="h-64 w-full flex items-center justify-center">
<div className="w-full h-full max-w-sm">
@@ -770,7 +760,72 @@ const Dashboard = () => {
/>
</div>
</div>
</button>
</div>
);
case "packageTrends":
return (
<div className="card p-6 w-full">
<div className="flex items-center justify-between mb-4">
<h3 className="text-lg font-medium text-secondary-900 dark:text-white">
Package Trends Over Time
</h3>
<div className="flex items-center gap-3">
{/* Period Selector */}
<select
value={packageTrendsPeriod}
onChange={(e) => setPackageTrendsPeriod(e.target.value)}
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:ring-2 focus:ring-primary-500 focus:border-primary-500"
>
<option value="1">Last 24 hours</option>
<option value="7">Last 7 days</option>
<option value="30">Last 30 days</option>
<option value="90">Last 90 days</option>
<option value="180">Last 6 months</option>
<option value="365">Last year</option>
</select>
{/* Host Selector */}
<select
value={packageTrendsHost}
onChange={(e) => setPackageTrendsHost(e.target.value)}
className="px-3 py-1.5 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:ring-2 focus:ring-primary-500 focus:border-primary-500"
>
<option value="all">All Hosts</option>
{packageTrendsData?.hosts?.length > 0 ? (
packageTrendsData.hosts.map((host) => (
<option key={host.id} value={host.id}>
{host.friendly_name || host.hostname}
</option>
))
) : (
<option disabled>
{packageTrendsLoading
? "Loading hosts..."
: "No hosts available"}
</option>
)}
</select>
</div>
</div>
<div className="h-64 w-full">
{packageTrendsLoading ? (
<div className="flex items-center justify-center h-full">
<RefreshCw className="h-8 w-8 animate-spin text-primary-600" />
</div>
) : packageTrendsData?.chartData ? (
<Line
data={packageTrendsData.chartData}
options={packageTrendsChartOptions}
/>
) : (
<div className="flex items-center justify-center h-full text-secondary-500 dark:text-secondary-400">
No data available
</div>
)}
</div>
</div>
);
case "quickStats": {
@@ -1068,6 +1123,167 @@ const Dashboard = () => {
onClick: handlePackagePriorityChartClick,
};
const packageTrendsChartOptions = {
responsive: true,
maintainAspectRatio: false,
plugins: {
legend: {
position: "top",
labels: {
color: isDark ? "#ffffff" : "#374151",
font: {
size: 12,
},
padding: 20,
usePointStyle: true,
pointStyle: "circle",
},
},
tooltip: {
mode: "index",
intersect: false,
backgroundColor: isDark ? "#374151" : "#ffffff",
titleColor: isDark ? "#ffffff" : "#374151",
bodyColor: isDark ? "#ffffff" : "#374151",
borderColor: isDark ? "#4B5563" : "#E5E7EB",
borderWidth: 1,
callbacks: {
title: (context) => {
const label = context[0].label;
// Handle empty or invalid labels
if (!label || typeof label !== "string") {
return "Unknown Date";
}
// Format hourly labels (e.g., "2025-10-07T14" -> "Oct 7, 2:00 PM")
if (label.includes("T")) {
try {
const date = new Date(`${label}:00:00`);
// Check if date is valid
if (isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
return date.toLocaleDateString("en-US", {
month: "short",
day: "numeric",
hour: "numeric",
minute: "2-digit",
hour12: true,
});
} catch (error) {
return label; // Return original label if parsing fails
}
}
// Format daily labels (e.g., "2025-10-07" -> "Oct 7")
try {
const date = new Date(label);
// Check if date is valid
if (isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
return date.toLocaleDateString("en-US", {
month: "short",
day: "numeric",
});
} catch (error) {
return label; // Return original label if parsing fails
}
},
},
},
},
scales: {
x: {
display: true,
title: {
display: true,
text: packageTrendsPeriod === "1" ? "Time (Hours)" : "Date",
color: isDark ? "#ffffff" : "#374151",
},
ticks: {
color: isDark ? "#ffffff" : "#374151",
font: {
size: 11,
},
callback: function (value, _index, _ticks) {
const label = this.getLabelForValue(value);
// Handle empty or invalid labels
if (!label || typeof label !== "string") {
return "Unknown";
}
// Format hourly labels (e.g., "2025-10-07T14" -> "2 PM")
if (label.includes("T")) {
try {
const hour = label.split("T")[1];
const hourNum = parseInt(hour, 10);
// Validate hour number
if (isNaN(hourNum) || hourNum < 0 || hourNum > 23) {
return hour; // Return original hour if invalid
}
return hourNum === 0
? "12 AM"
: hourNum < 12
? `${hourNum} AM`
: hourNum === 12
? "12 PM"
: `${hourNum - 12} PM`;
} catch (error) {
return label; // Return original label if parsing fails
}
}
// Format daily labels (e.g., "2025-10-07" -> "Oct 7")
try {
const date = new Date(label);
// Check if date is valid
if (isNaN(date.getTime())) {
return label; // Return original label if date is invalid
}
return date.toLocaleDateString("en-US", {
month: "short",
day: "numeric",
});
} catch (error) {
return label; // Return original label if parsing fails
}
},
},
grid: {
color: isDark ? "#374151" : "#E5E7EB",
},
},
y: {
display: true,
title: {
display: true,
text: "Number of Packages",
color: isDark ? "#ffffff" : "#374151",
},
ticks: {
color: isDark ? "#ffffff" : "#374151",
font: {
size: 11,
},
beginAtZero: true,
},
grid: {
color: isDark ? "#374151" : "#E5E7EB",
},
},
},
interaction: {
mode: "nearest",
axis: "x",
intersect: false,
},
};
const barChartOptions = {
responsive: true,
indexAxis: "y", // Make the chart horizontal
@@ -1100,6 +1316,7 @@ const Dashboard = () => {
},
},
},
onClick: handleOSChartClick,
};
const osChartData = {
@@ -1245,7 +1462,12 @@ const Dashboard = () => {
className={getGroupClassName(group.type)}
>
{group.cards.map((card, cardIndex) => (
<div key={`card-${card.cardId}-${groupIndex}-${cardIndex}`}>
<div
key={`card-${card.cardId}-${groupIndex}-${cardIndex}`}
className={
card.cardId === "packageTrends" ? "lg:col-span-2" : ""
}
>
{renderCard(card.cardId)}
</div>
))}
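
A minimal sketch of what the new dashboardAPI.getPackageTrends({ days, hostId }) call is assumed to return, based purely on how the chart and host selector above consume it (a hosts list plus a Chart.js-ready chartData object). The dataset label and colours are illustrative, not taken from the backend.

// Hedged sketch — assumed package-trends payload (res.data); labels are hourly
// ("YYYY-MM-DDTHH") when days === "1" and daily ("YYYY-MM-DD") otherwise,
// matching the tick and tooltip formatters in packageTrendsChartOptions.
const examplePackageTrends = {
  hosts: [
    { id: "host-1", friendly_name: "web-01", hostname: "web-01.example.com" },
  ],
  chartData: {
    labels: ["2025-10-07T13", "2025-10-07T14", "2025-10-07T15"],
    datasets: [
      {
        label: "Outdated packages", // illustrative dataset name
        data: [14, 12, 9],
        borderColor: "#2563eb",
        tension: 0.3,
      },
    ],
  },
};

Given those labels, the x-axis ticks render roughly as "1 PM", "2 PM", "3 PM" and the tooltip title as "Oct 7, 1:00 PM", which is what the formatters above are written to produce.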

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -657,6 +657,18 @@ const Hosts = () => {
hideStale,
]);
// Get unique OS types from hosts for dynamic dropdown
const uniqueOsTypes = useMemo(() => {
if (!hosts) return [];
const osTypes = new Set();
hosts.forEach((host) => {
if (host.os_type) {
osTypes.add(host.os_type);
}
});
return Array.from(osTypes).sort();
}, [hosts]);
// Group hosts by selected field
const groupedHosts = useMemo(() => {
if (groupBy === "none") {
@@ -870,9 +882,11 @@ const Hosts = () => {
return (
<button
type="button"
onClick={() => navigate(`/packages?host=${host.id}`)}
onClick={() =>
navigate(`/packages?host=${host.id}&filter=outdated`)
}
className="text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 font-medium hover:underline"
title="View packages for this host"
title="View outdated packages for this host"
>
{host.updatesCount || 0}
</button>
@@ -1266,9 +1280,11 @@ const Hosts = () => {
className="w-full border border-secondary-300 dark:border-secondary-600 rounded-lg px-3 py-2 focus:ring-2 focus:ring-primary-500 focus:border-primary-500 bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white"
>
<option value="all">All OS</option>
<option value="linux">Linux</option>
<option value="windows">Windows</option>
<option value="macos">macOS</option>
{uniqueOsTypes.map((osType) => (
<option key={osType} value={osType.toLowerCase()}>
{osType}
</option>
))}
</select>
</div>
<div className="flex items-end">
@@ -1554,6 +1570,7 @@ const BulkAssignModal = ({
isLoading,
}) => {
const [selectedGroupId, setSelectedGroupId] = useState("");
const bulkHostGroupId = useId();
// Fetch host groups for selection
const { data: hostGroups } = useQuery({
@@ -1572,28 +1589,31 @@ const BulkAssignModal = ({
return (
<div className="fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50">
<div className="bg-white rounded-lg p-6 w-full max-w-md">
<div className="bg-white dark:bg-secondary-800 rounded-lg p-6 w-full max-w-md">
<div className="flex justify-between items-center mb-4">
<h3 className="text-lg font-semibold text-secondary-900">
<h3 className="text-lg font-semibold text-secondary-900 dark:text-white">
Assign to Host Group
</h3>
<button
type="button"
onClick={onClose}
className="text-secondary-400 hover:text-secondary-600"
className="text-secondary-400 hover:text-secondary-600 dark:text-secondary-300 dark:hover:text-secondary-100"
>
<X className="h-5 w-5" />
</button>
</div>
<div className="mb-4">
<p className="text-sm text-secondary-600 mb-2">
<p className="text-sm text-secondary-600 dark:text-secondary-400 mb-2">
Assigning {selectedHosts.length} host
{selectedHosts.length !== 1 ? "s" : ""}:
</p>
<div className="max-h-32 overflow-y-auto bg-secondary-50 rounded-md p-3">
<div className="max-h-32 overflow-y-auto bg-secondary-50 dark:bg-secondary-700 rounded-md p-3">
{selectedHostNames.map((friendlyName) => (
<div key={friendlyName} className="text-sm text-secondary-700">
<div
key={friendlyName}
className="text-sm text-secondary-700 dark:text-secondary-300"
>
{friendlyName}
</div>
))}
@@ -1604,7 +1624,7 @@ const BulkAssignModal = ({
<div>
<label
htmlFor={bulkHostGroupId}
className="block text-sm font-medium text-secondary-700 mb-1"
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-1"
>
Host Group
</label>
@@ -1612,7 +1632,7 @@ const BulkAssignModal = ({
id={bulkHostGroupId}
value={selectedGroupId}
onChange={(e) => setSelectedGroupId(e.target.value)}
className="w-full px-3 py-2 border border-secondary-300 rounded-md focus:outline-none focus:ring-2 focus:ring-primary-500"
className="w-full px-3 py-2 border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-700 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500"
>
<option value="">No group (ungrouped)</option>
{hostGroups?.map((group) => (
@@ -1621,7 +1641,7 @@ const BulkAssignModal = ({
</option>
))}
</select>
<p className="mt-1 text-sm text-secondary-500">
<p className="mt-1 text-sm text-secondary-500 dark:text-secondary-400">
Select a group to assign these hosts to, or leave ungrouped.
</p>
</div>
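
As a quick illustration of the dynamic OS filter added above, this is what the uniqueOsTypes memo yields for a small, made-up host list (the hosts themselves are hypothetical).

// Hedged sketch — derive the OS dropdown options the same way the memo does.
const hosts = [
  { id: "a", os_type: "Ubuntu" },
  { id: "b", os_type: "Debian" },
  { id: "c", os_type: "Ubuntu" },
  { id: "d", os_type: null }, // hosts without os_type are skipped
];
const uniqueOsTypes = Array.from(
  new Set(hosts.filter((h) => h.os_type).map((h) => h.os_type)),
).sort();
// → ["Debian", "Ubuntu"], rendered as <option value="debian">Debian</option> etc.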

View File

@@ -22,6 +22,7 @@ const Login = () => {
const emailId = useId();
const passwordId = useId();
const tokenId = useId();
const rememberMeId = useId();
const { login, setAuthState } = useAuth();
const [isSignupMode, setIsSignupMode] = useState(false);
const [formData, setFormData] = useState({
@@ -33,6 +34,7 @@ const Login = () => {
});
const [tfaData, setTfaData] = useState({
token: "",
remember_me: false,
});
const [showPassword, setShowPassword] = useState(false);
const [isLoading, setIsLoading] = useState(false);
@@ -127,7 +129,11 @@ const Login = () => {
setError("");
try {
const response = await authAPI.verifyTfa(tfaUsername, tfaData.token);
const response = await authAPI.verifyTfa(
tfaUsername,
tfaData.token,
tfaData.remember_me,
);
if (response.data?.token) {
// Update AuthContext with the new authentication state
@@ -158,9 +164,11 @@ const Login = () => {
};
const handleTfaInputChange = (e) => {
const { name, value, type, checked } = e.target;
setTfaData({
...tfaData,
[e.target.name]: e.target.value.replace(/\D/g, "").slice(0, 6),
[name]:
type === "checkbox" ? checked : value.replace(/\D/g, "").slice(0, 6),
});
// Clear error when user starts typing
if (error) {
@@ -170,7 +178,7 @@ const Login = () => {
const handleBackToLogin = () => {
setRequiresTfa(false);
setTfaData({ token: "" });
setTfaData({ token: "", remember_me: false });
setError("");
};
@@ -436,6 +444,23 @@ const Login = () => {
</div>
</div>
<div className="flex items-center">
<input
id={rememberMeId}
name="remember_me"
type="checkbox"
checked={tfaData.remember_me}
onChange={handleTfaInputChange}
className="h-4 w-4 text-primary-600 focus:ring-primary-500 border-secondary-300 rounded"
/>
<label
htmlFor={rememberMeId}
className="ml-2 block text-sm text-secondary-700"
>
Remember me on this computer (skip TFA for 30 days)
</label>
</div>
{error && (
<div className="bg-danger-50 border border-danger-200 rounded-md p-3">
<div className="flex">
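
The 2FA verification call now carries the remember-me flag as a third argument. A minimal usage sketch follows; the credentials are placeholders, the import path for authAPI is assumed from the other pages in this diff, and the 30-day skip window comes from the checkbox label rather than from code shown here.

// Hedged sketch — verifyTfa with the new remember-me argument.
import { authAPI } from "../utils/api"; // assumed path

async function verifyWithRememberMe() {
  const response = await authAPI.verifyTfa("alice", "123456", true);
  // The token is handled exactly as in the submit handler above; the backend
  // is expected to mark the session (tfa_remember_me) so this device can skip
  // the prompt on future logins.
  return response.data?.token ?? null;
}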

View File

@@ -105,6 +105,10 @@ const Packages = () => {
// For security updates, filter to show only security updates
setUpdateStatusFilter("security-updates");
setCategoryFilter("all");
} else if (filter === "regular") {
// For regular (non-security) updates
setUpdateStatusFilter("regular-updates");
setCategoryFilter("all");
}
}, [searchParams]);
@@ -115,8 +119,20 @@ const Packages = () => {
refetch,
isFetching,
} = useQuery({
queryKey: ["packages"],
queryFn: () => packagesAPI.getAll({ limit: 1000 }).then((res) => res.data),
queryKey: ["packages", hostFilter, updateStatusFilter],
queryFn: () => {
const params = { limit: 10000 }; // High limit to effectively get all packages
if (hostFilter && hostFilter !== "all") {
params.host = hostFilter;
}
// Pass update status filter to backend to pre-filter packages
if (updateStatusFilter === "needs-updates") {
params.needsUpdate = "true";
} else if (updateStatusFilter === "security-updates") {
params.isSecurityUpdate = "true";
}
return packagesAPI.getAll(params).then((res) => res.data);
},
staleTime: 5 * 60 * 1000, // Data stays fresh for 5 minutes
refetchOnWindowFocus: false, // Don't refetch when window regains focus
});
@@ -160,15 +176,13 @@ const Packages = () => {
const matchesUpdateStatus =
updateStatusFilter === "all-packages" ||
updateStatusFilter === "needs-updates" ||
(updateStatusFilter === "security-updates" && pkg.isSecurityUpdate) ||
(updateStatusFilter === "regular-updates" && !pkg.isSecurityUpdate);
// For "all-packages", we don't filter by update status
// For other filters, we only show packages that need updates
const matchesUpdateNeeded =
updateStatusFilter === "all-packages" ||
(pkg.stats?.updatesNeeded || 0) > 0;
(updateStatusFilter === "needs-updates" &&
(pkg.stats?.updatesNeeded || 0) > 0) ||
(updateStatusFilter === "security-updates" &&
(pkg.stats?.securityUpdates || 0) > 0) ||
(updateStatusFilter === "regular-updates" &&
(pkg.stats?.updatesNeeded || 0) > 0 &&
(pkg.stats?.securityUpdates || 0) === 0);
const packageHosts = pkg.packageHosts || [];
const matchesHost =
@@ -176,11 +190,7 @@ const Packages = () => {
packageHosts.some((host) => host.hostId === hostFilter);
return (
matchesSearch &&
matchesCategory &&
matchesUpdateStatus &&
matchesUpdateNeeded &&
matchesHost
matchesSearch && matchesCategory && matchesUpdateStatus && matchesHost
);
});
@@ -435,8 +445,16 @@ const Packages = () => {
});
const uniquePackageHostsCount = uniquePackageHosts.size;
// Calculate total packages available
const totalPackagesCount = packages?.length || 0;
// Calculate total packages installed
// When filtering by host, count each package once (since it can only be installed once per host)
// When not filtering, sum up all installations across all hosts
const totalPackagesCount =
hostFilter && hostFilter !== "all"
? packages?.length || 0
: packages?.reduce(
(sum, pkg) => sum + (pkg.stats?.totalInstalls || 0),
0,
) || 0;
// Calculate outdated packages
const outdatedPackagesCount =
@@ -517,7 +535,7 @@ const Packages = () => {
<Package className="h-5 w-5 text-primary-600 mr-2" />
<div>
<p className="text-sm text-secondary-500 dark:text-white">
Total Packages
Total Installed
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{totalPackagesCount}
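
A short worked example of the revised "Total Installed" figure (the package stats here are invented).

// Hedged sketch — without a host filter the card sums installations across
// all hosts; with a host filter each package counts once.
const packages = [
  { name: "openssl", stats: { totalInstalls: 4 } },
  { name: "nginx", stats: { totalInstalls: 2 } },
  { name: "curl", stats: { totalInstalls: 1 } },
];
const totalAllHosts = packages.reduce(
  (sum, pkg) => sum + (pkg.stats?.totalInstalls || 0),
  0,
); // 7
const totalForOneHost = packages.length; // 3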

View File

@@ -2,12 +2,16 @@ import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import {
AlertCircle,
CheckCircle,
Clock,
Copy,
Download,
Eye,
EyeOff,
Key,
LogOut,
Mail,
MapPin,
Monitor,
Moon,
RefreshCw,
Save,
@@ -153,6 +157,7 @@ const Profile = () => {
{ id: "profile", name: "Profile Information", icon: User },
{ id: "password", name: "Change Password", icon: Key },
{ id: "tfa", name: "Multi-Factor Authentication", icon: Smartphone },
{ id: "sessions", name: "Active Sessions", icon: Monitor },
];
return (
@@ -533,6 +538,9 @@ const Profile = () => {
{/* Multi-Factor Authentication Tab */}
{activeTab === "tfa" && <TfaTab />}
{/* Sessions Tab */}
{activeTab === "sessions" && <SessionsTab />}
</div>
</div>
</div>
@@ -1072,4 +1080,256 @@ const TfaTab = () => {
);
};
// Sessions Tab Component
const SessionsTab = () => {
const _queryClient = useQueryClient();
const [_isLoading, _setIsLoading] = useState(false);
const [message, setMessage] = useState({ type: "", text: "" });
// Fetch user sessions
const {
data: sessionsData,
isLoading: sessionsLoading,
refetch,
} = useQuery({
queryKey: ["user-sessions"],
queryFn: async () => {
const response = await fetch("/api/v1/auth/sessions", {
headers: {
Authorization: `Bearer ${localStorage.getItem("token")}`,
},
});
if (!response.ok) throw new Error("Failed to fetch sessions");
return response.json();
},
});
// Revoke individual session mutation
const revokeSessionMutation = useMutation({
mutationFn: async (sessionId) => {
const response = await fetch(`/api/v1/auth/sessions/${sessionId}`, {
method: "DELETE",
headers: {
Authorization: `Bearer ${localStorage.getItem("token")}`,
},
});
if (!response.ok) throw new Error("Failed to revoke session");
return response.json();
},
onSuccess: () => {
setMessage({ type: "success", text: "Session revoked successfully" });
refetch();
},
onError: (error) => {
setMessage({ type: "error", text: error.message });
},
});
// Revoke all sessions mutation
const revokeAllSessionsMutation = useMutation({
mutationFn: async () => {
const response = await fetch("/api/v1/auth/sessions", {
method: "DELETE",
headers: {
Authorization: `Bearer ${localStorage.getItem("token")}`,
},
});
if (!response.ok) throw new Error("Failed to revoke sessions");
return response.json();
},
onSuccess: () => {
setMessage({
type: "success",
text: "All other sessions revoked successfully",
});
refetch();
},
onError: (error) => {
setMessage({ type: "error", text: error.message });
},
});
const formatDate = (dateString) => {
return new Date(dateString).toLocaleString();
};
const formatRelativeTime = (dateString) => {
const now = new Date();
const date = new Date(dateString);
const diff = now - date;
const minutes = Math.floor(diff / 60000);
const hours = Math.floor(diff / 3600000);
const days = Math.floor(diff / 86400000);
if (days > 0) return `${days} day${days > 1 ? "s" : ""} ago`;
if (hours > 0) return `${hours} hour${hours > 1 ? "s" : ""} ago`;
if (minutes > 0) return `${minutes} minute${minutes > 1 ? "s" : ""} ago`;
return "Just now";
};
const handleRevokeSession = (sessionId) => {
if (window.confirm("Are you sure you want to revoke this session?")) {
revokeSessionMutation.mutate(sessionId);
}
};
const handleRevokeAllSessions = () => {
if (
window.confirm(
"Are you sure you want to revoke all other sessions? This will log you out of all other devices.",
)
) {
revokeAllSessionsMutation.mutate();
}
};
return (
<div className="space-y-6">
{/* Header */}
<div>
<h3 className="text-lg font-medium text-secondary-900 dark:text-secondary-100">
Active Sessions
</h3>
<p className="text-sm text-secondary-600 dark:text-secondary-300">
Manage your active sessions and devices. You can see where you're
logged in and revoke access for any device.
</p>
</div>
{/* Message */}
{message.text && (
<div
className={`rounded-md p-4 ${
message.type === "success"
? "bg-success-50 border border-success-200 text-success-700"
: "bg-danger-50 border border-danger-200 text-danger-700"
}`}
>
<div className="flex">
{message.type === "success" ? (
<CheckCircle className="h-5 w-5" />
) : (
<AlertCircle className="h-5 w-5" />
)}
<div className="ml-3">
<p className="text-sm">{message.text}</p>
</div>
</div>
</div>
)}
{/* Sessions List */}
{sessionsLoading ? (
<div className="flex items-center justify-center py-8">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary-600"></div>
</div>
) : sessionsData?.sessions?.length > 0 ? (
<div className="space-y-4">
{/* Revoke All Button */}
{sessionsData.sessions.filter((s) => !s.is_current_session).length >
0 && (
<div className="flex justify-end">
<button
type="button"
onClick={handleRevokeAllSessions}
disabled={revokeAllSessionsMutation.isPending}
className="inline-flex items-center px-4 py-2 border border-danger-300 text-sm font-medium rounded-md text-danger-700 bg-white hover:bg-danger-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-danger-500 disabled:opacity-50"
>
<LogOut className="h-4 w-4 mr-2" />
{revokeAllSessionsMutation.isPending
? "Revoking..."
: "Revoke All Other Sessions"}
</button>
</div>
)}
{/* Sessions */}
{sessionsData.sessions.map((session) => (
<div
key={session.id}
className={`border rounded-lg p-4 ${
session.is_current_session
? "border-primary-200 bg-primary-50 dark:border-primary-800 dark:bg-primary-900/20"
: "border-secondary-200 bg-white dark:border-secondary-700 dark:bg-secondary-800"
}`}
>
<div className="flex items-start justify-between">
<div className="flex-1">
<div className="flex items-center space-x-3">
<Monitor className="h-5 w-5 text-secondary-500" />
<div>
<div className="flex items-center space-x-2">
<h4 className="text-sm font-medium text-secondary-900 dark:text-secondary-100">
{session.device_info?.browser} on{" "}
{session.device_info?.os}
</h4>
{session.is_current_session && (
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-medium bg-primary-100 text-primary-800 dark:bg-primary-900 dark:text-primary-200">
Current Session
</span>
)}
{session.tfa_remember_me && (
<span className="inline-flex items-center px-2 py-1 rounded-full text-xs font-medium bg-success-100 text-success-800 dark:bg-success-900 dark:text-success-200">
Remembered
</span>
)}
</div>
<p className="text-sm text-secondary-600 dark:text-secondary-400">
{session.device_info?.device} • {session.ip_address}
</p>
</div>
</div>
<div className="mt-3 grid grid-cols-1 md:grid-cols-2 gap-4 text-sm text-secondary-600 dark:text-secondary-400">
<div className="flex items-center space-x-2">
<MapPin className="h-4 w-4" />
<span>
{session.location_info?.city},{" "}
{session.location_info?.country}
</span>
</div>
<div className="flex items-center space-x-2">
<Clock className="h-4 w-4" />
<span>
Last active: {formatRelativeTime(session.last_activity)}
</span>
</div>
<div className="flex items-center space-x-2">
<span>Created: {formatDate(session.created_at)}</span>
</div>
<div className="flex items-center space-x-2">
<span>Login count: {session.login_count}</span>
</div>
</div>
</div>
{!session.is_current_session && (
<button
type="button"
onClick={() => handleRevokeSession(session.id)}
disabled={revokeSessionMutation.isPending}
className="ml-4 inline-flex items-center px-3 py-2 border border-danger-300 text-sm font-medium rounded-md text-danger-700 bg-white hover:bg-danger-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-danger-500 disabled:opacity-50"
>
<LogOut className="h-4 w-4" />
</button>
)}
</div>
</div>
))}
</div>
) : (
<div className="text-center py-8">
<Monitor className="mx-auto h-12 w-12 text-secondary-400" />
<h3 className="mt-2 text-sm font-medium text-secondary-900 dark:text-secondary-100">
No active sessions
</h3>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
You don't have any active sessions at the moment.
</p>
</div>
)}
</div>
);
};
export default Profile;
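
For reference, a sketch of the session object SessionsTab assumes from GET /api/v1/auth/sessions, reconstructed from the fields it reads. The values are placeholders and the exact backend shape has not been checked here.

// Hedged sketch — one entry of sessionsData.sessions.
const exampleSession = {
  id: "placeholder-session-id",
  is_current_session: true,
  tfa_remember_me: false,
  device_info: { browser: "Firefox", os: "Ubuntu", device: "Desktop" },
  ip_address: "203.0.113.10",
  location_info: { city: "London", country: "GB" },
  last_activity: "2025-10-13T20:00:00Z",
  created_at: "2025-10-10T09:00:00Z",
  login_count: 3,
};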

View File

@@ -18,21 +18,31 @@ import {
Unlock,
X,
} from "lucide-react";
import { useMemo, useState } from "react";
import { useNavigate } from "react-router-dom";
import { repositoryAPI } from "../utils/api";
import { useEffect, useMemo, useState } from "react";
import { useNavigate, useSearchParams } from "react-router-dom";
import { dashboardAPI, repositoryAPI } from "../utils/api";
const Repositories = () => {
const queryClient = useQueryClient();
const navigate = useNavigate();
const [searchParams] = useSearchParams();
const [searchTerm, setSearchTerm] = useState("");
const [filterType, setFilterType] = useState("all"); // all, secure, insecure
const [filterStatus, setFilterStatus] = useState("all"); // all, active, inactive
const [hostFilter, setHostFilter] = useState("");
const [sortField, setSortField] = useState("name");
const [sortDirection, setSortDirection] = useState("asc");
const [showColumnSettings, setShowColumnSettings] = useState(false);
const [deleteModalData, setDeleteModalData] = useState(null);
// Handle host filter from URL parameter
useEffect(() => {
const hostParam = searchParams.get("host");
if (hostParam) {
setHostFilter(hostParam);
}
}, [searchParams]);
// Column configuration
const [columnConfig, setColumnConfig] = useState(() => {
const defaultConfig = [
@@ -82,6 +92,17 @@ const Repositories = () => {
queryFn: () => repositoryAPI.getStats().then((res) => res.data),
});
// Fetch host information when filtering by host
const { data: hosts } = useQuery({
queryKey: ["hosts"],
queryFn: () => dashboardAPI.getHosts().then((res) => res.data),
staleTime: 5 * 60 * 1000,
enabled: !!hostFilter,
});
// Get the filtered host information
const filteredHost = hosts?.find((host) => host.id === hostFilter);
// Delete repository mutation
const deleteRepositoryMutation = useMutation({
mutationFn: (repositoryId) => repositoryAPI.delete(repositoryId),
@@ -202,7 +223,11 @@ const Repositories = () => {
(filterStatus === "active" && repo.is_active === true) ||
(filterStatus === "inactive" && repo.is_active === false);
return matchesSearch && matchesType && matchesStatus;
// Filter by host if hostFilter is set
const matchesHost =
!hostFilter || repo.hosts?.some((host) => host.id === hostFilter);
return matchesSearch && matchesType && matchesStatus && matchesHost;
});
// Sort repositories
@@ -237,6 +262,7 @@ const Repositories = () => {
filterStatus,
sortField,
sortDirection,
hostFilter,
]);
if (isLoading) {
@@ -421,6 +447,31 @@ const Repositories = () => {
</div>
</div>
{/* Host Filter Indicator */}
{hostFilter && filteredHost && (
<div className="flex items-center gap-2 px-3 py-2 bg-primary-50 dark:bg-primary-900 border border-primary-200 dark:border-primary-700 rounded-md">
<Server className="h-4 w-4 text-primary-600 dark:text-primary-400" />
<span className="text-sm text-primary-700 dark:text-primary-300">
Filtered by: {filteredHost.friendly_name}
</span>
<button
type="button"
onClick={() => {
setHostFilter("");
// Update URL to remove host parameter
const newSearchParams = new URLSearchParams(searchParams);
newSearchParams.delete("host");
navigate(`/repositories?${newSearchParams.toString()}`, {
replace: true,
});
}}
className="text-primary-500 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-200"
>
<X className="h-4 w-4" />
</button>
</div>
)}
{/* Security Filter */}
<div className="sm:w-48">
<select
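
The host filter on this page is driven entirely by the URL, so another page can deep-link into a pre-filtered repository list. A brief sketch, with an illustrative host id:

// Hedged sketch — build the deep link another view could navigate to.
const linkToHostRepositories = (hostId) => `/repositories?host=${hostId}`;
// e.g. navigate(linkToHostRepositories("host-123")); the useEffect above picks
// up the "host" search param and sets hostFilter, matchesHost then keeps only
// repositories whose hosts include that id, and the indicator chip removes the
// param again when dismissed.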

View File

@@ -0,0 +1,389 @@
import { useQuery } from "@tanstack/react-query";
import {
AlertTriangle,
ArrowLeft,
CheckCircle,
Container,
ExternalLink,
RefreshCw,
Server,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";
const ContainerDetail = () => {
const { id } = useParams();
const { data, isLoading, error } = useQuery({
queryKey: ["docker", "container", id],
queryFn: async () => {
const response = await api.get(`/docker/containers/${id}`);
return response.data;
},
refetchInterval: 30000,
});
const container = data?.container;
const similarContainers = data?.similarContainers || [];
if (isLoading) {
return (
<div className="flex items-center justify-center min-h-screen">
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
</div>
);
}
if (error || !container) {
return (
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-red-400" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Container not found
</h3>
<p className="mt-2 text-sm text-red-700 dark:text-red-300">
The container you're looking for doesn't exist or has been
removed.
</p>
</div>
</div>
</div>
<Link
to="/docker"
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
</div>
);
}
const getStatusBadge = (status) => {
const statusClasses = {
running:
"bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
exited: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
paused:
"bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200",
restarting:
"bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200",
};
return (
<span
className={`inline-flex items-center px-3 py-1 rounded-full text-sm font-medium ${
statusClasses[status] ||
"bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200"
}`}
>
{status}
</span>
);
};
return (
<div className="space-y-6">
{/* Header */}
<div>
<Link
to="/docker"
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
<div className="flex items-center">
<Container className="h-8 w-8 text-secondary-400 mr-3" />
<div>
<div className="flex items-center gap-3">
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
{container.name}
</h1>
{getStatusBadge(container.status)}
</div>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
Container ID: {container.container_id.substring(0, 12)}
</p>
</div>
</div>
</div>
{/* Overview Cards */}
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
{/* Update Status Card */}
{container.docker_images?.docker_image_updates &&
container.docker_images.docker_image_updates.length > 0 ? (
<div className="card p-4 bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800">
<div className="flex items-center">
<div className="flex-shrink-0">
<AlertTriangle className="h-5 w-5 text-yellow-600 dark:text-yellow-400 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-yellow-200">
Update Available
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-yellow-100 truncate">
{
container.docker_images.docker_image_updates[0]
.available_tag
}
</p>
</div>
</div>
</div>
) : (
<div className="card p-4 bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800">
<div className="flex items-center">
<div className="flex-shrink-0">
<CheckCircle className="h-5 w-5 text-green-600 dark:text-green-400 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-green-200">
Update Status
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-green-100">
Up to date
</p>
</div>
</div>
</div>
)}
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Server className="h-5 w-5 text-purple-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">Host</p>
<Link
to={`/hosts/${container.host?.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 truncate block"
>
{container.host?.friendly_name || container.host?.hostname}
</Link>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-green-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
State
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-white">
{container.state || container.status}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<RefreshCw className="h-5 w-5 text-secondary-400 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Last Checked
</p>
<p className="text-sm font-medium text-secondary-900 dark:text-white">
{formatRelativeTime(container.last_checked)}
</p>
</div>
</div>
</div>
</div>
{/* Container and Image Information - Side by Side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
{/* Container Details */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Container Information
</h3>
</div>
<div className="px-6 py-5">
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Container ID
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white font-mono break-all">
{container.container_id}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Image Tag
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{container.image_tag}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Created
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{formatRelativeTime(container.created_at)}
</dd>
</div>
{container.started_at && (
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Started
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{formatRelativeTime(container.started_at)}
</dd>
</div>
)}
{container.ports && Object.keys(container.ports).length > 0 && (
<div className="sm:col-span-2">
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Port Mappings
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
<div className="flex flex-wrap gap-2">
{Object.entries(container.ports).map(([key, value]) => (
<span
key={key}
className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200"
>
{key} {value}
</span>
))}
</div>
</dd>
</div>
)}
</div>
</div>
</div>
{/* Image Information */}
{container.docker_images && (
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Image Information
</h3>
</div>
<div className="px-6 py-5">
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Repository
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
<Link
to={`/docker/images/${container.docker_images.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
{container.docker_images.repository}
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Tag
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{container.docker_images.tag}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Source
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{container.docker_images.source}
</dd>
</div>
{container.docker_images.size_bytes && (
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Size
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{(
Number(container.docker_images.size_bytes) /
1024 /
1024
).toFixed(2)}{" "}
MB
</dd>
</div>
)}
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Image ID
</dt>
<dd className="mt-1 text-xs text-secondary-900 dark:text-white font-mono break-all">
{container.docker_images.image_id?.substring(0, 12)}...
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Created
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{formatRelativeTime(container.docker_images.created_at)}
</dd>
</div>
</div>
</div>
</div>
)}
</div>
{/* Similar Containers */}
{similarContainers.length > 0 && (
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Similar Containers (Same Image)
</h3>
</div>
<div className="px-6 py-5">
<ul className="divide-y divide-secondary-200 dark:divide-secondary-700">
{similarContainers.map((similar) => (
<li
key={similar.id}
className="py-4 flex items-center justify-between"
>
<div className="flex items-center">
<Container className="h-5 w-5 text-secondary-400 mr-3" />
<div>
<Link
to={`/docker/containers/${similar.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{similar.name}
</Link>
<p className="text-sm text-secondary-500 dark:text-secondary-400">
{similar.status}
</p>
</div>
</div>
</li>
))}
</ul>
</div>
</div>
)}
</div>
);
};
export default ContainerDetail;
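
A sketch of the response shape this page assumes from GET /docker/containers/:id, put together from the fields rendered above. All values are placeholders, and the port-mapping value is assumed to be a display string; none of this has been verified against the backend.

// Hedged sketch — assumed payload for the container detail view.
const exampleResponse = {
  container: {
    container_id: "f3a9c2e71b04a5d6e7f80912",
    name: "patchmon-web",
    status: "running",
    state: "running",
    image_tag: "latest",
    ports: { "80/tcp": "0.0.0.0:8080" }, // value shape assumed
    created_at: "2025-10-10T09:00:00Z",
    started_at: "2025-10-12T18:30:00Z",
    last_checked: "2025-10-13T20:00:00Z",
    host: { id: "host-1", friendly_name: "docker-01", hostname: "docker-01.local" },
    docker_images: {
      id: "img-1",
      repository: "example/patchmon",
      tag: "1.0.0",
      source: "docker-hub", // placeholder
      size_bytes: "104857600", // serialized as a string, hence Number(...) above
      image_id: "sha256:placeholder",
      created_at: "2025-10-01T00:00:00Z",
      docker_image_updates: [{ available_tag: "1.1.0" }],
    },
  },
  similarContainers: [{ id: "c-2", name: "patchmon-web-2", status: "running" }],
};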

View File

@@ -0,0 +1,354 @@
import { useQuery } from "@tanstack/react-query";
import {
AlertTriangle,
ArrowLeft,
Container,
ExternalLink,
Package,
RefreshCw,
Server,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api from "../../utils/api";
const HostDetail = () => {
const { id } = useParams();
const { data, isLoading, error } = useQuery({
queryKey: ["docker", "host", id],
queryFn: async () => {
const response = await api.get(`/docker/hosts/${id}`);
return response.data;
},
refetchInterval: 30000,
});
const host = data?.host;
const containers = data?.containers || [];
const images = data?.images || [];
const stats = data?.stats;
if (isLoading) {
return (
<div className="flex items-center justify-center min-h-screen">
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
</div>
);
}
if (error || !host) {
return (
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-red-400" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Host not found
</h3>
</div>
</div>
</div>
<Link
to="/docker"
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
</div>
);
}
return (
<div className="space-y-6">
<div>
<Link
to="/docker"
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
<div className="flex items-start justify-between">
<div className="flex items-center">
<Server className="h-8 w-8 text-secondary-400 mr-3" />
<div>
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
{host.friendly_name || host.hostname}
</h1>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
{host.ip}
</p>
</div>
</div>
<Link
to={`/hosts/${id}`}
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
View Full Host Details
<ExternalLink className="ml-2 h-4 w-4" />
</Link>
</div>
</div>
{/* Overview Cards */}
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-blue-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Total Containers
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.totalContainers || 0}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-green-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Running
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.runningContainers || 0}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-red-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Stopped
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.stoppedContainers || 0}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Package className="h-5 w-5 text-purple-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Images
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{stats?.totalImages || 0}
</p>
</div>
</div>
</div>
</div>
{/* Host Information */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Host Information
</h3>
</div>
<div className="px-6 py-5 space-y-6">
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Friendly Name
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.friendly_name}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Hostname
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.hostname}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
IP Address
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.ip}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
OS
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{host.os_type} {host.os_version}
</dd>
</div>
</div>
</div>
</div>
{/* Containers */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Containers ({containers.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Container Name
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Image
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Status
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{containers.map((container) => (
<tr key={container.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/docker/containers/${container.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{container.name}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{container.image_name}:{container.image_tag}
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
{container.status}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/docker/containers/${container.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
{/* Images */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Images ({images.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Repository
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Tag
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Source
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{images.map((image) => (
<tr key={image.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/docker/images/${image.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{image.repository}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
{image.tag}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{image.source}
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/docker/images/${image.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
</div>
);
};
export default HostDetail;


@@ -0,0 +1,439 @@
import { useQuery } from "@tanstack/react-query";
import {
AlertTriangle,
ArrowLeft,
Container,
ExternalLink,
Package,
RefreshCw,
Server,
Shield,
} from "lucide-react";
import { Link, useParams } from "react-router-dom";
import api, { formatRelativeTime } from "../../utils/api";
const ImageDetail = () => {
const { id } = useParams();
const { data, isLoading, error } = useQuery({
queryKey: ["docker", "image", id],
queryFn: async () => {
const response = await api.get(`/docker/images/${id}`);
return response.data;
},
refetchInterval: 30000,
});
const image = data?.image;
const hosts = data?.hosts || [];
const containers = image?.docker_containers || [];
const updates = image?.docker_image_updates || [];
if (isLoading) {
return (
<div className="flex items-center justify-center min-h-screen">
<RefreshCw className="h-8 w-8 animate-spin text-secondary-400" />
</div>
);
}
if (error || !image) {
return (
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-red-400" />
<div className="ml-3">
<h3 className="text-sm font-medium text-red-800 dark:text-red-200">
Image not found
</h3>
</div>
</div>
</div>
<Link
to="/docker"
className="mt-4 inline-flex items-center text-primary-600 hover:text-primary-900"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
</div>
);
}
return (
<div className="space-y-6">
<div>
<Link
to="/docker"
className="inline-flex items-center text-sm text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 mb-4"
>
<ArrowLeft className="h-4 w-4 mr-2" />
Back to Docker
</Link>
<div className="flex items-start justify-between">
<div className="flex items-center">
<Package className="h-8 w-8 text-secondary-400 mr-3" />
<div>
<h1 className="text-2xl font-bold text-secondary-900 dark:text-white">
{image.repository}:{image.tag}
</h1>
<p className="mt-1 text-sm text-secondary-600 dark:text-secondary-400">
Image ID: {image.image_id.substring(0, 12)}
</p>
</div>
</div>
</div>
</div>
{/* Overview Cards */}
<div className="grid grid-cols-1 gap-5 sm:grid-cols-2 lg:grid-cols-4">
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Container className="h-5 w-5 text-green-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Containers
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{containers.length}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Server className="h-5 w-5 text-purple-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Hosts
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{hosts.length}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<Package className="h-5 w-5 text-blue-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">Size</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{image.size_bytes ? (
<>{(Number(image.size_bytes) / 1024 / 1024).toFixed(0)} MB</>
) : (
"N/A"
)}
</p>
</div>
</div>
</div>
<div className="card p-4">
<div className="flex items-center">
<div className="flex-shrink-0">
<AlertTriangle className="h-5 w-5 text-warning-600 mr-2" />
</div>
<div className="w-0 flex-1">
<p className="text-sm text-secondary-500 dark:text-white">
Updates
</p>
<p className="text-xl font-semibold text-secondary-900 dark:text-white">
{updates.length}
</p>
</div>
</div>
</div>
</div>
{/* Available Updates with Digest Comparison */}
{updates.length > 0 && (
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
<div className="flex">
<AlertTriangle className="h-5 w-5 text-yellow-400" />
<div className="ml-3 flex-1">
<h3 className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
Updates Available
</h3>
<div className="mt-2 space-y-3">
{updates.map((update) => {
let digestInfo = null;
try {
if (update.changelog_url) {
digestInfo = JSON.parse(update.changelog_url);
}
} catch (_e) {
// Ignore parse errors
}
return (
<div
key={update.id}
className="bg-white dark:bg-secondary-800 rounded-lg p-3 border border-yellow-200 dark:border-yellow-700"
>
<div className="flex items-center justify-between mb-2">
<div className="flex items-center gap-2">
{update.is_security_update && (
<Shield className="h-4 w-4 text-red-500" />
)}
<span className="text-sm font-medium text-yellow-800 dark:text-yellow-200">
New version available:{" "}
<span className="font-semibold">
{update.available_tag}
</span>
</span>
</div>
{update.is_security_update && (
<span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-medium bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200">
Security
</span>
)}
</div>
{digestInfo &&
digestInfo.method === "digest_comparison" && (
<div className="mt-2 pt-2 border-t border-yellow-200 dark:border-yellow-700">
<p className="text-xs text-secondary-600 dark:text-secondary-400 mb-1">
Detected via digest comparison:
</p>
<div className="font-mono text-xs space-y-1">
<div className="text-red-600 dark:text-red-400">
<span className="font-bold">- Current: </span>
{digestInfo.current_digest}
</div>
<div className="text-green-600 dark:text-green-400">
<span className="font-bold">+ Available: </span>
{digestInfo.available_digest}
</div>
</div>
</div>
)}
</div>
);
})}
</div>
</div>
</div>
</div>
)}
{/* Image Information */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Image Information
</h3>
</div>
<div className="px-6 py-5 space-y-6">
<div className="grid grid-cols-1 gap-6 sm:grid-cols-2">
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Repository
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.repository}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Tag
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.tag}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Source
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.source}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Created
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.created_at
? formatRelativeTime(image.created_at)
: "Unknown"}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Image ID
</dt>
<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white">
{image.image_id}
</dd>
</div>
<div>
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Last Checked
</dt>
<dd className="mt-1 text-sm text-secondary-900 dark:text-white">
{image.last_checked
? formatRelativeTime(image.last_checked)
: "Never"}
</dd>
</div>
{image.digest && (
<div className="sm:col-span-2">
<dt className="text-sm font-medium text-secondary-500 dark:text-secondary-400">
Digest
</dt>
<dd className="mt-1 text-sm font-mono text-secondary-900 dark:text-white break-all">
{image.digest}
</dd>
</div>
)}
</div>
</div>
</div>
{/* Containers using this image */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Containers ({containers.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Container Name
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Status
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Host
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{containers.map((container) => (
<tr key={container.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/docker/containers/${container.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{container.name}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800 dark:bg-secondary-700 dark:text-secondary-200">
{container.status}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{container.host_id}
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/docker/containers/${container.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
{/* Hosts using this image */}
<div className="card">
<div className="px-6 py-5 border-b border-secondary-200 dark:border-secondary-700">
<h3 className="text-lg leading-6 font-medium text-secondary-900 dark:text-white">
Hosts ({hosts.length})
</h3>
</div>
<div className="overflow-x-auto">
<table className="min-w-full divide-y divide-secondary-200 dark:divide-secondary-700">
<thead className="bg-secondary-50 dark:bg-secondary-900">
<tr>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Host Name
</th>
<th
scope="col"
className="px-6 py-3 text-left text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
IP Address
</th>
<th
scope="col"
className="px-6 py-3 text-right text-xs font-medium text-secondary-500 dark:text-secondary-400 uppercase tracking-wider"
>
Actions
</th>
</tr>
</thead>
<tbody className="bg-white dark:bg-secondary-800 divide-y divide-secondary-200 dark:divide-secondary-700">
{hosts.map((host) => (
<tr key={host.id}>
<td className="px-6 py-4 whitespace-nowrap">
<Link
to={`/hosts/${host.id}`}
className="text-sm font-medium text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300"
>
{host.friendly_name || host.hostname}
</Link>
</td>
<td className="px-6 py-4 whitespace-nowrap text-sm text-secondary-500">
{host.ip}
</td>
<td className="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<Link
to={`/hosts/${host.id}`}
className="text-primary-600 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-300 inline-flex items-center"
>
View
<ExternalLink className="ml-1 h-4 w-4" />
</Link>
</td>
</tr>
))}
</tbody>
</table>
</div>
</div>
</div>
);
};
export default ImageDetail;

File diff suppressed because it is too large


@@ -51,7 +51,16 @@ export const dashboardAPI = {
getStats: () => api.get("/dashboard/stats"),
getHosts: () => api.get("/dashboard/hosts"),
getPackages: () => api.get("/dashboard/packages"),
getHostDetail: (hostId) => api.get(`/dashboard/hosts/${hostId}`),
getHostDetail: (hostId, params = {}) => {
const queryString = new URLSearchParams(params).toString();
const url = `/dashboard/hosts/${hostId}${queryString ? `?${queryString}` : ""}`;
return api.get(url);
},
getPackageTrends: (params = {}) => {
const queryString = new URLSearchParams(params).toString();
const url = `/dashboard/package-trends${queryString ? `?${queryString}` : ""}`;
return api.get(url);
},
getRecentUsers: () => api.get("/dashboard/recent-users"),
getRecentCollection: () => api.get("/dashboard/recent-collection"),
};
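For manual testing, the two parameterised dashboard endpoints added above can be hit directly. The host id and query parameters below are placeholders (the accepted parameter names are not shown in this diff), the base path mirrors the /api/v1 prefix the frontend is configured with, and authentication is omitted:

# placeholder host, id and params; add whatever auth your instance requires
curl -s "https://patchmon.example.com/api/v1/dashboard/hosts/<host-id>?limit=25"
curl -s "https://patchmon.example.com/api/v1/dashboard/package-trends?days=30"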
@@ -224,8 +233,8 @@ export const versionAPI = {
export const authAPI = {
login: (username, password) =>
api.post("/auth/login", { username, password }),
verifyTfa: (username, token) =>
api.post("/auth/verify-tfa", { username, token }),
verifyTfa: (username, token, remember_me = false) =>
api.post("/auth/verify-tfa", { username, token, remember_me }),
signup: (username, email, password, firstName, lastName) =>
api.post("/auth/signup", {
username,


@@ -24,8 +24,16 @@ export const getOSIcon = (osType) => {
// Linux distributions with authentic react-icons
if (os.includes("ubuntu")) return SiUbuntu;
if (os.includes("debian")) return SiDebian;
if (os.includes("centos") || os.includes("rhel") || os.includes("red hat"))
if (
os.includes("centos") ||
os.includes("rhel") ||
os.includes("red hat") ||
os.includes("almalinux") ||
os.includes("rocky")
)
return SiCentos;
if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
return SiLinux; // Use generic Linux icon for Oracle Linux
if (os.includes("fedora")) return SiFedora;
if (os.includes("arch")) return SiArchlinux;
if (os.includes("alpine")) return SiAlpinelinux;
@@ -72,6 +80,10 @@ export const getOSDisplayName = (osType) => {
if (os.includes("ubuntu")) return "Ubuntu";
if (os.includes("debian")) return "Debian";
if (os.includes("centos")) return "CentOS";
if (os.includes("almalinux")) return "AlmaLinux";
if (os.includes("rocky")) return "Rocky Linux";
if (os === "ol" || os.includes("oraclelinux") || os.includes("oracle linux"))
return "Oracle Linux";
if (os.includes("rhel") || os.includes("red hat"))
return "Red Hat Enterprise Linux";
if (os.includes("fedora")) return "Fedora";


@@ -43,5 +43,25 @@ export default defineConfig({
outDir: "dist",
sourcemap: process.env.NODE_ENV !== "production",
target: "es2018",
rollupOptions: {
output: {
manualChunks: {
// React core
"react-vendor": ["react", "react-dom", "react-router-dom"],
// Large utility libraries
"utils-vendor": ["axios", "@tanstack/react-query", "date-fns"],
// Chart libraries
"chart-vendor": ["chart.js", "react-chartjs-2"],
// Icon libraries
"icons-vendor": ["lucide-react", "react-icons"],
// DnD libraries
"dnd-vendor": [
"@dnd-kit/core",
"@dnd-kit/sortable",
"@dnd-kit/utilities",
],
},
},
},
},
});
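A quick way to confirm the new manualChunks split, assuming Vite's default assets/ output layout and hashed chunk names:

cd frontend
npm run build
# each vendor group should land in its own hashed chunk, e.g. react-vendor-<hash>.js
ls dist/assets | grep -E 'react-vendor|utils-vendor|chart-vendor|icons-vendor|dnd-vendor'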

package-lock.json (generated)

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "patchmon",
"version": "1.2.7",
"version": "1.2.9",
"description": "Linux Patch Monitoring System",
"license": "AGPL-3.0",
"private": true,

setup.sh

@@ -34,7 +34,7 @@ BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Global variables
SCRIPT_VERSION="self-hosting-install.sh v1.2.7-selfhost-2025-01-20-1"
SCRIPT_VERSION="self-hosting-install.sh v1.2.9-selfhost-2025-10-11-1"
DEFAULT_GITHUB_REPO="https://github.com/PatchMon/PatchMon.git"
FQDN=""
CUSTOM_FQDN=""
@@ -60,6 +60,9 @@ SERVICE_USE_LETSENCRYPT="true" # Will be set based on user input
SERVER_PROTOCOL_SEL="https"
SERVER_PORT_SEL="" # Will be set to BACKEND_PORT in init_instance_vars
SETUP_NGINX="true"
UPDATE_MODE="false"
SELECTED_INSTANCE=""
SELECTED_SERVICE_NAME=""
# Functions
print_status() {
@@ -642,31 +645,61 @@ EOF
# Setup database for instance
setup_database() {
print_info "Creating database: $DB_NAME"
print_info "Setting up database: $DB_NAME"
# Check if sudo is available for user switching
if command -v sudo >/dev/null 2>&1; then
# Drop and recreate database and user for clean state
sudo -u postgres psql -c "DROP DATABASE IF EXISTS $DB_NAME;" || true
sudo -u postgres psql -c "DROP USER IF EXISTS $DB_USER;" || true
# Check if user exists
user_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'" || echo "0")
# Create database and user
sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
if [ "$user_exists" = "1" ]; then
print_info "Database user $DB_USER already exists, skipping creation"
else
print_info "Creating database user $DB_USER"
sudo -u postgres psql -c "CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';"
fi
# Check if database exists
db_exists=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='$DB_NAME'" || echo "0")
if [ "$db_exists" = "1" ]; then
print_info "Database $DB_NAME already exists, skipping creation"
else
print_info "Creating database $DB_NAME"
sudo -u postgres psql -c "CREATE DATABASE $DB_NAME OWNER $DB_USER;"
fi
# Always grant privileges (in case they were revoked)
sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;"
else
# Alternative method for systems without sudo (run as postgres user directly)
print_warning "sudo not available, using alternative method for PostgreSQL setup"
# Switch to postgres user using su
su - postgres -c "psql -c \"DROP DATABASE IF EXISTS $DB_NAME;\"" || true
su - postgres -c "psql -c \"DROP USER IF EXISTS $DB_USER;\"" || true
su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
# Check if user exists
user_exists=$(su - postgres -c "psql -tAc \"SELECT 1 FROM pg_roles WHERE rolname='$DB_USER'\"" || echo "0")
if [ "$user_exists" = "1" ]; then
print_info "Database user $DB_USER already exists, skipping creation"
else
print_info "Creating database user $DB_USER"
su - postgres -c "psql -c \"CREATE USER $DB_USER WITH PASSWORD '$DB_PASS';\""
fi
# Check if database exists
db_exists=$(su - postgres -c "psql -tAc \"SELECT 1 FROM pg_database WHERE datname='$DB_NAME'\"" || echo "0")
if [ "$db_exists" = "1" ]; then
print_info "Database $DB_NAME already exists, skipping creation"
else
print_info "Creating database $DB_NAME"
su - postgres -c "psql -c \"CREATE DATABASE $DB_NAME OWNER $DB_USER;\""
fi
# Always grant privileges (in case they were revoked)
su - postgres -c "psql -c \"GRANT ALL PRIVILEGES ON DATABASE $DB_NAME TO $DB_USER;\""
fi
print_status "Database $DB_NAME created with user $DB_USER"
print_status "Database setup complete for $DB_NAME"
}
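The existence checks above are what make the rewritten setup_database safe to re-run. They can be reproduced manually with a sketch like this (placeholder database and user names; substitute the instance's actual DB_NAME and DB_USER):

sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='patchmon_user'"
sudo -u postgres psql -tAc "SELECT 1 FROM pg_database WHERE datname='patchmon_db'"
# a "1" on stdout means the object already exists and setup.sh will skip creating it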
# Clone application repository
@@ -834,7 +867,7 @@ EOF
cat > frontend/.env << EOF
VITE_API_URL=$SERVER_PROTOCOL_SEL://$FQDN/api/v1
VITE_APP_NAME=PatchMon
VITE_APP_VERSION=1.2.7
VITE_APP_VERSION=1.2.9
EOF
print_status "Environment files created"
@@ -1206,7 +1239,7 @@ create_agent_version() {
# Priority 2: Use fallback version if not found
if [ "$current_version" = "N/A" ] || [ -z "$current_version" ]; then
current_version="1.2.7"
current_version="1.2.9"
print_warning "Could not determine version, using fallback: $current_version"
fi
@@ -1550,11 +1583,295 @@ deploy_instance() {
:
}
# Detect existing PatchMon installations
detect_installations() {
local installations=()
# Find all directories in /opt that contain PatchMon installations
if [ -d "/opt" ]; then
for dir in /opt/*/; do
local dirname=$(basename "$dir")
# Skip backup directories
if [[ "$dirname" =~ \.backup\. ]]; then
continue
fi
# Check if it's a PatchMon installation
if [ -f "$dir/backend/package.json" ] && grep -q "patchmon" "$dir/backend/package.json" 2>/dev/null; then
installations+=("$dirname")
fi
done
fi
echo "${installations[@]}"
}
# Select installation to update
select_installation_to_update() {
local installations=($(detect_installations))
if [ ${#installations[@]} -eq 0 ]; then
print_error "No existing PatchMon installations found in /opt"
exit 1
fi
print_info "Found ${#installations[@]} existing installation(s):"
echo ""
local i=1
declare -A install_map
for install in "${installations[@]}"; do
# Get current version if possible
local version="unknown"
if [ -f "/opt/$install/backend/package.json" ]; then
version=$(grep '"version"' "/opt/$install/backend/package.json" | head -1 | sed 's/.*"version": "\([^"]*\)".*/\1/')
fi
# Get service status - try multiple naming conventions
# Convention 1: Just the install name (e.g., patchmon.internal)
local service_name="$install"
# Convention 2: patchmon. prefix (e.g., patchmon.patchmon.internal)
local alt_service_name1="patchmon.$install"
# Convention 3: patchmon- prefix with underscores (e.g., patchmon-patchmon_internal)
local alt_service_name2="patchmon-$(echo "$install" | tr '.' '_')"
local status="unknown"
# Try convention 1 first (most common)
if systemctl is-active --quiet "$service_name" 2>/dev/null; then
status="running"
elif systemctl is-enabled --quiet "$service_name" 2>/dev/null; then
status="stopped"
# Try convention 2
elif systemctl is-active --quiet "$alt_service_name1" 2>/dev/null; then
status="running"
service_name="$alt_service_name1"
elif systemctl is-enabled --quiet "$alt_service_name1" 2>/dev/null; then
status="stopped"
service_name="$alt_service_name1"
# Try convention 3
elif systemctl is-active --quiet "$alt_service_name2" 2>/dev/null; then
status="running"
service_name="$alt_service_name2"
elif systemctl is-enabled --quiet "$alt_service_name2" 2>/dev/null; then
status="stopped"
service_name="$alt_service_name2"
fi
printf "%2d. %-30s (v%-10s - %s)\n" "$i" "$install" "$version" "$status"
install_map[$i]="$install"
# Store the service name for later use
declare -g "service_map_$i=$service_name"
i=$((i + 1))
done
echo ""
while true; do
read_input "Select installation number to update" SELECTION "1"
if [[ "$SELECTION" =~ ^[0-9]+$ ]] && [ -n "${install_map[$SELECTION]}" ]; then
SELECTED_INSTANCE="${install_map[$SELECTION]}"
# Get the stored service name
local varname="service_map_$SELECTION"
SELECTED_SERVICE_NAME="${!varname}"
print_status "Selected: $SELECTED_INSTANCE"
print_info "Service: $SELECTED_SERVICE_NAME"
return 0
else
print_error "Invalid selection. Please enter a number from 1 to ${#installations[@]}"
fi
done
}
# Update existing installation
update_installation() {
local instance_dir="/opt/$SELECTED_INSTANCE"
local service_name="$SELECTED_SERVICE_NAME"
print_info "Updating PatchMon installation: $SELECTED_INSTANCE"
print_info "Installation directory: $instance_dir"
print_info "Service name: $service_name"
# Verify it's a git repository
if [ ! -d "$instance_dir/.git" ]; then
print_error "Installation directory is not a git repository"
print_error "Cannot perform git-based update"
exit 1
fi
# Add git safe.directory to avoid ownership issues when running as root
print_info "Configuring git safe.directory..."
git config --global --add safe.directory "$instance_dir" 2>/dev/null || true
# Load existing .env to get database credentials
if [ -f "$instance_dir/backend/.env" ]; then
source "$instance_dir/backend/.env"
print_status "Loaded existing configuration"
# Parse DATABASE_URL to extract credentials
# Format: postgresql://user:password@host:port/database
if [ -n "$DATABASE_URL" ]; then
# Extract components using regex
DB_USER=$(echo "$DATABASE_URL" | sed -n 's|postgresql://\([^:]*\):.*|\1|p')
DB_PASS=$(echo "$DATABASE_URL" | sed -n 's|postgresql://[^:]*:\([^@]*\)@.*|\1|p')
DB_HOST=$(echo "$DATABASE_URL" | sed -n 's|.*@\([^:]*\):.*|\1|p')
DB_PORT=$(echo "$DATABASE_URL" | sed -n 's|.*:\([0-9]*\)/.*|\1|p')
DB_NAME=$(echo "$DATABASE_URL" | sed -n 's|.*/\([^?]*\).*|\1|p')
print_info "Database: $DB_NAME (user: $DB_USER)"
else
print_error "DATABASE_URL not found in .env file"
exit 1
fi
else
print_error "Cannot find .env file at $instance_dir/backend/.env"
exit 1
fi
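The sed extraction above can be sanity-checked in isolation. With a fabricated DATABASE_URL (illustrative only, not a real credential), the five fields come out as shown in the trailing comments:

DATABASE_URL="postgresql://patchmon_user:s3cret@localhost:5432/patchmon_db"
echo "$DATABASE_URL" | sed -n 's|postgresql://\([^:]*\):.*|\1|p'       # patchmon_user
echo "$DATABASE_URL" | sed -n 's|postgresql://[^:]*:\([^@]*\)@.*|\1|p' # s3cret
echo "$DATABASE_URL" | sed -n 's|.*@\([^:]*\):.*|\1|p'                 # localhost
echo "$DATABASE_URL" | sed -n 's|.*:\([0-9]*\)/.*|\1|p'                # 5432
echo "$DATABASE_URL" | sed -n 's|.*/\([^?]*\).*|\1|p'                  # patchmon_db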
# Select branch/version to update to
select_branch
print_info "Updating to: $DEPLOYMENT_BRANCH"
echo ""
read_yes_no "Proceed with update? This will pull new code and restart services" CONFIRM_UPDATE "y"
if [ "$CONFIRM_UPDATE" != "y" ]; then
print_warning "Update cancelled by user"
exit 0
fi
# Stop the service
print_info "Stopping service: $service_name"
systemctl stop "$service_name" || true
# Create backup directory
local timestamp=$(date +%Y%m%d_%H%M%S)
local backup_dir="$instance_dir.backup.$timestamp"
local db_backup_file="$backup_dir/database_backup_$timestamp.sql"
print_info "Creating backup directory: $backup_dir"
mkdir -p "$backup_dir"
# Backup database
print_info "Backing up database: $DB_NAME"
if PGPASSWORD="$DB_PASS" pg_dump -h "$DB_HOST" -U "$DB_USER" -d "$DB_NAME" -F c -f "$db_backup_file" 2>/dev/null; then
print_status "Database backup created: $db_backup_file"
else
print_warning "Database backup failed, but continuing with code backup"
fi
# Backup code
print_info "Backing up code files..."
cp -r "$instance_dir" "$backup_dir/code"
print_status "Code backup created"
# Update code
print_info "Pulling latest code from branch: $DEPLOYMENT_BRANCH"
cd "$instance_dir"
# Clean up any untracked files that might conflict with incoming changes
print_info "Cleaning up untracked files to prevent merge conflicts..."
git clean -fd
# Reset any local changes to ensure clean state
print_info "Resetting local changes to ensure clean state..."
git reset --hard HEAD
# Fetch latest changes
git fetch origin
# Checkout the selected branch/tag
git checkout "$DEPLOYMENT_BRANCH"
git pull origin "$DEPLOYMENT_BRANCH" || git pull # For tags, just pull
print_status "Code updated successfully"
# Update dependencies
print_info "Updating backend dependencies..."
cd "$instance_dir/backend"
npm install --production --ignore-scripts
print_info "Updating frontend dependencies..."
cd "$instance_dir/frontend"
npm install --ignore-scripts
# Build frontend
print_info "Building frontend..."
npm run build
# Run database migrations and generate Prisma client
print_info "Running database migrations..."
cd "$instance_dir/backend"
npx prisma generate
npx prisma migrate deploy
# Start the service
print_info "Starting service: $service_name"
systemctl start "$service_name"
# Wait a moment and check status
sleep 3
if systemctl is-active --quiet "$service_name"; then
print_success "✅ Update completed successfully!"
print_status "Service $service_name is running"
# Get new version
local new_version=$(grep '"version"' "$instance_dir/backend/package.json" | head -1 | sed 's/.*"version": "\([^"]*\)".*/\1/')
print_info "Updated to version: $new_version"
echo ""
print_info "Backup Information:"
print_info " Code backup: $backup_dir/code"
print_info " Database backup: $db_backup_file"
echo ""
print_info "To restore database if needed:"
print_info " PGPASSWORD=\"$DB_PASS\" pg_restore -h \"$DB_HOST\" -U \"$DB_USER\" -d \"$DB_NAME\" -c \"$db_backup_file\""
echo ""
else
print_error "Service failed to start after update"
echo ""
print_warning "ROLLBACK INSTRUCTIONS:"
print_info "1. Restore code:"
print_info " sudo rm -rf $instance_dir"
print_info " sudo mv $backup_dir/code $instance_dir"
echo ""
print_info "2. Restore database:"
print_info " PGPASSWORD=\"$DB_PASS\" pg_restore -h \"$DB_HOST\" -U \"$DB_USER\" -d \"$DB_NAME\" -c \"$db_backup_file\""
echo ""
print_info "3. Restart service:"
print_info " sudo systemctl start $service_name"
echo ""
print_info "Check logs: journalctl -u $service_name -f"
exit 1
fi
}
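After update_installation finishes, a few manual checks are worth running. The service name and install directory below are placeholders for whatever select_installation_to_update resolved:

systemctl status patchmon.example.internal --no-pager
journalctl -u patchmon.example.internal -n 50 --no-pager
cd /opt/patchmon.example.internal/backend && npx prisma migrate status   # reports whether all migrations are applied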
# Main script execution
main() {
# Log script entry
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Interactive installation started" >> "$DEBUG_LOG"
# Parse command-line arguments
if [ "$1" = "--update" ]; then
UPDATE_MODE="true"
fi
# Log script entry
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Script started - Update mode: $UPDATE_MODE" >> "$DEBUG_LOG"
# Handle update mode
if [ "$UPDATE_MODE" = "true" ]; then
print_banner
print_info "🔄 PatchMon Update Mode"
echo ""
# Select installation to update
select_installation_to_update
# Perform update
update_installation
exit 0
fi
# Normal installation mode
# Run interactive setup
interactive_setup
@@ -1588,5 +1905,30 @@ main() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] deploy_instance function completed" >> "$DEBUG_LOG"
}
# Run main function (no arguments needed for interactive mode)
main
# Show usage/help
show_usage() {
echo "PatchMon Self-Hosting Installation & Update Script"
echo "Version: $SCRIPT_VERSION"
echo ""
echo "Usage:"
echo " $0 # Interactive installation (default)"
echo " $0 --update # Update existing installation"
echo " $0 --help # Show this help message"
echo ""
echo "Examples:"
echo " # New installation:"
echo " sudo bash $0"
echo ""
echo " # Update existing installation:"
echo " sudo bash $0 --update"
echo ""
}
# Check for help flag
if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
show_usage
exit 0
fi
# Run main function
main "$@"