Mirror of https://github.com/9technologygroup/patchmon.net.git (synced 2025-10-28 18:43:32 +00:00)
Compare commits: latest...fb0f6ba028 (335 commits)
`.github/workflows/app_build.yml` (new file, vendored, +25 lines)

```yaml
name: Build on Merge

on:
  push:
    branches:
      - main
    paths-ignore:
      - 'docker/**'

jobs:
  deploy:
    runs-on: self-hosted
    steps:
      - name: Checkout code
        uses: actions/checkout@v5

      - name: Run rebuild script
        run: /root/patchmon/platform/scripts/app_build.sh ${{ github.ref_name }}

  rebuild-pmon:
    runs-on: self-hosted
    needs: deploy
    if: github.ref_name == 'dev'
    steps:
      - name: Rebuild pmon
        run: /root/patchmon/platform/scripts/manage_pmon_auto.sh
```
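Both jobs delegate the real work to scripts that live on the self-hosted runner, outside the repository, so their contents are not part of this diff. A minimal sketch of what `/root/patchmon/platform/scripts/app_build.sh` might look like, assuming a conventional pull/install/build/restart flow (the checkout path, package layout, and systemd unit name below are illustrative assumptions, not taken from the repo):

```bash
#!/usr/bin/env bash
# Hypothetical sketch of app_build.sh; the real script is not in this diff.
# $1 is the branch name the workflow passes in (github.ref_name).
set -euo pipefail

BRANCH="${1:-main}"
APP_DIR="/root/patchmon/app"   # assumed checkout location

cd "$APP_DIR"
git fetch origin "$BRANCH"
git checkout "$BRANCH"
git reset --hard "origin/$BRANCH"

# Install dependencies and rebuild the frontend bundle
npm ci --prefix backend
npm ci --prefix frontend
npm run build --prefix frontend

# Apply pending database migrations, then restart the backend
(cd backend && npx prisma migrate deploy)
systemctl restart patchmon-server   # assumed systemd unit name
```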
`.github/workflows/code_quality.yml` (new file, vendored, +28 lines)

```yaml
name: Code quality

on:
  push:
    paths-ignore:
      - 'docker/**'
  pull_request:
    paths-ignore:
      - 'docker/**'

jobs:
  check:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          persist-credentials: false

      - name: Setup Biome
        uses: biomejs/setup-biome@v2
        with:
          version: latest

      - name: Run Biome
        run: biome ci .
```
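`biome ci .` runs Biome's lint and format checks in read-only mode, which is what makes it safe for CI. To reproduce the same checks locally before pushing (assuming Biome is not installed globally), something like this should work:

```bash
# Run the same read-only checks the workflow runs (no files are modified)
npx @biomejs/biome ci .

# Or, during development, check and apply safe fixes in place
npx @biomejs/biome check --write .
```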
`.github/workflows/docker.yml` (modified, vendored, 41 lines changed)

```diff
@@ -1,13 +1,21 @@
 name: Build and Push Docker Images
 
 on:
   push:
     branches:
       - main
     tags:
       - 'v*'
   pull_request:
     branches:
       - main
+      - dev
+  release:
+    types:
+      - published
+  workflow_dispatch:
+    inputs:
+      push:
+        description: Push images to registry
+        required: false
+        type: boolean
+        default: false
 
 env:
   REGISTRY: ghcr.io
@@ -26,29 +34,30 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v5
 
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Log in to Container Registry
+      - name: Log in to container registry
         uses: docker/login-action@v3
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.repository_owner }}
-          # Using PAT as a hack due to issues with GITHUB_TOKEN and package permissions
-          # This should be reverted to use GITHUB_TOKEN once a solution is discovered.
-          password: ${{ secrets.GHCR_PAT }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
 
       - name: Extract metadata (tags, labels)
         id: meta
         uses: docker/metadata-action@v5
         with:
-          images: ${{ env.REGISTRY }}/${{ github.repository_owner }}/patchmon-${{ matrix.image }}
+          images: ${{ env.REGISTRY }}/${{ github.repository }}-${{ matrix.image }}
           tags: |
             type=ref,event=pr
             type=semver,pattern={{version}}
             type=semver,pattern={{major}}.{{minor}}
             type=semver,pattern={{major}}
             type=raw,value=latest,enable={{is_default_branch}}
             type=edge,branch=main
 
       - name: Build and push ${{ matrix.image }} image
         uses: docker/build-push-action@v6
@@ -56,7 +65,11 @@ jobs:
           context: .
           file: docker/${{ matrix.image }}.Dockerfile
           platforms: linux/amd64,linux/arm64
-          push: true
+          # Push if:
+          # - Event is not workflow_dispatch OR input 'push' is true
+          # AND
+          # - Event is not pull_request OR the PR is from the same repository (to avoid pushing from forks)
+          push: ${{ (github.event_name != 'workflow_dispatch' || inputs.push == 'true') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
           cache-from: type=gha,scope=${{ matrix.image }}
```
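The `images:` change renames the published packages: `github.repository` already contains the owner, so `ghcr.io/<owner>/patchmon-<image>` becomes `ghcr.io/<owner>/<repo>-<image>`. Taking the mirrored repository `9technologygroup/patchmon.net` as an example, and assuming the build matrix covers `backend` and `frontend` images (the matrix definition sits outside these hunks), pulls would shift roughly like this:

```bash
# Old naming: ghcr.io/<owner>/patchmon-<image>
docker pull ghcr.io/9technologygroup/patchmon-backend:latest

# New naming: ghcr.io/<owner>/<repo>-<image>
docker pull ghcr.io/9technologygroup/patchmon.net-backend:latest
```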
`.gitignore` (modified, vendored, 12 lines changed)

```diff
@@ -71,6 +71,13 @@ jspm_packages/
 .cache/
 public
 
+# Exception: Allow frontend/public/assets for logo files
+!frontend/public/
+!frontend/public/assets/
+!frontend/public/assets/*.png
+!frontend/public/assets/*.svg
+!frontend/public/assets/*.jpg
+
 # Storybook build outputs
 .out
 .storybook-out
@@ -130,6 +137,9 @@ agents/*.log
 test-results/
 playwright-report/
 test-results.xml
+test_*.sh
+test-*.sh
+*.code-workspace
 
 # Package manager lock files (uncomment if you want to ignore them)
 # package-lock.json
@@ -145,4 +155,4 @@ setup-installer-site.sh
 install-server.*
 notify-clients-upgrade.sh
 debug-agent.sh
-docker/agents
+docker/compose_dev_*
```
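A `!` pattern in `.gitignore` cannot re-include a file when one of its parent directories is itself excluded, which is why the exception block first un-ignores `frontend/public/` and `frontend/public/assets/` before allowing the individual extensions. `git check-ignore -v` reports which rule decides a given path (the file names below are hypothetical):

```bash
# Ask git which ignore rule (if any) matches a path; -v prints the winning pattern
git check-ignore -v frontend/public/assets/logo.png   # should report !frontend/public/assets/*.png
git check-ignore -v some/other/public/bundle.js       # should report the blanket "public" rule
```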
`LICENSE` (new file, +674 lines)

```
                    GNU GENERAL PUBLIC LICENSE
                       Version 3, 29 June 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.
```

(The remainder of the file is the standard, unmodified GPL-3.0 text, available at <https://www.gnu.org/licenses/gpl-3.0.txt>.)
`README.md` (modified, 286 lines changed)
````markdown
@@ -1,127 +1,59 @@
# PatchMon - Linux Patch Monitoring made Simple

[](https://patchmon.net)
[](https://patchmon.net/discord)
[](https://github.com/9technologygroup/patchmon.net)
[](https://github.com/users/9technologygroup/projects/1)
[](https://docs.patchmon.net/)

---

## Please STAR this repo :D

## Purpose

PatchMon provides centralized patch management across diverse server environments. Agents communicate outbound-only to the PatchMon server, eliminating inbound ports on monitored hosts while delivering comprehensive visibility and safe automation.

## Features

### Users & Authentication
- Multi-user accounts (admin and standard users)
- Email/username-based login
- Optional Two‑Factor Authentication (TFA/MFA) with verification flow
- First‑time admin bootstrap flow (no default credentials; secure setup)
- Self‑registration toggle in settings (enable/disable public signup)

### Roles, Permissions & RBAC
- Built‑in roles: `admin`, `user`
- Fine‑grained permission flags (e.g., view/manage hosts, packages, users, reports, settings)
- Server‑side enforcement for protected routes and UI guards per permission

### Dashboard
- Customisable dashboard with per‑user card layout and ordering
- Role/permission‑aware defaults on first login
- “Reset to Defaults” uses consistent server‑provided defaults
- Cards include: Total Hosts, Needs Updating, Up‑to‑Date Hosts, Host Groups, Outdated Packages, Security Updates, Package Priority, Repositories, Users, OS Distribution (pie/bar), Update Status, Recent Collection, Recent Users, Quick Stats

### Users & Authentication
- Multi-user accounts (admin and standard users)
- Roles, Permissions & RBAC

### Hosts & Inventory
- Host inventory with key attributes and OS details
- Host inventory/groups with key attributes and OS details
- Host grouping (create and manage host groups)
- OS distribution summaries and visualisations
- Recent telemetry collection indicator

### Packages & Updates
- Package inventory across hosts
- Outdated packages overview and counts
- Security updates highlight
- Update status breakdown (up‑to‑date vs needs updates)

### Repositories
- Repositories per host tracking
- Repository module pages and totals

### Agent & Data Collection
- Outbound‑only agent communication (no inbound ports required on hosts)
- Agent version management and script content stored in DB
- Version marking (current/default) with update history

### Settings & Configuration
- Server URL/protocol/host/port
- Update interval and auto‑update toggle
- Public signup toggle and default user role selection
- Repository settings: GitHub repo URL, repository type, SSH key path
- Rate‑limit windows and thresholds for API/auth/agent

### Admin & User Management
- Admin user CRUD (create, list, update, delete)
- Password reset (admin‑initiated)
- Role assignment on user create/update

### Reporting & Analytics
- Dashboard stats and card‑level metrics
- OS distribution charts (pie/bar)
- Update status and recent activity summaries
- Signup toggle and default user role selection

### API & Integrations
- REST API under `/api/v1` with JWT auth
- Consistent JSON responses; errors with appropriate status codes
- CORS configured per server settings
- Proxmox LXC Auto-Enrollment - Automatically discover and enroll LXC containers from Proxmox hosts

### Security
- JWT‑secured API with short, scoped tokens
- Permissions enforced server‑side on every route
- Rate limiting for general, auth, and agent endpoints
- Outbound‑only agent model reduces attack surface

### Deployment & Operations
- One‑line self‑host installer (Ubuntu/Debian)
- Automated provisioning: Node.js, PostgreSQL, nginx
- Prisma migrations and client generation
- Docker installation & One‑line self‑host installer (Ubuntu/Debian)
- systemd service for backend lifecycle
- nginx vhost for frontend + API proxy; optional Let’s Encrypt integration
- Consolidated deployment info file with commands and paths

### UX & Frontend
- Vite + React single‑page app
- Protected routes with permission checks
- Theming and modern components (icons, modals, notifications)

### Observability & Logging
- Structured server logs
- Deployment logs copied to instance dir for later review

### Road‑Readiness
- Works for internal (HTTP) and public (HTTPS) deployments
- Defaults safe for first‑time setup; admin created interactively

## Communication Model

- Outbound-only agents: servers initiate communication to PatchMon
- No inbound connections required on monitored servers
- Secure server-side API with JWT authentication and rate limiting

## Architecture

- Backend: Node.js/Express + Prisma + PostgreSQL
- Frontend: Vite + React
- Reverse proxy: nginx
- Database: PostgreSQL
- System service: systemd-managed backend

```
+----------------------+  HTTPS   +--------------------+  HTTP   +------------------------+  TCP   +---------------+
| End Users (Browser)  | -------> | nginx              | ------> | Backend (Node/Express) | -----> | PostgreSQL    |
| Admin UI / Frontend  |          | serve FE, proxy API|         | /api, auth, Prisma     |        | Database      |
+----------------------+          +--------------------+         +------------------------+        +---------------+

Agents (Outbound Only)
+---------------------------+   HTTPS   +------------------------+
| Agents on your servers    | --------> | Backend API (/api/v1)  |
+---------------------------+           +------------------------+

Operational
- systemd manages backend service
- certbot/nginx for TLS (public)
- setup.sh bootstraps OS, app, DB, config
```

## Getting Started
````
@@ -131,12 +63,43 @@ Managed, zero-maintenance PatchMon hosting. Stay tuned.

### Self-hosted Installation

#### Docker (preferred)

For getting started with Docker, see the [Docker documentation](https://github.com/PatchMon/PatchMon/blob/main/docker/README.md).

#### Native Install (advanced/non-docker)

Run on a clean Ubuntu/Debian server with internet access:

#### Debian:
```bash
apt update -y
apt upgrade -y
apt install curl -y
```

#### Ubuntu:
```bash
apt-get update -y
apt-get upgrade -y
apt-get install curl -y
```

#### Install Script
```bash
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh
```

#### Update Script (--update flag)
```bash
curl -fsSL -o setup.sh https://raw.githubusercontent.com/PatchMon/PatchMon/refs/heads/main/setup.sh && chmod +x setup.sh && bash setup.sh --update
```

#### Minimum specs for building

- CPU: 2 vCPU
- RAM: 2GB
- Disk: 15GB

During setup you’ll be asked:
- Domain/IP: public DNS or local IP (default: `patchmon.internal`)
- SSL/HTTPS: `y` for public deployments with a public IP, `n` for internal networks

@@ -155,49 +118,54 @@ After installation:
- Visit `http(s)://<your-domain>` and complete first-time admin setup
- See all useful info in `deployment-info.txt`

## Forcing updates after host package changes
If you perform a manual package update on a host and want the results reflected in PatchMon sooner than the next scheduled run, trigger an update manually:
```bash
/usr/local/bin/patchmon-agent.sh update
```

This will send the results to PatchMon immediately.
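
For recurring pushes outside the installer-managed schedule, the same command can be driven from cron. The sketch below mirrors the hourly crontab entry the installer itself writes, assuming the agent script's default path:

```bash
# Sketch: run the agent update hourly via cron, as the installer does by default.
# Assumes the agent is installed at /usr/local/bin/patchmon-agent.sh.
( crontab -l 2>/dev/null; echo "0 * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1" ) | crontab -
```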

## Communication Model

- Outbound-only agents: servers initiate communication to PatchMon
- No inbound connections required on monitored servers
- Secure server-side API with JWT authentication and rate limiting

## Architecture

- Backend: Node.js/Express + Prisma + PostgreSQL
- Frontend: Vite + React
- Reverse proxy: nginx
- Database: PostgreSQL
- System service: systemd-managed backend

```mermaid
flowchart LR
    A[End Users / Browser<br>Admin UI / Frontend] -- HTTPS --> B[nginx<br>serve FE, proxy API]
    B -- HTTP --> C["Backend<br>(Node/Express)<br>/api, auth, Prisma"]
    C -- TCP --> D[PostgreSQL<br>Database]

    E["Agents on your servers (Outbound Only)"] -- HTTPS --> F["Backend API<br>(/api/v1)"]
```

Operational
- systemd manages backend service
- certbot/nginx for TLS (public)
- setup.sh bootstraps OS, app, DB, config
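
For day-to-day operations, the commands below are a quick reference. They assume the agent-side systemd unit name `patchmon-agent` created by the install script shown later on this page; a native server install runs the backend under its own systemd unit, whose name may vary per setup.

```bash
# Operational quick reference (agent side), matching the systemd unit
# created by the install script later on this page.
systemctl status patchmon-agent        # is the agent service running?
journalctl -u patchmon-agent -f        # follow agent service logs
systemctl restart patchmon-agent       # restart after config changes
```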

## Support

- Discord: https://discord.gg/S7RXUHwg
- Discord: [https://patchmon.net/discord](https://patchmon.net/discord)
- Email: support@patchmon.net

## Roadmap

- PatchMon Cloud (managed offering)
- Additional dashboards and reporting widgets
- More OS distributions and agent enhancements
- Advanced workflow automations and approvals
- Roadmap board: https://github.com/orgs/PatchMon/projects/2

Roadmap board: https://github.com/users/9technologygroup/projects/1

## Security

- Outbound-only agent communications; no inbound ports on monitored hosts
- JWT-based API auth, rate limiting, role/permission checks
- Follow least-privilege defaults; sensitive operations audited

## Support Methods

- Community: Discord for quick questions and feedback
- Email: SLA-backed assistance for incidents and issues
- GitHub Issues: bug reports and feature requests

## License

AGPLv3 (More information on this soon)

## Links

- Repository: https://github.com/9technologygroup/patchmon.net/
- Raw installer: https://raw.githubusercontent.com/9technologygroup/patchmon.net/main/setup.sh

---


# PatchMon

[](https://discord.gg/S7RXUHwg)
[](https://github.com/9technologygroup/patchmon.net)
[](https://github.com/users/9technologygroup/projects/1)

- AGPLv3 (More information on this soon)


---

@@ -221,14 +189,19 @@ We welcome contributions from the community! Here's how you can get involved:
   git checkout -b fix/your-bug-fix
   ```

4. **Install Dependencies and Setup Hooks**
   ```bash
   npm install
   npm run prepare
   ```

4. **Make Your Changes**
5. **Make Your Changes**
   - Write clean, well-documented code
   - Follow existing code style and patterns
   - Add tests for new functionality
   - Update documentation as needed

5. **Test Your Changes**
6. **Test Your Changes**
   ```bash
   # Run backend tests
   cd backend
@@ -239,56 +212,28 @@ We welcome contributions from the community! Here's how you can get involved:
   npm test
   ```

6. **Commit and Push**
7. **Commit and Push**
   ```bash
   git add .
   git commit -m "Add: descriptive commit message"
   git push origin feature/your-feature-name
   ```

7. **Create a Pull Request**
8. **Create a Pull Request**
   - Go to your fork on GitHub
   - Click "New Pull Request"
   - Provide a clear description of your changes
   - Link any related issues

### Contribution Guidelines
- **Code Style**: Follow the existing code patterns and ESLint configuration
- **Code Style**: Follow the existing code patterns and Biome configuration
- **Commits**: Use conventional commit messages (feat:, fix:, docs:, etc.)
- **Testing**: Ensure all tests pass and add tests for new features
- **Documentation**: Update README and code comments as needed
- **Issues**: Check existing issues before creating new ones

### Areas We Need Help With
- 🐳 **Docker & Containerization** (led by @Adam20054)
- 🔄 **CI/CD Pipelines** (led by @tigattack)
- 🔒 **Security Improvements** - Security audits, vulnerability assessments, and security feature enhancements
- ⚡ **Performance for Large Scale Deployments** - Database optimization, caching strategies, and horizontal scaling
- 📚 **Documentation** improvements
- 🧪 **Testing** coverage
- 🌐 **Internationalization** (i18n)
- 📱 **Mobile** responsive improvements

---

## 🗺️ Roadmap

Check out our [public roadmap](https://github.com/users/9technologygroup/projects/1) to see what we're working on and what's coming next!

**Upcoming Features:**
- 🐳 Docker Compose deployment
- 🔄 Automated CI/CD pipelines
- 📊 Advanced reporting and analytics
- 🔔 Enhanced notification system
- 📱 Mobile application
- 🔄 Patch management workflows and policies
- 👥 Users inventory management
- 🔍 Services and ports monitoring
- 🖥️ Proxmox integration for auto LXC discovery and registration
- 📧 Notifications via Slack/Email

---

## 🏢 Enterprise & Custom Solutions

@@ -316,22 +261,6 @@ Check out our [public roadmap](https://github.com/users/9technologygroup/project


---

## 📞 Support & Community

### Get Help
- 💬 **Discord Community**: [Join our Discord](https://discord.gg/S7RXUHwg) for real-time support and discussions
- 📧 **Email Support**: support@patchmon.net
- 📚 **Documentation**: Check our wiki and documentation
- 🐛 **Bug Reports**: Use GitHub Issues

### Community
- 🌟 **Star the Project**: Show your support by starring this repository
- 🍴 **Fork & Contribute**: Help improve PatchMon
- 📢 **Share**: Tell others about PatchMon
- 💡 **Feature Requests**: Suggest new features via GitHub Issues

---

## 🙏 Acknowledgments
@@ -341,6 +270,9 @@ Check out our [public roadmap](https://github.com/users/9technologygroup/project
- **@Adam20054** - For working on Docker Compose deployment
- **@tigattack** - For working on GitHub CI/CD pipelines
- **Cloud X** and **Crazy Dead** - For moderating our Discord server and keeping the community awesome
- **Beta Testers** - For keeping me awake at night
- **My family** - For understanding my passion

### Contributors
Thank you to all our contributors who help make PatchMon better every day!
@@ -349,9 +281,9 @@

## 🔗 Links

- **Website**: [patchmon.net](https://patchmon.net)
- **Discord**: [discord.gg/S7RXUHwg](https://discord.gg/S7RXUHwg)
- **Discord**: [https://patchmon.net/discord](https://patchmon.net/discord)
- **Roadmap**: [GitHub Projects](https://github.com/users/9technologygroup/projects/1)
- **Documentation**: [Coming Soon]
- **Documentation**: [https://docs.patchmon.net](https://docs.patchmon.net)
- **Support**: support@patchmon.net

---

@@ -360,7 +292,7 @@

**Made with ❤️ by the PatchMon Team**

[](https://discord.gg/S7RXUHwg)
[](https://github.com/9technologygroup/patchmon.net)
[](https://patchmon.net/discord)
[](https://github.com/PatchMon/PatchMon)

</div>

File diff suppressed because it is too large (Load Diff)
BIN  agents/patchmon-agent-linux-386 (Executable file) - Binary file not shown.
BIN  agents/patchmon-agent-linux-amd64 (Executable file) - Binary file not shown.
BIN  agents/patchmon-agent-linux-arm64 (Executable file) - Binary file not shown.
496  agents/patchmon-docker-agent.sh (Executable file)
@@ -0,0 +1,496 @@
#!/bin/bash

# PatchMon Docker Agent Script v1.2.9
# This script collects Docker container and image information and sends it to PatchMon

# Configuration
PATCHMON_SERVER="${PATCHMON_SERVER:-http://localhost:3001}"
API_VERSION="v1"
AGENT_VERSION="1.2.9"
CONFIG_FILE="/etc/patchmon/agent.conf"
CREDENTIALS_FILE="/etc/patchmon/credentials"
LOG_FILE="/var/log/patchmon-docker-agent.log"

# Curl flags placeholder (replaced by server based on SSL settings)
CURL_FLAGS=""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Logging function
log() {
    if [[ -w "$(dirname "$LOG_FILE")" ]] 2>/dev/null; then
        echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE" 2>/dev/null
    fi
}

# Error handling
error() {
    echo -e "${RED}ERROR: $1${NC}" >&2
    log "ERROR: $1"
    exit 1
}

# Info logging
info() {
    echo -e "${BLUE}ℹ️ $1${NC}" >&2
    log "INFO: $1"
}

# Success logging
success() {
    echo -e "${GREEN}✅ $1${NC}" >&2
    log "SUCCESS: $1"
}

# Warning logging
warning() {
    echo -e "${YELLOW}⚠️ $1${NC}" >&2
    log "WARNING: $1"
}

# Check if Docker is installed and running
check_docker() {
    if ! command -v docker &> /dev/null; then
        error "Docker is not installed on this system"
    fi

    if ! docker info &> /dev/null; then
        error "Docker daemon is not running or you don't have permission to access it. Try running with sudo."
    fi
}
# Load credentials
load_credentials() {
    if [[ ! -f "$CREDENTIALS_FILE" ]]; then
        error "Credentials file not found at $CREDENTIALS_FILE. Please configure the main PatchMon agent first."
    fi

    source "$CREDENTIALS_FILE"

    if [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
        error "API credentials not found in $CREDENTIALS_FILE"
    fi

    # Use PATCHMON_URL from credentials if available, otherwise use default
    if [[ -n "$PATCHMON_URL" ]]; then
        PATCHMON_SERVER="$PATCHMON_URL"
    fi
}

# Load configuration
load_config() {
    if [[ -f "$CONFIG_FILE" ]]; then
        source "$CONFIG_FILE"
        if [[ -n "$SERVER_URL" ]]; then
            PATCHMON_SERVER="$SERVER_URL"
        fi
    fi
}

# Collect Docker containers
collect_containers() {
    info "Collecting Docker container information..."

    local containers_json="["
    local first=true

    # Get all containers (running and stopped)
    while IFS='|' read -r container_id name image status state created started ports; do
        if [[ -z "$container_id" ]]; then
            continue
        fi

        # Parse image name and tag
        local image_name="${image%%:*}"
        local image_tag="${image##*:}"
        if [[ "$image_tag" == "$image_name" ]]; then
            image_tag="latest"
        fi

        # Determine image source based on registry
        local image_source="docker-hub"
        if [[ "$image_name" == ghcr.io/* ]]; then
            image_source="github"
        elif [[ "$image_name" == registry.gitlab.com/* ]]; then
            image_source="gitlab"
        elif [[ "$image_name" == *"/"*"/"* ]]; then
            image_source="private"
        fi

        # Get repository name (without registry prefix for common registries)
        local image_repository="$image_name"
        image_repository="${image_repository#ghcr.io/}"
        image_repository="${image_repository#registry.gitlab.com/}"

        # Get image ID
        local full_image_id=$(docker inspect --format='{{.Image}}' "$container_id" 2>/dev/null || echo "unknown")
        full_image_id="${full_image_id#sha256:}"

        # Normalize status (extract just the status keyword)
        local normalized_status="unknown"
        if [[ "$status" =~ ^Up ]]; then
            normalized_status="running"
        elif [[ "$status" =~ ^Exited ]]; then
            normalized_status="exited"
        elif [[ "$status" =~ ^Created ]]; then
            normalized_status="created"
        elif [[ "$status" =~ ^Restarting ]]; then
            normalized_status="restarting"
        elif [[ "$status" =~ ^Paused ]]; then
            normalized_status="paused"
        elif [[ "$status" =~ ^Dead ]]; then
            normalized_status="dead"
        fi

        # Parse ports
        local ports_json="null"
        if [[ -n "$ports" && "$ports" != "null" ]]; then
            # Convert Docker port format to JSON
            ports_json=$(echo "$ports" | jq -R -s -c 'split(",") | map(select(length > 0)) | map(split("->") | {(.[0]): .[1]}) | add // {}')
        fi

        # Convert dates to ISO 8601 format
        # If date conversion fails, use null instead of invalid date string
        local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")
        local started_iso="null"
        if [[ -n "$started" && "$started" != "null" ]]; then
            started_iso=$(date -d "$started" -Iseconds 2>/dev/null || echo "null")
        fi

        # Add comma for JSON array
        if [[ "$first" == false ]]; then
            containers_json+=","
        fi
        first=false

        # Build JSON object for this container
        containers_json+="{\"container_id\":\"$container_id\","
        containers_json+="\"name\":\"$name\","
        containers_json+="\"image_name\":\"$image_name\","
        containers_json+="\"image_tag\":\"$image_tag\","
        containers_json+="\"image_repository\":\"$image_repository\","
        containers_json+="\"image_source\":\"$image_source\","
        containers_json+="\"image_id\":\"$full_image_id\","
        containers_json+="\"status\":\"$normalized_status\","
        containers_json+="\"state\":\"$state\","
        containers_json+="\"ports\":$ports_json"

        # Only add created_at if we have a valid date
        if [[ "$created_iso" != "null" ]]; then
            containers_json+=",\"created_at\":\"$created_iso\""
        fi

        # Only add started_at if we have a valid date
        if [[ "$started_iso" != "null" ]]; then
            containers_json+=",\"started_at\":\"$started_iso\""
        fi

        containers_json+="}"

    done < <(docker ps -a --format '{{.ID}}|{{.Names}}|{{.Image}}|{{.Status}}|{{.State}}|{{.CreatedAt}}|{{.RunningFor}}|{{.Ports}}' 2>/dev/null)

    containers_json+="]"

    echo "$containers_json"
}
# Collect Docker images
collect_images() {
    info "Collecting Docker image information..."

    local images_json="["
    local first=true

    while IFS='|' read -r repository tag image_id created size digest; do
        if [[ -z "$repository" || "$repository" == "<none>" ]]; then
            continue
        fi

        # Clean up tag
        if [[ -z "$tag" || "$tag" == "<none>" ]]; then
            tag="latest"
        fi

        # Clean image ID
        image_id="${image_id#sha256:}"

        # Determine source
        local source="docker-hub"
        if [[ "$repository" == ghcr.io/* ]]; then
            source="github"
        elif [[ "$repository" == registry.gitlab.com/* ]]; then
            source="gitlab"
        elif [[ "$repository" == *"/"*"/"* ]]; then
            source="private"
        fi

        # Convert size to bytes (approximate)
        local size_bytes=0
        if [[ "$size" =~ ([0-9.]+)([KMGT]?B) ]]; then
            local num="${BASH_REMATCH[1]}"
            local unit="${BASH_REMATCH[2]}"
            case "$unit" in
                KB) size_bytes=$(echo "$num * 1024" | bc | cut -d. -f1) ;;
                MB) size_bytes=$(echo "$num * 1024 * 1024" | bc | cut -d. -f1) ;;
                GB) size_bytes=$(echo "$num * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
                TB) size_bytes=$(echo "$num * 1024 * 1024 * 1024 * 1024" | bc | cut -d. -f1) ;;
                B)  size_bytes=$(echo "$num" | cut -d. -f1) ;;
            esac
        fi

        # Convert created date to ISO 8601
        # If date conversion fails, use null instead of invalid date string
        local created_iso=$(date -d "$created" -Iseconds 2>/dev/null || echo "null")

        # Add comma for JSON array
        if [[ "$first" == false ]]; then
            images_json+=","
        fi
        first=false

        # Build JSON object for this image
        images_json+="{\"repository\":\"$repository\","
        images_json+="\"tag\":\"$tag\","
        images_json+="\"image_id\":\"$image_id\","
        images_json+="\"source\":\"$source\","
        images_json+="\"size_bytes\":$size_bytes"

        # Only add created_at if we have a valid date
        if [[ "$created_iso" != "null" ]]; then
            images_json+=",\"created_at\":\"$created_iso\""
        fi

        # Only add digest if present
        if [[ -n "$digest" && "$digest" != "<none>" ]]; then
            images_json+=",\"digest\":\"$digest\""
        fi

        images_json+="}"

    done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.CreatedAt}}|{{.Size}}|{{.Digest}}' --no-trunc 2>/dev/null)

    images_json+="]"

    echo "$images_json"
}
# Check for image updates
check_image_updates() {
    info "Checking for image updates..."

    local updates_json="["
    local first=true
    local update_count=0

    # Get all images
    while IFS='|' read -r repository tag image_id digest; do
        if [[ -z "$repository" || "$repository" == "<none>" || "$tag" == "<none>" ]]; then
            continue
        fi

        # Tag names alone can't signal updates (a 'latest' tag never changes by
        # name), so compare the local digest against the registry's digest instead
        local full_image="${repository}:${tag}"

        # Try to get remote digest from registry
        # Use docker manifest inspect to avoid pulling the image
        local remote_digest=$(docker manifest inspect "$full_image" 2>/dev/null | jq -r '.config.digest // .manifests[0].digest // empty' 2>/dev/null)

        if [[ -z "$remote_digest" ]]; then
            # If manifest inspect fails, try buildx imagetools inspect (works for more registries)
            remote_digest=$(docker buildx imagetools inspect "$full_image" 2>/dev/null | grep -oP 'Digest:\s*\K\S+' | head -1)
        fi

        # Clean up digests for comparison
        local local_digest="${digest#sha256:}"
        remote_digest="${remote_digest#sha256:}"

        # If we got a remote digest and it's different from local, there's an update
        if [[ -n "$remote_digest" && -n "$local_digest" && "$remote_digest" != "$local_digest" ]]; then
            if [[ "$first" == false ]]; then
                updates_json+=","
            fi
            first=false

            # Build update JSON object
            updates_json+="{\"repository\":\"$repository\","
            updates_json+="\"current_tag\":\"$tag\","
            updates_json+="\"available_tag\":\"$tag\","
            updates_json+="\"current_digest\":\"$local_digest\","
            updates_json+="\"available_digest\":\"$remote_digest\","
            updates_json+="\"image_id\":\"${image_id#sha256:}\""
            updates_json+="}"

            ((update_count++))
        fi

    done < <(docker images --format '{{.Repository}}|{{.Tag}}|{{.ID}}|{{.Digest}}' --no-trunc 2>/dev/null)

    updates_json+="]"

    info "Found $update_count image update(s) available"

    echo "$updates_json"
}
# Send Docker data to server
send_docker_data() {
    load_credentials

    info "Collecting Docker data..."

    local containers=$(collect_containers)
    local images=$(collect_images)
    local updates=$(check_image_updates)

    # Count collected items
    local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
    local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
    local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")

    info "Found $container_count containers, $image_count images, and $update_count update(s) available"

    # Build payload
    local payload="{\"apiId\":\"$API_ID\",\"apiKey\":\"$API_KEY\",\"containers\":$containers,\"images\":$images,\"updates\":$updates}"

    # Send to server
    info "Sending Docker data to PatchMon server..."

    local response=$(curl $CURL_FLAGS -s -w "\n%{http_code}" -X POST \
        -H "Content-Type: application/json" \
        -d "$payload" \
        "${PATCHMON_SERVER}/api/${API_VERSION}/docker/collect" 2>&1)

    local http_code=$(echo "$response" | tail -n1)
    local response_body=$(echo "$response" | head -n-1)

    if [[ "$http_code" == "200" ]]; then
        success "Docker data sent successfully!"
        log "Docker data sent: $container_count containers, $image_count images"
        return 0
    else
        error "Failed to send Docker data. HTTP Status: $http_code\nResponse: $response_body"
    fi
}
# Test Docker data collection without sending
test_collection() {
    check_docker

    info "Testing Docker data collection (dry run)..."
    echo ""

    local containers=$(collect_containers)
    local images=$(collect_images)
    local updates=$(check_image_updates)

    local container_count=$(echo "$containers" | jq '. | length' 2>/dev/null || echo "0")
    local image_count=$(echo "$images" | jq '. | length' 2>/dev/null || echo "0")
    local update_count=$(echo "$updates" | jq '. | length' 2>/dev/null || echo "0")

    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo -e "${GREEN}Docker Data Collection Results${NC}"
    echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
    echo -e "Containers found:  ${GREEN}$container_count${NC}"
    echo -e "Images found:      ${GREEN}$image_count${NC}"
    echo -e "Updates available: ${YELLOW}$update_count${NC}"
    echo ""

    if command -v jq &> /dev/null; then
        echo "━━━ Containers ━━━"
        echo "$containers" | jq -r '.[] | "\(.name) (\(.status)) - \(.image_name):\(.image_tag)"' | head -10
        if [[ $container_count -gt 10 ]]; then
            echo "... and $((container_count - 10)) more"
        fi
        echo ""
        echo "━━━ Images ━━━"
        echo "$images" | jq -r '.[] | "\(.repository):\(.tag) (\(.size_bytes / 1024 / 1024 | floor)MB)"' | head -10
        if [[ $image_count -gt 10 ]]; then
            echo "... and $((image_count - 10)) more"
        fi

        if [[ $update_count -gt 0 ]]; then
            echo ""
            echo "━━━ Available Updates ━━━"
            echo "$updates" | jq -r '.[] | "\(.repository):\(.current_tag) → \(.available_tag)"'
        fi
    fi

    echo ""
    success "Test collection completed successfully!"
}
# Show help
show_help() {
    cat << EOF
PatchMon Docker Agent v${AGENT_VERSION}

This agent collects Docker container and image information and sends it to PatchMon.

USAGE:
    $0 <command>

COMMANDS:
    collect    Collect and send Docker data to PatchMon server
    test       Test Docker data collection without sending (dry run)
    help       Show this help message

REQUIREMENTS:
    - Docker must be installed and running
    - Main PatchMon agent must be configured first
    - Credentials file must exist at $CREDENTIALS_FILE

EXAMPLES:
    # Test collection (dry run)
    sudo $0 test

    # Collect and send Docker data
    sudo $0 collect

SCHEDULING:
    To run this agent automatically, add a cron job:

    # Run every 5 minutes
    */5 * * * * /usr/local/bin/patchmon-docker-agent.sh collect

    # Run every hour
    0 * * * * /usr/local/bin/patchmon-docker-agent.sh collect

FILES:
    Config:      $CONFIG_FILE
    Credentials: $CREDENTIALS_FILE
    Log:         $LOG_FILE

EOF
}
# Main function
main() {
    case "$1" in
        "collect")
            check_docker
            load_config
            send_docker_data
            ;;
        "test")
            check_docker
            load_config
            test_collection
            ;;
        "help"|"--help"|"-h"|"")
            show_help
            ;;
        *)
            error "Unknown command: $1\n\nRun '$0 help' for usage information."
            ;;
    esac
}

# Run main function
main "$@"

@@ -1,10 +1,15 @@
#!/bin/bash

# PatchMon Agent Installation Script
# Usage: curl -sSL {PATCHMON_URL}/api/v1/hosts/install | bash -s -- {PATCHMON_URL} {API_ID} {API_KEY}
# Usage: curl -s {PATCHMON_URL}/api/v1/hosts/install -H "X-API-ID: {API_ID}" -H "X-API-KEY: {API_KEY}" | bash

set -e

# This placeholder will be dynamically replaced by the server when serving this
# script based on the "ignore SSL self-signed" setting. If set to -k, curl will
# ignore certificate validation. Otherwise, it will be empty for secure default.
# CURL_FLAGS is now set via environment variables by the backend

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
@@ -35,157 +40,523 @@ if [[ $EUID -ne 0 ]]; then
    error "This script must be run as root (use sudo)"
fi

# Verify system datetime and timezone
verify_datetime() {
    info "🕐 Verifying system datetime and timezone..."

    # Get current system time
    local system_time=$(date)
    local timezone=$(timedatectl show --property=Timezone --value 2>/dev/null || echo "Unknown")

    # Display current datetime info
    echo ""
    echo -e "${BLUE}📅 Current System Date/Time:${NC}"
    echo "   • Date/Time: $system_time"
    echo "   • Timezone: $timezone"
    echo ""

    # Check if we can read from stdin (interactive terminal)
    if [[ -t 0 ]]; then
        # Interactive terminal - ask user
        read -p "Does this date/time look correct to you? (y/N): " -r response
        if [[ "$response" =~ ^[Yy]$ ]]; then
            success "✅ Date/time verification passed"
            echo ""
            return 0
        else
            echo ""
            echo -e "${RED}❌ Date/time verification failed${NC}"
            echo ""
            echo -e "${YELLOW}💡 Please fix the date/time and re-run the installation script:${NC}"
            echo "   sudo timedatectl set-time 'YYYY-MM-DD HH:MM:SS'"
            echo "   sudo timedatectl set-timezone 'America/New_York'  # or your timezone"
            echo "   sudo timedatectl list-timezones  # to see available timezones"
            echo ""
            echo -e "${BLUE}ℹ️ After fixing the date/time, re-run this installation script.${NC}"
            error "Installation cancelled - please fix date/time and re-run"
        fi
    else
        # Non-interactive (piped from curl) - show warning and continue
        echo -e "${YELLOW}⚠️ Non-interactive installation detected${NC}"
        echo ""
        echo "Please verify the date/time shown above is correct."
        echo "If the date/time is incorrect, it may cause issues with:"
        echo "   • Logging timestamps"
        echo "   • Scheduled updates"
        echo "   • Data synchronization"
        echo ""
        echo -e "${GREEN}✅ Continuing with installation...${NC}"
        success "✅ Date/time verification completed (assumed correct)"
        echo ""
    fi
}

# Run datetime verification
verify_datetime
# Clean up old files (keep only last 3 of each type)
cleanup_old_files() {
    # Clean up old credential backups
    ls -t /etc/patchmon/credentials.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Clean up old config backups
    ls -t /etc/patchmon/config.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Clean up old agent backups
    ls -t /usr/local/bin/patchmon-agent.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Clean up old log files
    ls -t /etc/patchmon/logs/patchmon-agent.log.old.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Clean up old shell script backups (if any exist)
    ls -t /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Clean up old credentials backups (if any exist)
    ls -t /etc/patchmon/credentials.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f
}

# Run cleanup at start
cleanup_old_files

# Generate or retrieve machine ID
get_machine_id() {
    # Try multiple sources for machine ID
    if [[ -f /etc/machine-id ]]; then
        cat /etc/machine-id
    elif [[ -f /var/lib/dbus/machine-id ]]; then
        cat /var/lib/dbus/machine-id
    else
        # Fallback: generate from hardware info (less ideal but works)
        echo "patchmon-$(cat /sys/class/dmi/id/product_uuid 2>/dev/null || cat /proc/sys/kernel/random/uuid)"
    fi
}

# Parse arguments from environment (passed via HTTP headers)
if [[ -z "$PATCHMON_URL" ]] || [[ -z "$API_ID" ]] || [[ -z "$API_KEY" ]]; then
    error "Missing required parameters. This script should be called via the PatchMon web interface."
fi

# Parse architecture parameter (default to amd64)
ARCHITECTURE="${ARCHITECTURE:-amd64}"
if [[ "$ARCHITECTURE" != "amd64" && "$ARCHITECTURE" != "386" && "$ARCHITECTURE" != "arm64" ]]; then
    error "Invalid architecture '$ARCHITECTURE'. Must be one of: amd64, 386, arm64"
fi

# Check if --force flag is set (for bypassing broken packages)
FORCE_INSTALL="${FORCE_INSTALL:-false}"
if [[ "$*" == *"--force"* ]] || [[ "$FORCE_INSTALL" == "true" ]]; then
    FORCE_INSTALL="true"
    warning "⚠️ Force mode enabled - will bypass broken packages"
fi

# Get unique machine ID for this host
MACHINE_ID=$(get_machine_id)
export MACHINE_ID

info "🚀 Starting PatchMon Agent Installation..."
info "📋 Server: $PATCHMON_URL"
info "🔑 API ID: ${API_ID:0:16}..."
info "🆔 Machine ID: ${MACHINE_ID:0:16}..."
info "🏗️ Architecture: $ARCHITECTURE"

# Display diagnostic information
echo ""
echo -e "${BLUE}🔧 Installation Diagnostics:${NC}"
echo "   • URL: $PATCHMON_URL"
echo "   • CURL FLAGS: $CURL_FLAGS"
echo "   • API ID: ${API_ID:0:16}..."
echo "   • API Key: ${API_KEY:0:16}..."
echo "   • Architecture: $ARCHITECTURE"
echo ""
# Install required dependencies
info "📦 Installing required dependencies..."
echo ""

# Detect package manager and install jq
# Function to check if a command exists
command_exists() {
    command -v "$1" >/dev/null 2>&1
}

# Function to install packages with error handling
install_apt_packages() {
    local packages=("$@")
    local missing_packages=()

    # Check which packages are missing
    for pkg in "${packages[@]}"; do
        if ! command_exists "$pkg"; then
            missing_packages+=("$pkg")
        fi
    done

    if [ ${#missing_packages[@]} -eq 0 ]; then
        success "All required packages are already installed"
        return 0
    fi

    info "Need to install: ${missing_packages[*]}"

    # Build apt-get command based on force mode
    local apt_cmd="apt-get install ${missing_packages[*]} -y"

    if [[ "$FORCE_INSTALL" == "true" ]]; then
        info "Using force mode - bypassing broken packages..."
        apt_cmd="$apt_cmd -o APT::Get::Fix-Broken=false -o DPkg::Options::=\"--force-confold\" -o DPkg::Options::=\"--force-confdef\""
    fi

    # Try to install packages
    if eval "$apt_cmd" 2>&1 | tee /tmp/patchmon_apt_install.log; then
        success "Packages installed successfully"
        return 0
    else
        warning "Package installation encountered issues, checking if required tools are available..."

        # Verify critical dependencies are actually available
        local all_ok=true
        for pkg in "${packages[@]}"; do
            if ! command_exists "$pkg"; then
                if [[ "$FORCE_INSTALL" == "true" ]]; then
                    error "Critical dependency '$pkg' is not available even with --force. Please install manually."
                else
                    error "Critical dependency '$pkg' is not available. Try again with --force flag or install manually: apt-get install $pkg"
                fi
                all_ok=false
            fi
        done

        if $all_ok; then
            success "All required tools are available despite installation warnings"
            return 0
        else
            return 1
        fi
    fi
}

# Detect package manager and install jq and curl
if command -v apt-get >/dev/null 2>&1; then
    # Debian/Ubuntu
    apt-get update >/dev/null 2>&1
    apt-get install -y jq curl >/dev/null 2>&1
    info "Detected apt-get (Debian/Ubuntu)"
    echo ""

    # Check for broken packages
    if dpkg -l | grep -q "^iH\|^iF" 2>/dev/null; then
        if [[ "$FORCE_INSTALL" == "true" ]]; then
            warning "Detected broken packages on system - force mode will work around them"
        else
            warning "⚠️ Broken packages detected on system"
            warning "If installation fails, retry with: curl -s {URL}/api/v1/hosts/install --force -H ..."
        fi
    fi

    info "Updating package lists..."
    apt-get update || true
    echo ""
    info "Installing jq, curl, and bc..."
    install_apt_packages jq curl bc
elif command -v yum >/dev/null 2>&1; then
    # CentOS/RHEL 7
    yum install -y jq curl >/dev/null 2>&1
    info "Detected yum (CentOS/RHEL 7)"
    echo ""
    info "Installing jq, curl, and bc..."
    yum install -y jq curl bc
elif command -v dnf >/dev/null 2>&1; then
    # CentOS/RHEL 8+/Fedora
    dnf install -y jq curl >/dev/null 2>&1
    info "Detected dnf (CentOS/RHEL 8+/Fedora)"
    echo ""
    info "Installing jq, curl, and bc..."
    dnf install -y jq curl bc
elif command -v zypper >/dev/null 2>&1; then
    # openSUSE
    zypper install -y jq curl >/dev/null 2>&1
    info "Detected zypper (openSUSE)"
    echo ""
    info "Installing jq, curl, and bc..."
    zypper install -y jq curl bc
elif command -v pacman >/dev/null 2>&1; then
    # Arch Linux
    pacman -S --noconfirm jq curl >/dev/null 2>&1
    info "Detected pacman (Arch Linux)"
    echo ""
    info "Installing jq, curl, and bc..."
    pacman -S --noconfirm jq curl bc
elif command -v apk >/dev/null 2>&1; then
    # Alpine Linux
    apk add --no-cache jq curl >/dev/null 2>&1
    info "Detected apk (Alpine Linux)"
    echo ""
    info "Installing jq, curl, and bc..."
    apk add --no-cache jq curl bc
else
    warning "Could not detect package manager. Please ensure 'jq' and 'curl' are installed manually."
    warning "Could not detect package manager. Please ensure 'jq', 'curl', and 'bc' are installed manually."
fi

# Verify jq installation
if ! command -v jq >/dev/null 2>&1; then
    error "Failed to install 'jq'. Please install it manually: https://stedolan.github.io/jq/download/"
fi

success "Dependencies installed successfully!"

# Default server URL (will be replaced by backend with configured URL)
PATCHMON_URL="http://localhost:3001"

# Parse arguments
if [[ $# -ne 3 ]]; then
    echo "Usage: curl -sSL {PATCHMON_URL}/api/v1/hosts/install | bash -s -- {PATCHMON_URL} {API_ID} {API_KEY}"
    echo ""
    echo "Example:"
    echo "curl -sSL http://patchmon.example.com/api/v1/hosts/install | bash -s -- http://patchmon.example.com patchmon_1a2b3c4d abcd1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
success "Dependencies installation completed"
    echo ""
    echo "Contact your PatchMon administrator to get your API credentials."
    exit 1
fi

PATCHMON_URL="$1"
API_ID="$2"
API_KEY="$3"
# Step 1: Handle existing configuration directory
info "📁 Setting up configuration directory..."

# Validate inputs
if [[ ! "$PATCHMON_URL" =~ ^https?:// ]]; then
    error "Invalid URL format. Must start with http:// or https://"
fi
# Check if configuration directory already exists
if [[ -d "/etc/patchmon" ]]; then
    warning "⚠️ Configuration directory already exists at /etc/patchmon"
    warning "⚠️ Preserving existing configuration files"

if [[ ! "$API_ID" =~ ^patchmon_[a-f0-9]{16}$ ]]; then
    error "Invalid API ID format. API ID should be in format: patchmon_xxxxxxxxxxxxxxxx"
fi

if [[ ! "$API_KEY" =~ ^[a-f0-9]{64}$ ]]; then
    error "Invalid API Key format. API Key should be 64 hexadecimal characters."
fi

info "🚀 Installing PatchMon Agent..."
info "   Server: $PATCHMON_URL"
info "   API ID: $API_ID"

# Create patchmon directory
info "📁 Creating configuration directory..."
    # List existing files for user awareness
    info "📋 Existing files in /etc/patchmon:"
    ls -la /etc/patchmon/ 2>/dev/null | grep -v "^total" | while read -r line; do
        echo "   $line"
    done
else
    info "📁 Creating new configuration directory..."
    mkdir -p /etc/patchmon

# Download the agent script
info "📥 Downloading PatchMon agent script..."
curl -sSL "$PATCHMON_URL/api/v1/hosts/agent/download" -o /usr/local/bin/patchmon-agent.sh
chmod +x /usr/local/bin/patchmon-agent.sh

# Get the agent version from the downloaded script
AGENT_VERSION=$(grep '^AGENT_VERSION=' /usr/local/bin/patchmon-agent.sh | cut -d'"' -f2)
info "📋 Agent version: $AGENT_VERSION"

# Get expected agent version from server
EXPECTED_VERSION=$(curl -s "$PATCHMON_URL/api/v1/hosts/agent/version" | grep -o '"currentVersion":"[^"]*' | cut -d'"' -f4 2>/dev/null || echo "Unknown")
if [[ "$EXPECTED_VERSION" != "Unknown" ]]; then
    info "📋 Expected version: $EXPECTED_VERSION"
    if [[ "$AGENT_VERSION" != "$EXPECTED_VERSION" ]]; then
        warning "⚠️ Agent version mismatch! Installed: $AGENT_VERSION, Expected: $EXPECTED_VERSION"
    fi
fi

# Get update interval policy from server
UPDATE_INTERVAL=$(curl -s "$PATCHMON_URL/api/v1/settings/update-interval" | grep -o '"updateInterval":[0-9]*' | cut -d':' -f2 2>/dev/null || echo "60")
info "📋 Update interval: $UPDATE_INTERVAL minutes"
# Step 2: Create configuration files
info "🔐 Creating configuration files..."

# Create credentials file
info "🔐 Setting up API credentials..."
cat > /etc/patchmon/credentials << EOF
# PatchMon API Credentials
# Check if config file already exists
if [[ -f "/etc/patchmon/config.yml" ]]; then
    warning "⚠️ Config file already exists at /etc/patchmon/config.yml"
    warning "⚠️ Moving existing file out of the way for fresh installation"

    # Clean up old config backups (keep only last 3)
    ls -t /etc/patchmon/config.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Move existing file out of the way
    mv /etc/patchmon/config.yml /etc/patchmon/config.yml.backup.$(date +%Y%m%d_%H%M%S)
    info "📋 Moved existing config to: /etc/patchmon/config.yml.backup.$(date +%Y%m%d_%H%M%S)"
fi

# Check if credentials file already exists
if [[ -f "/etc/patchmon/credentials.yml" ]]; then
    warning "⚠️ Credentials file already exists at /etc/patchmon/credentials.yml"
    warning "⚠️ Moving existing file out of the way for fresh installation"

    # Clean up old credential backups (keep only last 3)
    ls -t /etc/patchmon/credentials.yml.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Move existing file out of the way
    mv /etc/patchmon/credentials.yml /etc/patchmon/credentials.yml.backup.$(date +%Y%m%d_%H%M%S)
    info "📋 Moved existing credentials to: /etc/patchmon/credentials.yml.backup.$(date +%Y%m%d_%H%M%S)"
fi

# Clean up old credentials file if it exists (from previous installations)
if [[ -f "/etc/patchmon/credentials" ]]; then
    warning "⚠️ Found old credentials file, removing it..."
    rm -f /etc/patchmon/credentials
    info "📋 Removed old credentials file"
fi

# Create main config file
cat > /etc/patchmon/config.yml << EOF
# PatchMon Agent Configuration
# Generated on $(date)
PATCHMON_URL="$PATCHMON_URL"
API_ID="$API_ID"
API_KEY="$API_KEY"
patchmon_server: "$PATCHMON_URL"
api_version: "v1"
credentials_file: "/etc/patchmon/credentials.yml"
log_file: "/etc/patchmon/logs/patchmon-agent.log"
log_level: "info"
EOF

chmod 600 /etc/patchmon/credentials
# Create credentials file
cat > /etc/patchmon/credentials.yml << EOF
# PatchMon API Credentials
# Generated on $(date)
api_id: "$API_ID"
api_key: "$API_KEY"
EOF

# Test the configuration
info "🧪 Testing configuration..."
if /usr/local/bin/patchmon-agent.sh test; then
    success "Configuration test passed!"
else
    error "Configuration test failed. Please check your credentials."
chmod 600 /etc/patchmon/config.yml
chmod 600 /etc/patchmon/credentials.yml

# Step 3: Download the PatchMon agent binary using API credentials
info "📥 Downloading PatchMon agent binary..."

# Determine the binary filename based on architecture
BINARY_NAME="patchmon-agent-linux-${ARCHITECTURE}"

# Check if agent binary already exists
if [[ -f "/usr/local/bin/patchmon-agent" ]]; then
    warning "⚠️ Agent binary already exists at /usr/local/bin/patchmon-agent"
    warning "⚠️ Moving existing file out of the way for fresh installation"

    # Clean up old agent backups (keep only last 3)
    ls -t /usr/local/bin/patchmon-agent.backup.* 2>/dev/null | tail -n +4 | xargs -r rm -f

    # Move existing file out of the way
    mv /usr/local/bin/patchmon-agent /usr/local/bin/patchmon-agent.backup.$(date +%Y%m%d_%H%M%S)
    info "📋 Moved existing agent to: /usr/local/bin/patchmon-agent.backup.$(date +%Y%m%d_%H%M%S)"
fi

# Send initial update
info "📊 Sending initial package data..."
if /usr/local/bin/patchmon-agent.sh update; then
    success "Initial package data sent successfully!"
else
    warning "Initial package data failed, but agent is configured. You can run 'patchmon-agent.sh update' manually."
# Clean up old shell script if it exists (from previous installations)
if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
    warning "⚠️ Found old shell script agent, removing it..."
    rm -f /usr/local/bin/patchmon-agent.sh
    info "📋 Removed old shell script agent"
fi

# Setup crontab for automatic updates
info "⏰ Setting up automatic updates every $UPDATE_INTERVAL minutes..."
if [[ $UPDATE_INTERVAL -eq 60 ]]; then
    # Hourly updates
    echo "0 * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1" | crontab -
else
    # Custom interval updates
    echo "*/$UPDATE_INTERVAL * * * * /usr/local/bin/patchmon-agent.sh update >/dev/null 2>&1" | crontab -
# Download the binary
curl $CURL_FLAGS \
    -H "X-API-ID: $API_ID" \
    -H "X-API-KEY: $API_KEY" \
    "$PATCHMON_URL/api/v1/hosts/agent/download?arch=$ARCHITECTURE" \
    -o /usr/local/bin/patchmon-agent

chmod +x /usr/local/bin/patchmon-agent

# Get the agent version from the binary
AGENT_VERSION=$(/usr/local/bin/patchmon-agent version 2>/dev/null || echo "Unknown")
info "📋 Agent version: $AGENT_VERSION"

# Handle existing log files and create log directory
info "📁 Setting up log directory..."

# Create log directory if it doesn't exist
mkdir -p /etc/patchmon/logs

# Handle existing log files
if [[ -f "/etc/patchmon/logs/patchmon-agent.log" ]]; then
    warning "⚠️ Existing log file found at /etc/patchmon/logs/patchmon-agent.log"
    warning "⚠️ Rotating log file for fresh start"

    # Rotate the log file
    mv /etc/patchmon/logs/patchmon-agent.log /etc/patchmon/logs/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)
    info "📋 Log file rotated to: /etc/patchmon/logs/patchmon-agent.log.old.$(date +%Y%m%d_%H%M%S)"
fi

success "🎉 PatchMon Agent installation complete!"
# Step 4: Test the configuration
# Check if this machine is already enrolled
info "🔍 Checking if machine is already enrolled..."
existing_check=$(curl $CURL_FLAGS -s -X POST \
    -H "X-API-ID: $API_ID" \
    -H "X-API-KEY: $API_KEY" \
    -H "Content-Type: application/json" \
    -d "{\"machine_id\": \"$MACHINE_ID\"}" \
    "$PATCHMON_URL/api/v1/hosts/check-machine-id" \
    -w "\n%{http_code}" 2>&1)

http_code=$(echo "$existing_check" | tail -n 1)
response_body=$(echo "$existing_check" | sed '$d')

if [[ "$http_code" == "200" ]]; then
    already_enrolled=$(echo "$response_body" | jq -r '.exists' 2>/dev/null || echo "false")
    if [[ "$already_enrolled" == "true" ]]; then
        warning "⚠️ This machine is already enrolled in PatchMon"
        info "Machine ID: $MACHINE_ID"
        info "Existing host: $(echo "$response_body" | jq -r '.host.friendly_name' 2>/dev/null)"
        info ""
        info "The agent will be reinstalled/updated with existing credentials."
echo ""
echo "📋 Installation Summary:"
echo "   • Dependencies installed: jq, curl"
echo "   • Agent installed: /usr/local/bin/patchmon-agent.sh"
echo "   • Agent version: $AGENT_VERSION"
if [[ "$EXPECTED_VERSION" != "Unknown" ]]; then
    echo "   • Expected version: $EXPECTED_VERSION"
    else
        success "✅ Machine not yet enrolled - proceeding with installation"
    fi
fi
echo "   • Config directory: /etc/patchmon/"
echo "   • Credentials file: /etc/patchmon/credentials"
echo "   • Automatic updates: Every $UPDATE_INTERVAL minutes via crontab"
echo "   • View logs: tail -f /var/log/patchmon-agent.sh"
echo ""
echo "🔧 Manual commands:"
echo "   • Test connection: patchmon-agent.sh test"
echo "   • Send update: patchmon-agent.sh update"
echo "   • Check status: patchmon-agent.sh ping"
echo ""
success "Your host is now connected to PatchMon!"

info "🧪 Testing API credentials and connectivity..."
if /usr/local/bin/patchmon-agent ping; then
    success "✅ TEST: API credentials are valid and server is reachable"
else
    error "❌ Failed to validate API credentials or reach server"
fi
# Step 5: Send initial data and setup systemd service
info "📊 Sending initial package data to server..."
if /usr/local/bin/patchmon-agent report; then
    success "✅ UPDATE: Initial package data sent successfully"
else
    warning "⚠️ Failed to send initial data. You can retry later with: /usr/local/bin/patchmon-agent report"
fi

# Step 6: Setup systemd service for WebSocket connection
info "🔧 Setting up systemd service..."

# Stop and disable existing service if it exists
if systemctl is-active --quiet patchmon-agent.service 2>/dev/null; then
    warning "⚠️ Stopping existing PatchMon agent service..."
    systemctl stop patchmon-agent.service
fi

if systemctl is-enabled --quiet patchmon-agent.service 2>/dev/null; then
    warning "⚠️ Disabling existing PatchMon agent service..."
    systemctl disable patchmon-agent.service
fi

# Create systemd service file
cat > /etc/systemd/system/patchmon-agent.service << EOF
[Unit]
Description=PatchMon Agent Service
After=network.target
Wants=network.target

[Service]
Type=simple
User=root
ExecStart=/usr/local/bin/patchmon-agent serve
Restart=always
RestartSec=10
WorkingDirectory=/etc/patchmon

# Logging
StandardOutput=journal
StandardError=journal
SyslogIdentifier=patchmon-agent

[Install]
WantedBy=multi-user.target
EOF

# Clean up old crontab entries if they exist (from previous installations)
if crontab -l 2>/dev/null | grep -q "patchmon-agent"; then
    warning "⚠️ Found old crontab entries, removing them..."
    crontab -l 2>/dev/null | grep -v "patchmon-agent" | crontab -
    info "📋 Removed old crontab entries"
fi

# Reload systemd and enable/start the service
systemctl daemon-reload
systemctl enable patchmon-agent.service
systemctl start patchmon-agent.service

# Check if service started successfully
if systemctl is-active --quiet patchmon-agent.service; then
    success "✅ PatchMon Agent service started successfully"
    info "🔗 WebSocket connection established"
else
    warning "⚠️ Service may have failed to start. Check status with: systemctl status patchmon-agent"
fi
# Installation complete
success "🎉 PatchMon Agent installation completed successfully!"
echo ""
echo -e "${GREEN}📋 Installation Summary:${NC}"
echo "   • Configuration directory: /etc/patchmon"
echo "   • Agent binary installed: /usr/local/bin/patchmon-agent"
echo "   • Architecture: $ARCHITECTURE"
echo "   • Dependencies installed: jq, curl, bc"
echo "   • Systemd service configured and running"
echo "   • API credentials configured and tested"
echo "   • WebSocket connection established"
echo "   • Logs directory: /etc/patchmon/logs"

# Check for moved files and show them
MOVED_FILES=$(ls /etc/patchmon/credentials.yml.backup.* /etc/patchmon/config.yml.backup.* /usr/local/bin/patchmon-agent.backup.* /etc/patchmon/logs/patchmon-agent.log.old.* /usr/local/bin/patchmon-agent.sh.backup.* /etc/patchmon/credentials.backup.* 2>/dev/null || true)
if [[ -n "$MOVED_FILES" ]]; then
    echo ""
    echo -e "${YELLOW}📋 Files Moved for Fresh Installation:${NC}"
    echo "$MOVED_FILES" | while read -r moved_file; do
        echo "   • $moved_file"
    done
    echo ""
    echo -e "${BLUE}💡 Note: Old files are automatically cleaned up (keeping last 3)${NC}"
fi

echo ""
echo -e "${BLUE}🔧 Management Commands:${NC}"
echo "   • Test connection: /usr/local/bin/patchmon-agent ping"
echo "   • Manual report: /usr/local/bin/patchmon-agent report"
echo "   • Check status: /usr/local/bin/patchmon-agent diagnostics"
echo "   • Service status: systemctl status patchmon-agent"
echo "   • Service logs: journalctl -u patchmon-agent -f"
echo "   • Restart service: systemctl restart patchmon-agent"
echo ""
success "✅ Your system is now being monitored by PatchMon!"
222  agents/patchmon_remove.sh (Executable file)
@@ -0,0 +1,222 @@
|
||||
#!/bin/bash
|
||||
|
||||
# PatchMon Agent Removal Script
|
||||
# Usage: curl -s {PATCHMON_URL}/api/v1/hosts/remove | bash
|
||||
# This script completely removes PatchMon from the system
|
||||
|
||||
set -e

# This placeholder will be dynamically replaced by the server when serving this
# script based on the "ignore SSL self-signed" setting for any curl calls in
# future (left for consistency with install script).
CURL_FLAGS=""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Functions
error() {
    echo -e "${RED}❌ ERROR: $1${NC}" >&2
    exit 1
}

info() {
    echo -e "${BLUE}ℹ️ $1${NC}"
}

success() {
    echo -e "${GREEN}✅ $1${NC}"
}

warning() {
    echo -e "${YELLOW}⚠️ $1${NC}"
}

# Check if running as root
if [[ $EUID -ne 0 ]]; then
    error "This script must be run as root (use sudo)"
fi

info "🗑️ Starting PatchMon Agent Removal..."
echo ""

# Step 1: Stop any running PatchMon processes
info "🛑 Stopping PatchMon processes..."
if pgrep -f "patchmon-agent.sh" >/dev/null; then
    warning "Found running PatchMon processes, stopping them..."
    pkill -f "patchmon-agent.sh" || true
    sleep 2
    success "PatchMon processes stopped"
else
    info "No running PatchMon processes found"
fi

# Step 2: Remove crontab entries
info "📅 Removing PatchMon crontab entries..."
if crontab -l 2>/dev/null | grep -q "patchmon-agent.sh"; then
    warning "Found PatchMon crontab entries, removing them..."
    crontab -l 2>/dev/null | grep -v "patchmon-agent.sh" | crontab -
    success "Crontab entries removed"
else
    info "No PatchMon crontab entries found"
fi

# Step 3: Remove agent script
info "📄 Removing agent script..."
if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
    warning "Removing agent script: /usr/local/bin/patchmon-agent.sh"
    rm -f /usr/local/bin/patchmon-agent.sh
    success "Agent script removed"
else
    info "Agent script not found"
fi

# Step 4: Remove configuration directory and files
info "📁 Removing configuration files..."
if [[ -d "/etc/patchmon" ]]; then
    warning "Removing configuration directory: /etc/patchmon"

    # Show what's being removed
    info "📋 Files in /etc/patchmon:"
    ls -la /etc/patchmon/ 2>/dev/null | grep -v "^total" | while read -r line; do
        echo " $line"
    done

    # Remove the directory
    rm -rf /etc/patchmon
    success "Configuration directory removed"
else
    info "Configuration directory not found"
fi

# Step 5: Remove log files
info "📝 Removing log files..."
if [[ -f "/var/log/patchmon-agent.log" ]]; then
    warning "Removing log file: /var/log/patchmon-agent.log"
    rm -f /var/log/patchmon-agent.log
    success "Log file removed"
else
    info "Log file not found"
fi

# Step 6: Clean up backup files (optional)
info "🧹 Cleaning up backup files..."
BACKUP_COUNT=0

# Count credential backups
CRED_BACKUPS=$(ls /etc/patchmon/credentials.backup.* 2>/dev/null | wc -l || echo "0")
if [[ $CRED_BACKUPS -gt 0 ]]; then
    BACKUP_COUNT=$((BACKUP_COUNT + CRED_BACKUPS))
fi

# Count agent backups
AGENT_BACKUPS=$(ls /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | wc -l || echo "0")
if [[ $AGENT_BACKUPS -gt 0 ]]; then
    BACKUP_COUNT=$((BACKUP_COUNT + AGENT_BACKUPS))
fi

# Count log backups
LOG_BACKUPS=$(ls /var/log/patchmon-agent.log.old.* 2>/dev/null | wc -l || echo "0")
if [[ $LOG_BACKUPS -gt 0 ]]; then
    BACKUP_COUNT=$((BACKUP_COUNT + LOG_BACKUPS))
fi

if [[ $BACKUP_COUNT -gt 0 ]]; then
    warning "Found $BACKUP_COUNT backup files"
    echo ""
    echo -e "${YELLOW}📋 Backup files found:${NC}"

    # Show credential backups
    if [[ $CRED_BACKUPS -gt 0 ]]; then
        echo " Credential backups:"
        ls /etc/patchmon/credentials.backup.* 2>/dev/null | while read -r file; do
            echo " • $file"
        done
    fi

    # Show agent backups
    if [[ $AGENT_BACKUPS -gt 0 ]]; then
        echo " Agent script backups:"
        ls /usr/local/bin/patchmon-agent.sh.backup.* 2>/dev/null | while read -r file; do
            echo " • $file"
        done
    fi

    # Show log backups
    if [[ $LOG_BACKUPS -gt 0 ]]; then
        echo " Log file backups:"
        ls /var/log/patchmon-agent.log.old.* 2>/dev/null | while read -r file; do
            echo " • $file"
        done
    fi

    echo ""
    echo -e "${BLUE}💡 Note: Backup files are preserved for safety${NC}"
    echo -e "${BLUE}💡 You can remove them manually if not needed${NC}"
else
    info "No backup files found"
fi

# Step 7: Remove dependencies (optional)
info "📦 Checking for PatchMon-specific dependencies..."
if command -v jq >/dev/null 2>&1; then
    warning "jq is installed (used by PatchMon)"
    echo -e "${BLUE}💡 Note: jq may be used by other applications${NC}"
    echo -e "${BLUE}💡 Consider keeping it unless you're sure it's not needed${NC}"
else
    info "jq not found"
fi

if command -v curl >/dev/null 2>&1; then
    warning "curl is installed (used by PatchMon)"
    echo -e "${BLUE}💡 Note: curl is commonly used by many applications${NC}"
    echo -e "${BLUE}💡 Consider keeping it unless you're sure it's not needed${NC}"
else
    info "curl not found"
fi

# Step 8: Final verification
info "🔍 Verifying removal..."
REMAINING_FILES=0

if [[ -f "/usr/local/bin/patchmon-agent.sh" ]]; then
    REMAINING_FILES=$((REMAINING_FILES + 1))
fi

if [[ -d "/etc/patchmon" ]]; then
    REMAINING_FILES=$((REMAINING_FILES + 1))
fi

if [[ -f "/var/log/patchmon-agent.log" ]]; then
    REMAINING_FILES=$((REMAINING_FILES + 1))
fi

if crontab -l 2>/dev/null | grep -q "patchmon-agent.sh"; then
    REMAINING_FILES=$((REMAINING_FILES + 1))
fi

if [[ $REMAINING_FILES -eq 0 ]]; then
    success "✅ PatchMon has been completely removed from the system!"
else
    warning "⚠️ Some PatchMon files may still remain ($REMAINING_FILES items)"
    echo -e "${BLUE}💡 You may need to remove them manually${NC}"
fi

echo ""
echo -e "${GREEN}📋 Removal Summary:${NC}"
echo " • Agent script: Removed"
echo " • Configuration files: Removed"
echo " • Log files: Removed"
echo " • Crontab entries: Removed"
echo " • Running processes: Stopped"
echo " • Backup files: Preserved (if any)"
echo ""
echo -e "${BLUE}🔧 Manual cleanup (if needed):${NC}"
echo " • Remove backup files: rm /etc/patchmon/credentials.backup.* /usr/local/bin/patchmon-agent.sh.backup.* /var/log/patchmon-agent.log.old.*"
echo " • Remove dependencies: apt remove jq curl (if not needed by other apps)"
echo ""
success "🎉 PatchMon removal completed!"
437
agents/proxmox_auto_enroll.sh
Executable file
@@ -0,0 +1,437 @@
#!/bin/bash
set -eo pipefail # Exit on error, pipe failures (removed -u as we handle unset vars explicitly)

# Trap to catch errors only (not normal exits)
trap 'echo "[ERROR] Script failed at line $LINENO with exit code $?"' ERR

SCRIPT_VERSION="2.0.0"
echo "[DEBUG] Script Version: $SCRIPT_VERSION ($(date +%Y-%m-%d\ %H:%M:%S))"

# =============================================================================
# PatchMon Proxmox LXC Auto-Enrollment Script
# =============================================================================
# This script discovers LXC containers on a Proxmox host and automatically
# enrolls them into PatchMon for patch management.
#
# Usage:
#   1. Set environment variables or edit configuration below
#   2. Run: bash proxmox_auto_enroll.sh
#
# Requirements:
#   - Must run on Proxmox host (requires 'pct' command)
#   - Auto-enrollment token from PatchMon
#   - Network access to PatchMon server
# =============================================================================

# ===== CONFIGURATION =====
PATCHMON_URL="${PATCHMON_URL:-https://patchmon.example.com}"
AUTO_ENROLLMENT_KEY="${AUTO_ENROLLMENT_KEY:-}"
AUTO_ENROLLMENT_SECRET="${AUTO_ENROLLMENT_SECRET:-}"
CURL_FLAGS="${CURL_FLAGS:--s}"
DRY_RUN="${DRY_RUN:-false}"
HOST_PREFIX="${HOST_PREFIX:-}"
SKIP_STOPPED="${SKIP_STOPPED:-true}"
PARALLEL_INSTALL="${PARALLEL_INSTALL:-false}"
MAX_PARALLEL="${MAX_PARALLEL:-5}"
FORCE_INSTALL="${FORCE_INSTALL:-false}"
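# Example one-shot invocation (hypothetical key/secret values), previewing
# the run before enrolling anything:
#   PATCHMON_URL="https://patchmon.example.com" \
#   AUTO_ENROLLMENT_KEY="patchmon_key" AUTO_ENROLLMENT_SECRET="secret" \
#   DRY_RUN=true bash proxmox_auto_enroll.sh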
# ===== COLOR OUTPUT =====
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# ===== LOGGING FUNCTIONS =====
info() { echo -e "${GREEN}[INFO]${NC} $1"; return 0; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; return 0; }
error() { echo -e "${RED}[ERROR]${NC} $1"; exit 1; }
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; return 0; }
debug() { [[ "${DEBUG:-false}" == "true" ]] && echo -e "${BLUE}[DEBUG]${NC} $1" || true; return 0; }

# ===== BANNER =====
cat << "EOF"
╔═══════════════════════════════════════════════════════════════╗
║                                                               ║
║      ____       _       _     __  __                         ║
║     |  _ \ __ _| |_ ___| |__ |  \/  | ___  _ __               ║
║     | |_) / _` | __/ __| '_ \| |\/| |/ _ \| '_ \              ║
║     |  __/ (_| | || (__| | | | |  | | (_) | | | |             ║
║     |_|   \__,_|\__\___|_| |_|_|  |_|\___/|_| |_|             ║
║                                                               ║
║              Proxmox LXC Auto-Enrollment Script               ║
║                                                               ║
╚═══════════════════════════════════════════════════════════════╝
EOF
echo ""

# ===== VALIDATION =====
info "Validating configuration..."

if [[ -z "$AUTO_ENROLLMENT_KEY" ]] || [[ -z "$AUTO_ENROLLMENT_SECRET" ]]; then
    error "AUTO_ENROLLMENT_KEY and AUTO_ENROLLMENT_SECRET must be set"
fi

if [[ -z "$PATCHMON_URL" ]]; then
    error "PATCHMON_URL must be set"
fi

# Check if running on Proxmox
if ! command -v pct &> /dev/null; then
    error "This script must run on a Proxmox host (pct command not found)"
fi

# Check for required commands
for cmd in curl jq; do
    if ! command -v $cmd &> /dev/null; then
        error "Required command '$cmd' not found. Please install it first."
    fi
done

info "Configuration validated successfully"
info "PatchMon Server: $PATCHMON_URL"
info "Dry Run Mode: $DRY_RUN"
info "Skip Stopped Containers: $SKIP_STOPPED"
echo ""

# ===== DISCOVER LXC CONTAINERS =====
info "Discovering LXC containers..."
lxc_list=$(pct list | tail -n +2) # Skip header

if [[ -z "$lxc_list" ]]; then
    warn "No LXC containers found on this Proxmox host"
    exit 0
fi

# Count containers
total_containers=$(echo "$lxc_list" | wc -l)
info "Found $total_containers LXC container(s)"
echo ""

info "Initializing statistics..."
# ===== STATISTICS =====
enrolled_count=0
skipped_count=0
failed_count=0

# Track containers with dpkg errors for later recovery
declare -A dpkg_error_containers

# Track all failed containers for summary
declare -A failed_containers
info "Statistics initialized"

# ===== PROCESS CONTAINERS =====
info "Starting container processing loop..."
while IFS= read -r line; do
    info "[DEBUG] Read line from lxc_list"
    vmid=$(echo "$line" | awk '{print $1}')
    status=$(echo "$line" | awk '{print $2}')
    name=$(echo "$line" | awk '{print $3}')

    info "Processing LXC $vmid: $name (status: $status)"

    # Skip stopped containers if configured
    if [[ "$status" != "running" ]] && [[ "$SKIP_STOPPED" == "true" ]]; then
        warn " Skipping $name - container not running"
        ((skipped_count++)) || true
        echo ""
        continue
    fi

    # Check if container is stopped
    if [[ "$status" != "running" ]]; then
        warn " Container $name is stopped - cannot gather info or install agent"
        ((skipped_count++)) || true
        echo ""
        continue
    fi

    # Get container details
    debug " Gathering container information..."
    hostname=$(timeout 5 pct exec "$vmid" -- hostname 2>/dev/null </dev/null || echo "$name")
    ip_address=$(timeout 5 pct exec "$vmid" -- hostname -I 2>/dev/null </dev/null | awk '{print $1}' || echo "unknown")
    os_info=$(timeout 5 pct exec "$vmid" -- cat /etc/os-release 2>/dev/null </dev/null | grep "^PRETTY_NAME=" | cut -d'"' -f2 || echo "unknown")

    # Get machine ID from container
    machine_id=$(timeout 5 pct exec "$vmid" -- bash -c "cat /etc/machine-id 2>/dev/null || cat /var/lib/dbus/machine-id 2>/dev/null || echo 'proxmox-lxc-$vmid-'$(cat /proc/sys/kernel/random/uuid)" </dev/null 2>/dev/null || echo "proxmox-lxc-$vmid-unknown")
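    # Fallback chain, as coded above: prefer /etc/machine-id, then the dbus
    # machine-id, then synthesize a unique 'proxmox-lxc-<vmid>-<uuid>' value so
    # enrollment still gets a machine_id when the container exposes neither file.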
    friendly_name="${HOST_PREFIX}${hostname}"

    info " Hostname: $hostname"
    info " IP Address: $ip_address"
    info " OS: $os_info"
    info " Machine ID: ${machine_id:0:16}..."

    if [[ "$DRY_RUN" == "true" ]]; then
        info " [DRY RUN] Would enroll: $friendly_name"
        ((enrolled_count++)) || true
        echo ""
        continue
    fi

    # Call PatchMon auto-enrollment API
    info " Enrolling $friendly_name in PatchMon..."

    response=$(curl $CURL_FLAGS -X POST \
        -H "X-Auto-Enrollment-Key: $AUTO_ENROLLMENT_KEY" \
        -H "X-Auto-Enrollment-Secret: $AUTO_ENROLLMENT_SECRET" \
        -H "Content-Type: application/json" \
        -d "{
            \"friendly_name\": \"$friendly_name\",
            \"machine_id\": \"$machine_id\",
            \"metadata\": {
                \"vmid\": \"$vmid\",
                \"proxmox_node\": \"$(hostname)\",
                \"ip_address\": \"$ip_address\",
                \"os_info\": \"$os_info\"
            }
        }" \
        "$PATCHMON_URL/api/v1/auto-enrollment/enroll" \
        -w "\n%{http_code}" 2>&1)
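    # curl's -w '\n%{http_code}' appends the HTTP status on its own line after
    # the body; tail -n 1 pulls the status back out and sed '$d' drops it.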
    http_code=$(echo "$response" | tail -n 1)
    body=$(echo "$response" | sed '$d')

    if [[ "$http_code" == "201" ]]; then
        # '// empty' makes a missing key yield an empty string (plain jq -r
        # would print the literal "null" and slip past the -z checks below)
        api_id=$(echo "$body" | jq -r '.host.api_id // empty' 2>/dev/null || echo "")
        api_key=$(echo "$body" | jq -r '.host.api_key // empty' 2>/dev/null || echo "")

        if [[ -z "$api_id" ]] || [[ -z "$api_key" ]]; then
            error " Failed to parse API credentials from response"
        fi

        info " ✓ Host enrolled successfully: $api_id"

        # Ensure curl is installed in the container
        info " Checking for curl in container..."
        curl_check=$(timeout 10 pct exec "$vmid" -- bash -c "command -v curl >/dev/null 2>&1 && echo 'installed' || echo 'missing'" 2>/dev/null </dev/null || echo "error")

        if [[ "$curl_check" == "missing" ]]; then
            info " Installing curl in container..."

            # Detect package manager and install curl
            curl_install_output=$(timeout 60 pct exec "$vmid" -- bash -c "
                if command -v apt-get >/dev/null 2>&1; then
                    export DEBIAN_FRONTEND=noninteractive
                    apt-get update -qq && apt-get install -y -qq curl
                elif command -v yum >/dev/null 2>&1; then
                    yum install -y -q curl
                elif command -v dnf >/dev/null 2>&1; then
                    dnf install -y -q curl
                elif command -v apk >/dev/null 2>&1; then
                    apk add --no-cache curl
                else
                    echo 'ERROR: No supported package manager found'
                    exit 1
                fi
            " 2>&1 </dev/null) || true

            if [[ "$curl_install_output" == *"ERROR: No supported package manager"* ]]; then
                warn " ✗ Could not install curl - no supported package manager found"
                failed_containers["$vmid"]="$friendly_name|No package manager for curl|$curl_install_output"
                ((failed_count++)) || true
                echo ""
                sleep 1
                continue
            else
                info " ✓ curl installed successfully"
            fi
        else
            info " ✓ curl already installed"
        fi

        # Install PatchMon agent in container
        info " Installing PatchMon agent..."

        # Build install URL with force flag if enabled
        install_url="$PATCHMON_URL/api/v1/hosts/install"
        if [[ "$FORCE_INSTALL" == "true" ]]; then
            install_url="$install_url?force=true"
            info " Using force mode - will bypass broken packages"
        fi

        # Reset exit code for this container
        install_exit_code=0

        # Download and execute in separate steps to avoid stdin issues with piping
        install_output=$(timeout 180 pct exec "$vmid" -- bash -c "
            cd /tmp
            curl $CURL_FLAGS \
                -H \"X-API-ID: $api_id\" \
                -H \"X-API-KEY: $api_key\" \
                -o patchmon-install.sh \
                '$install_url' && \
            bash patchmon-install.sh && \
            rm -f patchmon-install.sh
        " 2>&1 </dev/null) || install_exit_code=$?

        # Check both exit code AND success message in output for reliability
        if [[ $install_exit_code -eq 0 ]] || [[ "$install_output" == *"PatchMon Agent installation completed successfully"* ]]; then
            info " ✓ Agent installed successfully in $friendly_name"
            ((enrolled_count++)) || true
        elif [[ $install_exit_code -eq 124 ]]; then
            warn " ⏱ Agent installation timed out (>180s) in $friendly_name"
            info " Install output: $install_output"
            # Store failure details
            failed_containers["$vmid"]="$friendly_name|Timeout (>180s)|$install_output"
            ((failed_count++)) || true
        else
            # Check if it's a dpkg error
            if [[ "$install_output" == *"dpkg was interrupted"* ]] || [[ "$install_output" == *"dpkg --configure -a"* ]]; then
                warn " ⚠ Failed due to dpkg error in $friendly_name (can be fixed)"
                dpkg_error_containers["$vmid"]="$friendly_name:$api_id:$api_key"
                # Store failure details
                failed_containers["$vmid"]="$friendly_name|dpkg error|$install_output"
            else
                warn " ✗ Failed to install agent in $friendly_name (exit: $install_exit_code)"
                # Store failure details
                failed_containers["$vmid"]="$friendly_name|Exit code $install_exit_code|$install_output"
            fi
            info " Install output: $install_output"
            ((failed_count++)) || true
        fi

    elif [[ "$http_code" == "409" ]]; then
        warn " ⊘ Host $friendly_name already enrolled - skipping"
        ((skipped_count++)) || true
    elif [[ "$http_code" == "429" ]]; then
        # warn rather than error: error() exits the script, which would skip
        # the bookkeeping below and abandon the remaining containers
        warn " ✗ Rate limit exceeded - maximum hosts per day reached"
        failed_containers["$vmid"]="$friendly_name|Rate limit exceeded|$body"
        ((failed_count++)) || true
    else
        warn " ✗ Failed to enroll $friendly_name - HTTP $http_code"
        debug " Response: $body"
        failed_containers["$vmid"]="$friendly_name|HTTP $http_code enrollment failed|$body"
        ((failed_count++)) || true
    fi

    echo ""
    sleep 1 # Rate limiting between containers

done <<< "$lxc_list"

# ===== SUMMARY =====
echo ""
echo "╔═══════════════════════════════════════════════════════════════╗"
echo "║                      ENROLLMENT SUMMARY                       ║"
echo "╚═══════════════════════════════════════════════════════════════╝"
echo ""
info "Total Containers Found: $total_containers"
info "Successfully Enrolled: $enrolled_count"
info "Skipped: $skipped_count"
info "Failed: $failed_count"
echo ""

# ===== FAILURE DETAILS =====
if [[ ${#failed_containers[@]} -gt 0 ]]; then
    echo "╔═══════════════════════════════════════════════════════════════╗"
    echo "║                        FAILURE DETAILS                        ║"
    echo "╚═══════════════════════════════════════════════════════════════╝"
    echo ""

    for vmid in "${!failed_containers[@]}"; do
        IFS='|' read -r name reason output <<< "${failed_containers[$vmid]}"

        warn "Container $vmid: $name"
        info " Reason: $reason"
        info " Last 5 lines of output:"

        # Get last 5 lines of output
        last_5_lines=$(echo "$output" | tail -n 5)

        # Display each line with proper indentation
        while IFS= read -r line; do
            echo " $line"
        done <<< "$last_5_lines"

        echo ""
    done
fi

if [[ "$DRY_RUN" == "true" ]]; then
    warn "This was a DRY RUN - no actual changes were made"
    warn "Set DRY_RUN=false to perform actual enrollment"
fi

# ===== DPKG ERROR RECOVERY =====
if [[ ${#dpkg_error_containers[@]} -gt 0 ]]; then
    echo ""
    echo "╔═══════════════════════════════════════════════════════════════╗"
    echo "║                 DPKG ERROR RECOVERY AVAILABLE                 ║"
    echo "╚═══════════════════════════════════════════════════════════════╝"
    echo ""
    warn "Detected ${#dpkg_error_containers[@]} container(s) with dpkg errors:"
    for vmid in "${!dpkg_error_containers[@]}"; do
        IFS=':' read -r name api_id api_key <<< "${dpkg_error_containers[$vmid]}"
        info " • Container $vmid: $name"
    done
    echo ""

    # Ask user if they want to fix dpkg errors
    read -p "Would you like to fix dpkg errors and retry installation? (y/N): " -n 1 -r
    echo ""

    if [[ $REPLY =~ ^[Yy]$ ]]; then
        echo ""
        info "Starting dpkg recovery process..."
        echo ""

        recovered_count=0

        for vmid in "${!dpkg_error_containers[@]}"; do
            IFS=':' read -r name api_id api_key <<< "${dpkg_error_containers[$vmid]}"

            info "Fixing dpkg in container $vmid ($name)..."

            # Run dpkg --configure -a (capture the exit code explicitly;
            # a trailing '|| true' would make the success check below always pass)
            dpkg_exit_code=0
            dpkg_output=$(timeout 60 pct exec "$vmid" -- dpkg --configure -a 2>&1 </dev/null) || dpkg_exit_code=$?

            if [[ $dpkg_exit_code -eq 0 ]]; then
                info " ✓ dpkg fixed successfully"

                # Retry agent installation
                info " Retrying agent installation..."

                install_exit_code=0
                install_output=$(timeout 180 pct exec "$vmid" -- bash -c "
                    cd /tmp
                    curl $CURL_FLAGS \
                        -H \"X-API-ID: $api_id\" \
                        -H \"X-API-KEY: $api_key\" \
                        -o patchmon-install.sh \
                        '$PATCHMON_URL/api/v1/hosts/install' && \
                    bash patchmon-install.sh && \
                    rm -f patchmon-install.sh
                " 2>&1 </dev/null) || install_exit_code=$?

                if [[ $install_exit_code -eq 0 ]] || [[ "$install_output" == *"PatchMon Agent installation completed successfully"* ]]; then
                    info " ✓ Agent installed successfully in $name"
                    ((recovered_count++)) || true
                    ((enrolled_count++)) || true
                    ((failed_count--)) || true
                else
                    warn " ✗ Agent installation still failed (exit: $install_exit_code)"
                fi
            else
                warn " ✗ Failed to fix dpkg in $name"
                info " dpkg output: $dpkg_output"
            fi

            echo ""
        done

        echo ""
        info "Recovery complete: $recovered_count container(s) recovered"
        echo ""
    fi
fi

if [[ $failed_count -gt 0 ]]; then
    warn "Some containers failed to enroll. Check the logs above for details."
    exit 1
fi

info "Auto-enrollment complete! ✓"
exit 0
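For reference, the enrollment call the loop above performs can be reproduced standalone (hypothetical credentials and URL; headers and endpoint as used in the script). A 201 response carries host.api_id and host.api_key, which the script then feeds into the per-container agent install:

    curl -s -X POST \
      -H "X-Auto-Enrollment-Key: patchmon_key" \
      -H "X-Auto-Enrollment-Secret: secret" \
      -H "Content-Type: application/json" \
      -d '{"friendly_name": "lxc-101", "machine_id": "proxmox-lxc-101-test"}' \
      "https://patchmon.example.com/api/v1/auto-enrollment/enroll"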
@@ -1,5 +1,13 @@
# Database Configuration
DATABASE_URL="postgresql://patchmon_user:p@tchm0n_p@55@localhost:5432/patchmon_db"
PM_DB_CONN_MAX_ATTEMPTS=30
PM_DB_CONN_WAIT_INTERVAL=2

# Redis Configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=your-redis-password-here
REDIS_DB=0

# Server Configuration
PORT=3001
@@ -23,3 +31,14 @@ ENABLE_LOGGING=true

# User Registration
DEFAULT_USER_ROLE=user

# JWT Configuration
JWT_SECRET=your-secure-random-secret-key-change-this-in-production
JWT_EXPIRES_IN=1h
JWT_REFRESH_EXPIRES_IN=7d
SESSION_INACTIVITY_TIMEOUT_MINUTES=30

# TFA Configuration
TFA_REMEMBER_ME_EXPIRES_IN=30d
TFA_MAX_REMEMBER_SESSIONS=5
TFA_SUSPICIOUS_ACTIVITY_THRESHOLD=3
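JWT_SECRET above is a placeholder and must be replaced before production use; one common way to generate a suitably random value (assuming openssl is available):

    openssl rand -hex 64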
@@ -1,7 +1,8 @@
{
  "name": "patchmon-backend",
  "version": "1.2.6",
  "version": "1.2.9",
  "description": "Backend API for Linux Patch Monitoring System",
  "license": "AGPL-3.0",
  "main": "src/server.js",
  "scripts": {
    "dev": "nodemon src/server.js",
@@ -13,24 +14,31 @@
    "db:studio": "prisma studio"
  },
  "dependencies": {
    "@prisma/client": "^5.7.0",
    "@bull-board/api": "^6.13.1",
    "@bull-board/express": "^6.13.1",
    "@prisma/client": "^6.1.0",
    "bcryptjs": "^2.4.3",
    "bullmq": "^5.61.0",
    "cookie-parser": "^1.4.7",
    "cors": "^2.8.5",
    "dotenv": "^16.3.1",
    "express": "^4.18.2",
    "express-rate-limit": "^7.1.5",
    "express-validator": "^7.0.1",
    "helmet": "^7.1.0",
    "dotenv": "^16.4.7",
    "express": "^5.0.0",
    "express-rate-limit": "^7.5.0",
    "express-validator": "^7.2.0",
    "helmet": "^8.0.0",
    "ioredis": "^5.8.1",
    "jsonwebtoken": "^9.0.2",
    "moment": "^2.30.1",
    "qrcode": "^1.5.4",
    "speakeasy": "^2.0.0",
    "uuid": "^9.0.1",
    "winston": "^3.11.0"
    "uuid": "^11.0.3",
    "winston": "^3.17.0",
    "ws": "^8.18.0"
  },
  "devDependencies": {
    "nodemon": "^3.0.2",
    "prisma": "^5.7.0"
    "@types/bcryptjs": "^2.4.6",
    "nodemon": "^3.1.9",
    "prisma": "^6.1.0"
  },
  "engines": {
    "node": ">=18.0.0"
@@ -0,0 +1,10 @@
-- Fix dashboard preferences unique constraint
-- This migration fixes the unique constraint on dashboard_preferences table

-- Drop existing indexes if they exist
DROP INDEX IF EXISTS "dashboard_preferences_card_id_key";
DROP INDEX IF EXISTS "dashboard_preferences_user_id_card_id_key";
DROP INDEX IF EXISTS "dashboard_preferences_user_id_key";

-- Add the correct unique constraint
ALTER TABLE "dashboard_preferences" ADD CONSTRAINT "dashboard_preferences_user_id_card_id_key" UNIQUE ("user_id", "card_id");
@@ -0,0 +1,2 @@
-- DropTable
DROP TABLE "agent_versions";
@@ -0,0 +1,4 @@
-- Add ignore_ssl_self_signed column to settings table
-- This allows users to configure whether curl commands should ignore SSL certificate validation

ALTER TABLE "settings" ADD COLUMN "ignore_ssl_self_signed" BOOLEAN NOT NULL DEFAULT false;
@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "hosts" ADD COLUMN "notes" TEXT;

@@ -0,0 +1,37 @@
-- CreateTable
CREATE TABLE "auto_enrollment_tokens" (
    "id" TEXT NOT NULL,
    "token_name" TEXT NOT NULL,
    "token_key" TEXT NOT NULL,
    "token_secret" TEXT NOT NULL,
    "created_by_user_id" TEXT,
    "is_active" BOOLEAN NOT NULL DEFAULT true,
    "allowed_ip_ranges" TEXT[],
    "max_hosts_per_day" INTEGER NOT NULL DEFAULT 100,
    "hosts_created_today" INTEGER NOT NULL DEFAULT 0,
    "last_reset_date" DATE NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "default_host_group_id" TEXT,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,
    "last_used_at" TIMESTAMP(3),
    "expires_at" TIMESTAMP(3),
    "metadata" JSONB,

    CONSTRAINT "auto_enrollment_tokens_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "auto_enrollment_tokens_token_key_key" ON "auto_enrollment_tokens"("token_key");

-- CreateIndex
CREATE INDEX "auto_enrollment_tokens_token_key_idx" ON "auto_enrollment_tokens"("token_key");

-- CreateIndex
CREATE INDEX "auto_enrollment_tokens_is_active_idx" ON "auto_enrollment_tokens"("is_active");

-- AddForeignKey
ALTER TABLE "auto_enrollment_tokens" ADD CONSTRAINT "auto_enrollment_tokens_created_by_user_id_fkey" FOREIGN KEY ("created_by_user_id") REFERENCES "users"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "auto_enrollment_tokens" ADD CONSTRAINT "auto_enrollment_tokens_default_host_group_id_fkey" FOREIGN KEY ("default_host_group_id") REFERENCES "host_groups"("id") ON DELETE SET NULL ON UPDATE CASCADE;

@@ -0,0 +1,20 @@
-- Add machine_id column as nullable first
ALTER TABLE "hosts" ADD COLUMN "machine_id" TEXT;

-- Generate machine_ids for existing hosts using their API ID as a fallback
UPDATE "hosts" SET "machine_id" = 'migrated-' || "api_id" WHERE "machine_id" IS NULL;

-- Remove the unique constraint from friendly_name
ALTER TABLE "hosts" DROP CONSTRAINT IF EXISTS "hosts_friendly_name_key";

-- Also drop the unique index if it exists (constraint and index can exist separately)
DROP INDEX IF EXISTS "hosts_friendly_name_key";

-- Now make machine_id NOT NULL and add unique constraint
ALTER TABLE "hosts" ALTER COLUMN "machine_id" SET NOT NULL;
ALTER TABLE "hosts" ADD CONSTRAINT "hosts_machine_id_key" UNIQUE ("machine_id");

-- Create indexes for better query performance
CREATE INDEX "hosts_machine_id_idx" ON "hosts"("machine_id");
CREATE INDEX "hosts_friendly_name_idx" ON "hosts"("friendly_name");

@@ -0,0 +1,4 @@
-- AddLogoFieldsToSettings
ALTER TABLE "settings" ADD COLUMN "logo_dark" VARCHAR(255) DEFAULT '/assets/logo_dark.png';
ALTER TABLE "settings" ADD COLUMN "logo_light" VARCHAR(255) DEFAULT '/assets/logo_light.png';
ALTER TABLE "settings" ADD COLUMN "favicon" VARCHAR(255) DEFAULT '/assets/logo_square.svg';
@@ -0,0 +1,64 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp

DO $$
DECLARE
    table_exists boolean := false;
    migration_exists boolean := false;
BEGIN
    -- Check if user_sessions table exists
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) INTO table_exists;

    -- Check if the migration record already exists
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
    ) INTO migration_exists;

    -- If table exists but no migration record, create one
    IF table_exists AND NOT migration_exists THEN
        RAISE NOTICE 'Table exists but no migration record found - creating migration record for 1.2.7 upgrade';

        -- Insert a successful migration record for the existing table
        INSERT INTO _prisma_migrations (
            id,
            checksum,
            finished_at,
            migration_name,
            logs,
            rolled_back_at,
            started_at,
            applied_steps_count
        ) VALUES (
            gen_random_uuid()::text,
            '', -- Empty checksum since we're reconciling
            NOW(),
            '20251005000000_add_user_sessions',
            'Reconciled from 1.2.7 - table already exists',
            NULL,
            NOW(),
            1
        );

        RAISE NOTICE 'Migration record created for existing table';
    ELSIF table_exists AND migration_exists THEN
        RAISE NOTICE 'Table exists and migration record exists - no action needed';
    ELSE
        RAISE NOTICE 'Table does not exist - migration will proceed normally';
    END IF;

    -- Additional check: If we have any old migration names, update them
    IF EXISTS (SELECT 1 FROM _prisma_migrations WHERE migration_name = 'add_user_sessions') THEN
        RAISE NOTICE 'Found old migration name - updating to new format';
        UPDATE _prisma_migrations
        SET migration_name = '20251005000000_add_user_sessions'
        WHERE migration_name = 'add_user_sessions';
        RAISE NOTICE 'Old migration name updated';
    END IF;

END $$;
@@ -0,0 +1,96 @@
-- Reconcile user_sessions migration from 1.2.7 to 1.2.8+
-- This migration handles the case where 1.2.7 had 'add_user_sessions' without timestamp
-- and 1.2.8+ renamed it to '20251005000000_add_user_sessions' with timestamp

DO $$
DECLARE
    old_migration_exists boolean := false;
    table_exists boolean := false;
    failed_migration_exists boolean := false;
BEGIN
    -- Check if the old migration name exists
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = 'add_user_sessions'
    ) INTO old_migration_exists;

    -- Check if user_sessions table exists
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) INTO table_exists;

    -- Check if there's a failed migration attempt
    SELECT EXISTS (
        SELECT 1 FROM _prisma_migrations
        WHERE migration_name = '20251005000000_add_user_sessions'
        AND finished_at IS NULL
    ) INTO failed_migration_exists;

    -- Scenario 1: Old migration exists, table exists, no failed migration
    -- This means 1.2.7 was installed and we need to update the migration name
    IF old_migration_exists AND table_exists AND NOT failed_migration_exists THEN
        RAISE NOTICE 'Found 1.2.7 migration "add_user_sessions" - updating to timestamped version';

        -- Update the old migration name to the new timestamped version
        UPDATE _prisma_migrations
        SET migration_name = '20251005000000_add_user_sessions'
        WHERE migration_name = 'add_user_sessions';

        RAISE NOTICE 'Migration name updated: add_user_sessions -> 20251005000000_add_user_sessions';
    END IF;

    -- Scenario 2: Failed migration exists (upgrade attempt gone wrong)
    IF failed_migration_exists THEN
        RAISE NOTICE 'Found failed migration attempt - cleaning up';

        -- If table exists, it means the migration partially succeeded
        IF table_exists THEN
            RAISE NOTICE 'Table exists - marking migration as applied';

            -- Delete the failed migration record
            DELETE FROM _prisma_migrations
            WHERE migration_name = '20251005000000_add_user_sessions'
            AND finished_at IS NULL;

            -- Insert a successful migration record
            INSERT INTO _prisma_migrations (
                id,
                checksum,
                finished_at,
                migration_name,
                logs,
                rolled_back_at,
                started_at,
                applied_steps_count
            ) VALUES (
                gen_random_uuid()::text,
                '', -- Empty checksum since we're reconciling
                NOW(),
                '20251005000000_add_user_sessions',
                NULL,
                NULL,
                NOW(),
                1
            );

            RAISE NOTICE 'Migration marked as successfully applied';
        ELSE
            RAISE NOTICE 'Table does not exist - removing failed migration to allow retry';

            -- Just delete the failed migration to allow it to retry
            DELETE FROM _prisma_migrations
            WHERE migration_name = '20251005000000_add_user_sessions'
            AND finished_at IS NULL;

            RAISE NOTICE 'Failed migration removed - will retry on next migration run';
        END IF;
    END IF;

    -- Scenario 3: Everything is clean (fresh install or already reconciled)
    IF NOT old_migration_exists AND NOT failed_migration_exists THEN
        RAISE NOTICE 'No migration reconciliation needed';
    END IF;

END $$;
@@ -0,0 +1,106 @@
-- CreateTable (with existence check for 1.2.7 compatibility)
DO $$
BEGIN
    -- Check if table already exists (from 1.2.7 installation)
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = 'user_sessions'
    ) THEN
        -- Table doesn't exist, create it
        CREATE TABLE "user_sessions" (
            "id" TEXT NOT NULL,
            "user_id" TEXT NOT NULL,
            "refresh_token" TEXT NOT NULL,
            "access_token_hash" TEXT,
            "ip_address" TEXT,
            "user_agent" TEXT,
            "last_activity" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
            "expires_at" TIMESTAMP(3) NOT NULL,
            "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
            "is_revoked" BOOLEAN NOT NULL DEFAULT false,

            CONSTRAINT "user_sessions_pkey" PRIMARY KEY ("id")
        );

        RAISE NOTICE 'Created user_sessions table';
    ELSE
        RAISE NOTICE 'user_sessions table already exists, skipping creation';
    END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_indexes
        WHERE tablename = 'user_sessions'
        AND indexname = 'user_sessions_refresh_token_key'
    ) THEN
        CREATE UNIQUE INDEX "user_sessions_refresh_token_key" ON "user_sessions"("refresh_token");
        RAISE NOTICE 'Created user_sessions_refresh_token_key index';
    ELSE
        RAISE NOTICE 'user_sessions_refresh_token_key index already exists, skipping';
    END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_indexes
        WHERE tablename = 'user_sessions'
        AND indexname = 'user_sessions_user_id_idx'
    ) THEN
        CREATE INDEX "user_sessions_user_id_idx" ON "user_sessions"("user_id");
        RAISE NOTICE 'Created user_sessions_user_id_idx index';
    ELSE
        RAISE NOTICE 'user_sessions_user_id_idx index already exists, skipping';
    END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_indexes
        WHERE tablename = 'user_sessions'
        AND indexname = 'user_sessions_refresh_token_idx'
    ) THEN
        CREATE INDEX "user_sessions_refresh_token_idx" ON "user_sessions"("refresh_token");
        RAISE NOTICE 'Created user_sessions_refresh_token_idx index';
    ELSE
        RAISE NOTICE 'user_sessions_refresh_token_idx index already exists, skipping';
    END IF;
END $$;

-- CreateIndex (with existence check)
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_indexes
        WHERE tablename = 'user_sessions'
        AND indexname = 'user_sessions_expires_at_idx'
    ) THEN
        CREATE INDEX "user_sessions_expires_at_idx" ON "user_sessions"("expires_at");
        RAISE NOTICE 'Created user_sessions_expires_at_idx index';
    ELSE
        RAISE NOTICE 'user_sessions_expires_at_idx index already exists, skipping';
    END IF;
END $$;

-- AddForeignKey (with existence check)
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.table_constraints
        WHERE table_name = 'user_sessions'
        AND constraint_name = 'user_sessions_user_id_fkey'
    ) THEN
        ALTER TABLE "user_sessions" ADD CONSTRAINT "user_sessions_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
        RAISE NOTICE 'Created user_sessions_user_id_fkey foreign key';
    ELSE
        RAISE NOTICE 'user_sessions_user_id_fkey foreign key already exists, skipping';
    END IF;
END $$;

@@ -0,0 +1,6 @@
-- Add TFA remember me fields to user_sessions table
ALTER TABLE "user_sessions" ADD COLUMN "tfa_remember_me" BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE "user_sessions" ADD COLUMN "tfa_bypass_until" TIMESTAMP(3);

-- Create index for TFA bypass until field for efficient querying
CREATE INDEX "user_sessions_tfa_bypass_until_idx" ON "user_sessions"("tfa_bypass_until");
@@ -0,0 +1,7 @@
-- Add security fields to user_sessions table for production-ready remember me
ALTER TABLE "user_sessions" ADD COLUMN "device_fingerprint" TEXT;
ALTER TABLE "user_sessions" ADD COLUMN "login_count" INTEGER NOT NULL DEFAULT 1;
ALTER TABLE "user_sessions" ADD COLUMN "last_login_ip" TEXT;

-- Create index for device fingerprint for efficient querying
CREATE INDEX "user_sessions_device_fingerprint_idx" ON "user_sessions"("device_fingerprint");
@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "update_history" ADD COLUMN "total_packages" INTEGER;

@@ -0,0 +1,4 @@
-- AlterTable
ALTER TABLE "update_history" ADD COLUMN "payload_size_kb" DOUBLE PRECISION;
ALTER TABLE "update_history" ADD COLUMN "execution_time" DOUBLE PRECISION;

@@ -0,0 +1,30 @@
-- Add indexes to host_packages table for performance optimization
-- These indexes will dramatically speed up queries filtering by host_id, package_id, needs_update, and is_security_update

-- Index for queries filtering by host_id (very common - used when viewing packages for a specific host)
CREATE INDEX IF NOT EXISTS "host_packages_host_id_idx" ON "host_packages"("host_id");

-- Index for queries filtering by package_id (used when finding hosts for a specific package)
CREATE INDEX IF NOT EXISTS "host_packages_package_id_idx" ON "host_packages"("package_id");

-- Index for queries filtering by needs_update (used when finding outdated packages)
CREATE INDEX IF NOT EXISTS "host_packages_needs_update_idx" ON "host_packages"("needs_update");

-- Index for queries filtering by is_security_update (used when finding security updates)
CREATE INDEX IF NOT EXISTS "host_packages_is_security_update_idx" ON "host_packages"("is_security_update");

-- Composite index for the most common query pattern: host_id + needs_update
-- This is optimal for "show me outdated packages for this host"
CREATE INDEX IF NOT EXISTS "host_packages_host_id_needs_update_idx" ON "host_packages"("host_id", "needs_update");

-- Composite index for host_id + needs_update + is_security_update
-- This is optimal for "show me security updates for this host"
CREATE INDEX IF NOT EXISTS "host_packages_host_id_needs_update_security_idx" ON "host_packages"("host_id", "needs_update", "is_security_update");

-- Index for queries filtering by package_id + needs_update
-- This is optimal for "show me hosts where this package needs updates"
CREATE INDEX IF NOT EXISTS "host_packages_package_id_needs_update_idx" ON "host_packages"("package_id", "needs_update");

-- Index on last_checked for cleanup/maintenance queries
CREATE INDEX IF NOT EXISTS "host_packages_last_checked_idx" ON "host_packages"("last_checked");

@@ -0,0 +1,94 @@
-- CreateTable
CREATE TABLE "docker_images" (
    "id" TEXT NOT NULL,
    "repository" TEXT NOT NULL,
    "tag" TEXT NOT NULL DEFAULT 'latest',
    "image_id" TEXT NOT NULL,
    "digest" TEXT,
    "size_bytes" BIGINT,
    "source" TEXT NOT NULL DEFAULT 'docker-hub',
    "created_at" TIMESTAMP(3) NOT NULL,
    "last_pulled" TIMESTAMP(3),
    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "docker_images_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "docker_containers" (
    "id" TEXT NOT NULL,
    "host_id" TEXT NOT NULL,
    "container_id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "image_id" TEXT,
    "image_name" TEXT NOT NULL,
    "image_tag" TEXT NOT NULL DEFAULT 'latest',
    "status" TEXT NOT NULL,
    "state" TEXT,
    "ports" JSONB,
    "created_at" TIMESTAMP(3) NOT NULL,
    "started_at" TIMESTAMP(3),
    "updated_at" TIMESTAMP(3) NOT NULL,
    "last_checked" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "docker_containers_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "docker_image_updates" (
    "id" TEXT NOT NULL,
    "image_id" TEXT NOT NULL,
    "current_tag" TEXT NOT NULL,
    "available_tag" TEXT NOT NULL,
    "is_security_update" BOOLEAN NOT NULL DEFAULT false,
    "severity" TEXT,
    "changelog_url" TEXT,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "docker_image_updates_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "docker_images_repository_idx" ON "docker_images"("repository");

-- CreateIndex
CREATE INDEX "docker_images_source_idx" ON "docker_images"("source");

-- CreateIndex
CREATE INDEX "docker_images_repository_tag_idx" ON "docker_images"("repository", "tag");

-- CreateIndex
CREATE UNIQUE INDEX "docker_images_repository_tag_image_id_key" ON "docker_images"("repository", "tag", "image_id");

-- CreateIndex
CREATE INDEX "docker_containers_host_id_idx" ON "docker_containers"("host_id");

-- CreateIndex
CREATE INDEX "docker_containers_image_id_idx" ON "docker_containers"("image_id");

-- CreateIndex
CREATE INDEX "docker_containers_status_idx" ON "docker_containers"("status");

-- CreateIndex
CREATE INDEX "docker_containers_name_idx" ON "docker_containers"("name");

-- CreateIndex
CREATE UNIQUE INDEX "docker_containers_host_id_container_id_key" ON "docker_containers"("host_id", "container_id");

-- CreateIndex
CREATE INDEX "docker_image_updates_image_id_idx" ON "docker_image_updates"("image_id");

-- CreateIndex
CREATE INDEX "docker_image_updates_is_security_update_idx" ON "docker_image_updates"("is_security_update");

-- CreateIndex
CREATE UNIQUE INDEX "docker_image_updates_image_id_available_tag_key" ON "docker_image_updates"("image_id", "available_tag");

-- AddForeignKey
ALTER TABLE "docker_containers" ADD CONSTRAINT "docker_containers_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "docker_image_updates" ADD CONSTRAINT "docker_image_updates_image_id_fkey" FOREIGN KEY ("image_id") REFERENCES "docker_images"("id") ON DELETE CASCADE ON UPDATE CASCADE;

@@ -0,0 +1,40 @@
-- CreateTable
CREATE TABLE "job_history" (
    "id" TEXT NOT NULL,
    "job_id" TEXT NOT NULL,
    "queue_name" TEXT NOT NULL,
    "job_name" TEXT NOT NULL,
    "host_id" TEXT,
    "api_id" TEXT,
    "status" TEXT NOT NULL,
    "attempt_number" INTEGER NOT NULL DEFAULT 1,
    "error_message" TEXT,
    "output" JSONB,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMP(3) NOT NULL,
    "completed_at" TIMESTAMP(3),

    CONSTRAINT "job_history_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "job_history_job_id_idx" ON "job_history"("job_id");

-- CreateIndex
CREATE INDEX "job_history_queue_name_idx" ON "job_history"("queue_name");

-- CreateIndex
CREATE INDEX "job_history_host_id_idx" ON "job_history"("host_id");

-- CreateIndex
CREATE INDEX "job_history_api_id_idx" ON "job_history"("api_id");

-- CreateIndex
CREATE INDEX "job_history_status_idx" ON "job_history"("status");

-- CreateIndex
CREATE INDEX "job_history_created_at_idx" ON "job_history"("created_at");

-- AddForeignKey
ALTER TABLE "job_history" ADD CONSTRAINT "job_history_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE SET NULL ON UPDATE CASCADE;

@@ -0,0 +1,43 @@
-- CreateTable
CREATE TABLE "host_group_memberships" (
    "id" TEXT NOT NULL,
    "host_id" TEXT NOT NULL,
    "host_group_id" TEXT NOT NULL,
    "created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "host_group_memberships_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "host_group_memberships_host_id_host_group_id_key" ON "host_group_memberships"("host_id", "host_group_id");

-- CreateIndex
CREATE INDEX "host_group_memberships_host_id_idx" ON "host_group_memberships"("host_id");

-- CreateIndex
CREATE INDEX "host_group_memberships_host_group_id_idx" ON "host_group_memberships"("host_group_id");

-- Migrate existing data from hosts.host_group_id to host_group_memberships
INSERT INTO "host_group_memberships" ("id", "host_id", "host_group_id", "created_at")
SELECT
    gen_random_uuid()::text as "id",
    "id" as "host_id",
    "host_group_id" as "host_group_id",
    CURRENT_TIMESTAMP as "created_at"
FROM "hosts"
WHERE "host_group_id" IS NOT NULL;

-- AddForeignKey
ALTER TABLE "host_group_memberships" ADD CONSTRAINT "host_group_memberships_host_id_fkey" FOREIGN KEY ("host_id") REFERENCES "hosts"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "host_group_memberships" ADD CONSTRAINT "host_group_memberships_host_group_id_fkey" FOREIGN KEY ("host_group_id") REFERENCES "host_groups"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- DropForeignKey
ALTER TABLE "hosts" DROP CONSTRAINT IF EXISTS "hosts_host_group_id_fkey";

-- DropIndex
DROP INDEX IF EXISTS "hosts_host_group_id_idx";

-- AlterTable
ALTER TABLE "hosts" DROP COLUMN "host_group_id";
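These raw SQL migrations are tracked through Prisma's _prisma_migrations table (the reconciliation blocks above manipulate it directly), so after an upgrade the applied state can be checked with the standard Prisma CLI — a sketch, run from the backend package:

    npx prisma migrate status    # compares _prisma_migrations against the migrations directory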
@@ -7,19 +7,6 @@ datasource db {
|
||||
url = env("DATABASE_URL")
|
||||
}
|
||||
|
||||
model agent_versions {
|
||||
id String @id
|
||||
version String @unique
|
||||
is_current Boolean @default(false)
|
||||
release_notes String?
|
||||
download_url String?
|
||||
min_server_version String?
|
||||
created_at DateTime @default(now())
|
||||
updated_at DateTime
|
||||
is_default Boolean @default(false)
|
||||
script_content String?
|
||||
}
|
||||
|
||||
model dashboard_preferences {
|
||||
id String @id
|
||||
user_id String
|
||||
@@ -40,7 +27,21 @@ model host_groups {
|
||||
color String? @default("#3B82F6")
|
||||
created_at DateTime @default(now())
|
||||
updated_at DateTime
|
||||
hosts hosts[]
|
||||
host_group_memberships host_group_memberships[]
|
||||
auto_enrollment_tokens auto_enrollment_tokens[]
|
||||
}
|
||||
|
||||
model host_group_memberships {
|
||||
id String @id
|
||||
host_id String
|
||||
host_group_id String
|
||||
created_at DateTime @default(now())
|
||||
hosts hosts @relation(fields: [host_id], references: [id], onDelete: Cascade)
|
||||
host_groups host_groups @relation(fields: [host_group_id], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([host_id, host_group_id])
|
||||
@@index([host_id])
|
||||
@@index([host_group_id])
|
||||
}
|
||||
|
||||
model host_packages {
|
||||
@@ -56,6 +57,14 @@ model host_packages {
|
||||
packages packages @relation(fields: [package_id], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([host_id, package_id])
|
||||
@@index([host_id])
|
||||
@@index([package_id])
|
||||
@@index([needs_update])
|
||||
@@index([is_security_update])
|
||||
@@index([host_id, needs_update])
|
||||
@@index([host_id, needs_update, is_security_update])
|
||||
@@index([package_id, needs_update])
|
||||
@@index([last_checked])
|
||||
}
|
||||
|
||||
model host_repositories {
|
||||
@@ -72,7 +81,8 @@ model host_repositories {
|
||||
|
||||
model hosts {
|
||||
id String @id
|
||||
friendly_name String @unique
|
||||
machine_id String @unique
|
||||
friendly_name String
|
||||
ip String?
|
||||
os_type String
|
||||
os_version String
|
||||
@@ -83,7 +93,6 @@ model hosts {
|
||||
updated_at DateTime
|
||||
api_id String @unique
|
||||
api_key String @unique
|
||||
host_group_id String?
|
||||
agent_version String?
|
||||
auto_update Boolean @default(true)
|
||||
cpu_cores Int?
|
||||
@@ -99,10 +108,16 @@ model hosts {
|
||||
selinux_status String?
|
||||
swap_size Int?
|
||||
system_uptime String?
|
||||
notes String?
|
||||
host_packages host_packages[]
|
||||
host_repositories host_repositories[]
|
||||
host_groups host_groups? @relation(fields: [host_group_id], references: [id])
|
||||
host_group_memberships host_group_memberships[]
|
||||
update_history update_history[]
|
||||
job_history job_history[]
|
||||
|
||||
@@index([machine_id])
|
||||
@@index([friendly_name])
|
||||
@@index([hostname])
|
||||
}
|
||||
|
||||
model packages {
|
||||
@@ -114,6 +129,9 @@ model packages {
|
||||
created_at DateTime @default(now())
|
||||
updated_at DateTime
|
||||
host_packages host_packages[]
|
||||
|
||||
@@index([name])
|
||||
@@index([category])
|
||||
}
|
||||
|
||||
model repositories {
|
||||
@@ -169,6 +187,10 @@ model settings {
update_available Boolean @default(false)
signup_enabled Boolean @default(false)
default_user_role String @default("user")
ignore_ssl_self_signed Boolean @default(false)
logo_dark String? @default("/assets/logo_dark.png")
logo_light String? @default("/assets/logo_light.png")
favicon String? @default("/assets/logo_square.svg")
}

model update_history {
@@ -176,6 +198,9 @@ model update_history {
host_id String
packages_count Int
security_count Int
total_packages Int?
payload_size_kb Float?
execution_time Float?
timestamp DateTime @default(now())
status String @default("success")
error_message String?
@@ -187,8 +212,6 @@ model users {
username String @unique
email String @unique
password_hash String
first_name String?
last_name String?
role String @default("admin")
is_active Boolean @default(true)
last_login DateTime?
@@ -197,5 +220,144 @@ model users {
tfa_backup_codes String?
tfa_enabled Boolean @default(false)
tfa_secret String?
first_name String?
last_name String?
dashboard_preferences dashboard_preferences[]
user_sessions user_sessions[]
auto_enrollment_tokens auto_enrollment_tokens[]
}

model user_sessions {
id String @id
user_id String
refresh_token String @unique
access_token_hash String?
ip_address String?
user_agent String?
device_fingerprint String?
last_activity DateTime @default(now())
expires_at DateTime
created_at DateTime @default(now())
is_revoked Boolean @default(false)
tfa_remember_me Boolean @default(false)
tfa_bypass_until DateTime?
login_count Int @default(1)
last_login_ip String?
users users @relation(fields: [user_id], references: [id], onDelete: Cascade)

@@index([user_id])
@@index([refresh_token])
@@index([expires_at])
@@index([tfa_bypass_until])
@@index([device_fingerprint])
}

model auto_enrollment_tokens {
id String @id
token_name String
token_key String @unique
token_secret String
created_by_user_id String?
is_active Boolean @default(true)
allowed_ip_ranges String[]
max_hosts_per_day Int @default(100)
hosts_created_today Int @default(0)
last_reset_date DateTime @default(now()) @db.Date
default_host_group_id String?
created_at DateTime @default(now())
updated_at DateTime
last_used_at DateTime?
expires_at DateTime?
metadata Json?
users users? @relation(fields: [created_by_user_id], references: [id], onDelete: SetNull)
host_groups host_groups? @relation(fields: [default_host_group_id], references: [id], onDelete: SetNull)

@@index([token_key])
@@index([is_active])
}

model docker_containers {
id String @id
host_id String
container_id String
name String
image_id String?
image_name String
image_tag String @default("latest")
status String
state String?
ports Json?
created_at DateTime
started_at DateTime?
updated_at DateTime
last_checked DateTime @default(now())
docker_images docker_images? @relation(fields: [image_id], references: [id], onDelete: SetNull)

@@unique([host_id, container_id])
@@index([host_id])
@@index([image_id])
@@index([status])
@@index([name])
}

model docker_images {
id String @id
repository String
tag String @default("latest")
image_id String
digest String?
size_bytes BigInt?
source String @default("docker-hub")
created_at DateTime
last_pulled DateTime?
last_checked DateTime @default(now())
updated_at DateTime
docker_containers docker_containers[]
docker_image_updates docker_image_updates[]

@@unique([repository, tag, image_id])
@@index([repository])
@@index([source])
@@index([repository, tag])
}

model docker_image_updates {
id String @id
image_id String
current_tag String
available_tag String
is_security_update Boolean @default(false)
severity String?
changelog_url String?
created_at DateTime @default(now())
updated_at DateTime
docker_images docker_images @relation(fields: [image_id], references: [id], onDelete: Cascade)

@@unique([image_id, available_tag])
@@index([image_id])
@@index([is_security_update])
}

model job_history {
id String @id
job_id String
queue_name String
job_name String
host_id String?
api_id String?
status String
attempt_number Int @default(1)
error_message String?
output Json?
created_at DateTime @default(now())
updated_at DateTime
completed_at DateTime?
hosts hosts? @relation(fields: [host_id], references: [id], onDelete: SetNull)

@@index([job_id])
@@index([queue_name])
@@index([host_id])
@@index([api_id])
@@index([status])
@@index([created_at])
}

@@ -3,25 +3,25 @@
* Optimizes connection pooling to prevent "too many connections" errors
*/

const { PrismaClient } = require('@prisma/client');
const { PrismaClient } = require("@prisma/client");

// Parse DATABASE_URL and add connection pooling parameters
function getOptimizedDatabaseUrl() {
const originalUrl = process.env.DATABASE_URL;

if (!originalUrl) {
throw new Error('DATABASE_URL environment variable is required');
throw new Error("DATABASE_URL environment variable is required");
}

// Parse the URL
const url = new URL(originalUrl);

// Add connection pooling parameters for multiple instances
url.searchParams.set('connection_limit', '5'); // Reduced from default 10
url.searchParams.set('pool_timeout', '10'); // 10 seconds
url.searchParams.set('connect_timeout', '10'); // 10 seconds
url.searchParams.set('idle_timeout', '300'); // 5 minutes
url.searchParams.set('max_lifetime', '1800'); // 30 minutes
url.searchParams.set("connection_limit", "5"); // Reduced from default 10
url.searchParams.set("pool_timeout", "10"); // 10 seconds
url.searchParams.set("connect_timeout", "10"); // 10 seconds
url.searchParams.set("idle_timeout", "300"); // 5 minutes
url.searchParams.set("max_lifetime", "1800"); // 30 minutes

return url.toString();
}
@@ -33,13 +33,14 @@ function createPrismaClient() {
return new PrismaClient({
datasources: {
db: {
url: optimizedUrl
}
url: optimizedUrl,
},
log: process.env.NODE_ENV === 'development'
? ['query', 'info', 'warn', 'error']
: ['warn', 'error'],
errorFormat: 'pretty'
},
log:
process.env.PRISMA_LOG_QUERIES === "true"
? ["query", "info", "warn", "error"]
: ["warn", "error"],
errorFormat: "pretty",
});
}

@@ -49,36 +50,56 @@ async function checkDatabaseConnection(prisma) {
await prisma.$queryRaw`SELECT 1`;
return true;
} catch (error) {
console.error('Database connection failed:', error.message);
console.error("Database connection failed:", error.message);
return false;
}
}

// Wait for database to be available with retry logic
async function waitForDatabase(prisma, options = {}) {
const maxAttempts = options.maxAttempts || parseInt(process.env.PM_DB_CONN_MAX_ATTEMPTS) || 30;
const waitInterval = options.waitInterval || parseInt(process.env.PM_DB_CONN_WAIT_INTERVAL) || 2;
const maxAttempts =
options.maxAttempts ||
parseInt(process.env.PM_DB_CONN_MAX_ATTEMPTS, 10) ||
30;
const waitInterval =
options.waitInterval ||
parseInt(process.env.PM_DB_CONN_WAIT_INTERVAL, 10) ||
2;

console.log(`Waiting for database connection (max ${maxAttempts} attempts, ${waitInterval}s interval)...`);
if (process.env.ENABLE_LOGGING === "true") {
console.log(
`Waiting for database connection (max ${maxAttempts} attempts, ${waitInterval}s interval)...`,
);
}

for (let attempt = 1; attempt <= maxAttempts; attempt++) {
try {
const isConnected = await checkDatabaseConnection(prisma);
if (isConnected) {
console.log(`Database connected successfully after ${attempt} attempt(s)`);
if (process.env.ENABLE_LOGGING === "true") {
console.log(
`Database connected successfully after ${attempt} attempt(s)`,
);
}
return true;
}
} catch (error) {
} catch {
// checkDatabaseConnection already logs the error
}

if (attempt < maxAttempts) {
console.log(`⏳ Database not ready (attempt ${attempt}/${maxAttempts}), retrying in ${waitInterval}s...`);
await new Promise(resolve => setTimeout(resolve, waitInterval * 1000));
if (process.env.ENABLE_LOGGING === "true") {
console.log(
`⏳ Database not ready (attempt ${attempt}/${maxAttempts}), retrying in ${waitInterval}s...`,
);
}
await new Promise((resolve) => setTimeout(resolve, waitInterval * 1000));
}
}

throw new Error(`❌ Database failed to become available after ${maxAttempts} attempts`);
throw new Error(
`❌ Database failed to become available after ${maxAttempts} attempts`,
);
}

// Graceful disconnect with retry
@@ -86,14 +107,14 @@ async function disconnectPrisma(prisma, maxRetries = 3) {
for (let i = 0; i < maxRetries; i++) {
try {
await prisma.$disconnect();
console.log('Database disconnected successfully');
console.log("Database disconnected successfully");
return;
} catch (error) {
console.error(`Disconnect attempt ${i + 1} failed:`, error.message);
if (i === maxRetries - 1) {
console.error('Failed to disconnect from database after all retries');
console.error("Failed to disconnect from database after all retries");
} else {
await new Promise(resolve => setTimeout(resolve, 1000)); // Wait 1 second
await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait 1 second
}
}
}
@@ -104,5 +125,5 @@ module.exports = {
checkDatabaseConnection,
waitForDatabase,
disconnectPrisma,
getOptimizedDatabaseUrl
getOptimizedDatabaseUrl,
};
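
A minimal usage sketch for the helpers above, not part of the commit; the "./db" module path is an assumption:

// Hypothetical wiring of the exported helpers; "./db" is an assumed path.
const { waitForDatabase, disconnectPrisma } = require("./db");
const { PrismaClient } = require("@prisma/client");

const prisma = new PrismaClient();

async function main() {
	// Retries up to PM_DB_CONN_MAX_ATTEMPTS times (default 30), sleeping
	// PM_DB_CONN_WAIT_INTERVAL seconds (default 2) between attempts.
	await waitForDatabase(prisma, { maxAttempts: 5, waitInterval: 1 });
	// ...run queries...
	await disconnectPrisma(prisma);
}

main().catch(console.error);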

@@ -1,79 +1,99 @@
const jwt = require('jsonwebtoken');
const { PrismaClient } = require('@prisma/client');
const jwt = require("jsonwebtoken");
const { PrismaClient } = require("@prisma/client");
const {
validate_session,
update_session_activity,
is_tfa_bypassed,
} = require("../utils/session_manager");

const prisma = new PrismaClient();

// Middleware to verify JWT token
// Middleware to verify JWT token with session validation
const authenticateToken = async (req, res, next) => {
try {
const authHeader = req.headers['authorization'];
const token = authHeader && authHeader.split(' ')[1]; // Bearer TOKEN
const authHeader = req.headers.authorization;
const token = authHeader?.split(" ")[1]; // Bearer TOKEN

if (!token) {
return res.status(401).json({ error: 'Access token required' });
return res.status(401).json({ error: "Access token required" });
}

// Verify token
const decoded = jwt.verify(token, process.env.JWT_SECRET || 'your-secret-key');

// Get user from database
const user = await prisma.users.findUnique({
where: { id: decoded.userId },
select: {
id: true,
username: true,
email: true,
role: true,
is_active: true,
last_login: true,
created_at: true,
updated_at: true
if (!process.env.JWT_SECRET) {
throw new Error("JWT_SECRET environment variable is required");
}
const decoded = jwt.verify(token, process.env.JWT_SECRET);

// Validate session and check inactivity timeout
const validation = await validate_session(decoded.sessionId, token);

if (!validation.valid) {
const error_messages = {
"Session not found": "Session not found",
"Session revoked": "Session has been revoked",
"Session expired": "Session has expired",
"Session inactive":
validation.message || "Session timed out due to inactivity",
"Token mismatch": "Invalid token",
"User inactive": "User account is inactive",
};

return res.status(401).json({
error: error_messages[validation.reason] || "Authentication failed",
reason: validation.reason,
});

if (!user || !user.is_active) {
return res.status(401).json({ error: 'Invalid or inactive user' });
}

// Update last login
// Update session activity timestamp
await update_session_activity(decoded.sessionId);

// Check if TFA is bypassed for this session
const tfa_bypassed = await is_tfa_bypassed(decoded.sessionId);

// Update last login (only on successful authentication)
await prisma.users.update({
where: { id: user.id },
where: { id: validation.user.id },
data: {
last_login: new Date(),
updated_at: new Date()
}
updated_at: new Date(),
},
});

req.user = user;
req.user = validation.user;
req.session_id = decoded.sessionId;
req.tfa_bypassed = tfa_bypassed;
next();
} catch (error) {
if (error.name === 'JsonWebTokenError') {
return res.status(401).json({ error: 'Invalid token' });
if (error.name === "JsonWebTokenError") {
return res.status(401).json({ error: "Invalid token" });
}
if (error.name === 'TokenExpiredError') {
return res.status(401).json({ error: 'Token expired' });
if (error.name === "TokenExpiredError") {
return res.status(401).json({ error: "Token expired" });
}
console.error('Auth middleware error:', error);
return res.status(500).json({ error: 'Authentication failed' });
console.error("Auth middleware error:", error);
return res.status(500).json({ error: "Authentication failed" });
}
};

// Middleware to check admin role
const requireAdmin = (req, res, next) => {
if (req.user.role !== 'admin') {
return res.status(403).json({ error: 'Admin access required' });
if (req.user.role !== "admin") {
return res.status(403).json({ error: "Admin access required" });
}
next();
};

// Middleware to check if user is authenticated (optional)
const optionalAuth = async (req, res, next) => {
const optionalAuth = async (req, _res, next) => {
try {
const authHeader = req.headers['authorization'];
const token = authHeader && authHeader.split(' ')[1];
const authHeader = req.headers.authorization;
const token = authHeader?.split(" ")[1];

if (token) {
const decoded = jwt.verify(token, process.env.JWT_SECRET || 'your-secret-key');
if (!process.env.JWT_SECRET) {
throw new Error("JWT_SECRET environment variable is required");
}
const decoded = jwt.verify(token, process.env.JWT_SECRET);
const user = await prisma.users.findUnique({
where: { id: decoded.userId },
select: {
@@ -84,23 +104,48 @@ const optionalAuth = async (req, res, next) => {
is_active: true,
last_login: true,
created_at: true,
updated_at: true
}
updated_at: true,
},
});

if (user && user.is_active) {
if (user?.is_active) {
req.user = user;
}
}
next();
} catch (error) {
} catch {
// Continue without authentication for optional auth
next();
}
};

// Middleware to check if TFA is required for sensitive operations
const requireTfaIfEnabled = async (req, res, next) => {
try {
// Check if user has TFA enabled
const user = await prisma.users.findUnique({
where: { id: req.user.id },
select: { tfa_enabled: true },
});

// If TFA is enabled and not bypassed, require TFA verification
if (user?.tfa_enabled && !req.tfa_bypassed) {
return res.status(403).json({
error: "Two-factor authentication required for this operation",
requires_tfa: true,
});
}

next();
} catch (error) {
console.error("TFA requirement check error:", error);
return res.status(500).json({ error: "Authentication check failed" });
}
};

module.exports = {
authenticateToken,
requireAdmin,
optionalAuth
optionalAuth,
requireTfaIfEnabled,
};
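
For context, a hedged sketch of how these exports are typically wired into an Express route; the app and path below are illustrative, not taken from the diff:

const express = require("express");
// Path assumed; matches the require() style used by the route files below.
const { authenticateToken, requireAdmin } = require("./middleware/auth");

const app = express();

// authenticateToken populates req.user, req.session_id and req.tfa_bypassed
// before requireAdmin checks req.user.role.
app.get("/admin/overview", authenticateToken, requireAdmin, (req, res) => {
	res.json({ username: req.user.username, tfa_bypassed: req.tfa_bypassed });
});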

@@ -1,4 +1,4 @@
const { PrismaClient } = require('@prisma/client');
const { PrismaClient } = require("@prisma/client");
const prisma = new PrismaClient();

// Permission middleware factory
@@ -7,42 +7,44 @@ const requirePermission = (permission) => {
try {
// Get user's role permissions
const rolePermissions = await prisma.role_permissions.findUnique({
where: { role: req.user.role }
where: { role: req.user.role },
});

// If no specific permissions found, default to admin permissions (for backward compatibility)
if (!rolePermissions) {
console.warn(`No permissions found for role: ${req.user.role}, defaulting to admin access`);
console.warn(
`No permissions found for role: ${req.user.role}, defaulting to admin access`,
);
return next();
}

// Check if user has the required permission
if (!rolePermissions[permission]) {
return res.status(403).json({
error: 'Insufficient permissions',
message: `You don't have permission to ${permission.replace('can_', '').replace('_', ' ')}`
error: "Insufficient permissions",
message: `You don't have permission to ${permission.replace("can_", "").replace("_", " ")}`,
});
}

next();
} catch (error) {
console.error('Permission check error:', error);
res.status(500).json({ error: 'Permission check failed' });
console.error("Permission check error:", error);
res.status(500).json({ error: "Permission check failed" });
}
};
};

// Specific permission middlewares - using snake_case field names
const requireViewDashboard = requirePermission('can_view_dashboard');
const requireViewHosts = requirePermission('can_view_hosts');
const requireManageHosts = requirePermission('can_manage_hosts');
const requireViewPackages = requirePermission('can_view_packages');
const requireManagePackages = requirePermission('can_manage_packages');
const requireViewUsers = requirePermission('can_view_users');
const requireManageUsers = requirePermission('can_manage_users');
const requireViewReports = requirePermission('can_view_reports');
const requireExportData = requirePermission('can_export_data');
const requireManageSettings = requirePermission('can_manage_settings');
const requireViewDashboard = requirePermission("can_view_dashboard");
const requireViewHosts = requirePermission("can_view_hosts");
const requireManageHosts = requirePermission("can_manage_hosts");
const requireViewPackages = requirePermission("can_view_packages");
const requireManagePackages = requirePermission("can_manage_packages");
const requireViewUsers = requirePermission("can_view_users");
const requireManageUsers = requirePermission("can_manage_users");
const requireViewReports = requirePermission("can_view_reports");
const requireExportData = requirePermission("can_export_data");
const requireManageSettings = requirePermission("can_manage_settings");

module.exports = {
requirePermission,
@@ -55,5 +57,5 @@ module.exports = {
requireManageUsers,
requireViewReports,
requireExportData,
requireManageSettings
requireManageSettings,
};
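
A short, hypothetical example of the factory in use; the route path and handler are placeholders:

const express = require("express");
const { authenticateToken } = require("./middleware/auth"); // paths assumed
const { requireExportData } = require("./middleware/permissions");

const router = express.Router();

// The middleware returned by requirePermission("can_export_data") looks up
// role_permissions for req.user.role, so it must run after authenticateToken;
// a false flag produces the 403 response shown above.
router.get("/reports/export", authenticateToken, requireExportData, (_req, res) =>
	res.json({ ok: true }),
);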

File diff suppressed because it is too large

745  backend/src/routes/autoEnrollmentRoutes.js  Normal file
@@ -0,0 +1,745 @@
const express = require("express");
const { PrismaClient } = require("@prisma/client");
const crypto = require("node:crypto");
const bcrypt = require("bcryptjs");
const { body, validationResult } = require("express-validator");
const { authenticateToken } = require("../middleware/auth");
const { requireManageSettings } = require("../middleware/permissions");
const { v4: uuidv4 } = require("uuid");

const router = express.Router();
const prisma = new PrismaClient();

// Generate auto-enrollment token credentials
const generate_auto_enrollment_token = () => {
const token_key = `patchmon_ae_${crypto.randomBytes(16).toString("hex")}`;
const token_secret = crypto.randomBytes(48).toString("hex");
return { token_key, token_secret };
};

// Middleware to validate auto-enrollment token
const validate_auto_enrollment_token = async (req, res, next) => {
try {
const token_key = req.headers["x-auto-enrollment-key"];
const token_secret = req.headers["x-auto-enrollment-secret"];

if (!token_key || !token_secret) {
return res
.status(401)
.json({ error: "Auto-enrollment credentials required" });
}

// Find token
const token = await prisma.auto_enrollment_tokens.findUnique({
where: { token_key: token_key },
});

if (!token || !token.is_active) {
return res.status(401).json({ error: "Invalid or inactive token" });
}

// Verify secret (hashed)
const is_valid = await bcrypt.compare(token_secret, token.token_secret);
if (!is_valid) {
return res.status(401).json({ error: "Invalid token secret" });
}

// Check expiration
if (token.expires_at && new Date() > new Date(token.expires_at)) {
return res.status(401).json({ error: "Token expired" });
}

// Check IP whitelist if configured
if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
const client_ip = req.ip || req.connection.remoteAddress;
// Basic IP check - can be enhanced with CIDR matching
const ip_allowed = token.allowed_ip_ranges.some((allowed_ip) => {
return client_ip.includes(allowed_ip);
});

if (!ip_allowed) {
console.warn(
`Auto-enrollment attempt from unauthorized IP: ${client_ip}`,
);
return res
.status(403)
.json({ error: "IP address not authorized for this token" });
}
}

// Check rate limit (hosts per day)
const today = new Date().toISOString().split("T")[0];
const token_reset_date = token.last_reset_date.toISOString().split("T")[0];

if (token_reset_date !== today) {
// Reset daily counter
await prisma.auto_enrollment_tokens.update({
where: { id: token.id },
data: {
hosts_created_today: 0,
last_reset_date: new Date(),
updated_at: new Date(),
},
});
token.hosts_created_today = 0;
}

if (token.hosts_created_today >= token.max_hosts_per_day) {
return res.status(429).json({
error: "Rate limit exceeded",
message: `Maximum ${token.max_hosts_per_day} hosts per day allowed for this token`,
});
}

req.auto_enrollment_token = token;
next();
} catch (error) {
console.error("Auto-enrollment token validation error:", error);
res.status(500).json({ error: "Token validation failed" });
}
};
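
The inline comment above notes the IP check "can be enhanced with CIDR matching"; a minimal, dependency-free sketch of that enhancement (IPv4 only, not part of the commit):

// Hypothetical CIDR matcher; assumes plain dotted-quad addresses, so an
// IPv6-mapped client IP such as "::ffff:10.0.0.5" would need stripping first.
function ipv4ToInt(ip) {
	return ip.split(".").reduce((acc, octet) => (acc << 8) + Number(octet), 0) >>> 0;
}

function cidrMatch(clientIp, range) {
	const [network, bits = "32"] = range.split("/");
	// "<< 32" is undefined in JS, so a /0 mask is special-cased to 0.
	const mask = bits === "0" ? 0 : (~0 << (32 - Number(bits))) >>> 0;
	return (ipv4ToInt(clientIp) & mask) === (ipv4ToInt(network) & mask);
}

// e.g. cidrMatch("192.168.1.42", "192.168.1.0/24") === true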

// ========== ADMIN ENDPOINTS (Manage Tokens) ==========

// Create auto-enrollment token
router.post(
"/tokens",
authenticateToken,
requireManageSettings,
[
body("token_name")
.isLength({ min: 1, max: 255 })
.withMessage("Token name is required (max 255 characters)"),
body("allowed_ip_ranges")
.optional()
.isArray()
.withMessage("Allowed IP ranges must be an array"),
body("max_hosts_per_day")
.optional()
.isInt({ min: 1, max: 1000 })
.withMessage("Max hosts per day must be between 1 and 1000"),
body("default_host_group_id")
.optional({ nullable: true, checkFalsy: true })
.isString(),
body("expires_at")
.optional({ nullable: true, checkFalsy: true })
.isISO8601()
.withMessage("Invalid date format"),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}

const {
token_name,
allowed_ip_ranges = [],
max_hosts_per_day = 100,
default_host_group_id,
expires_at,
metadata = {},
} = req.body;

// Validate host group if provided
if (default_host_group_id) {
const host_group = await prisma.host_groups.findUnique({
where: { id: default_host_group_id },
});

if (!host_group) {
return res.status(400).json({ error: "Host group not found" });
}
}

const { token_key, token_secret } = generate_auto_enrollment_token();
const hashed_secret = await bcrypt.hash(token_secret, 10);

const token = await prisma.auto_enrollment_tokens.create({
data: {
id: uuidv4(),
token_name,
token_key: token_key,
token_secret: hashed_secret,
created_by_user_id: req.user.id,
allowed_ip_ranges,
max_hosts_per_day,
default_host_group_id: default_host_group_id || null,
expires_at: expires_at ? new Date(expires_at) : null,
metadata: { integration_type: "proxmox-lxc", ...metadata },
updated_at: new Date(),
},
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
users: {
select: {
id: true,
username: true,
first_name: true,
last_name: true,
},
},
},
});

// Return unhashed secret ONLY once (like API keys)
res.status(201).json({
message: "Auto-enrollment token created successfully",
token: {
id: token.id,
token_name: token.token_name,
token_key: token_key,
token_secret: token_secret, // ONLY returned here!
max_hosts_per_day: token.max_hosts_per_day,
default_host_group: token.host_groups,
created_by: token.users,
expires_at: token.expires_at,
},
warning: "⚠️ Save the token_secret now - it cannot be retrieved later!",
});
} catch (error) {
console.error("Create auto-enrollment token error:", error);
res.status(500).json({ error: "Failed to create token" });
}
},
);

// List auto-enrollment tokens
router.get(
"/tokens",
authenticateToken,
requireManageSettings,
async (_req, res) => {
try {
const tokens = await prisma.auto_enrollment_tokens.findMany({
select: {
id: true,
token_name: true,
token_key: true,
is_active: true,
allowed_ip_ranges: true,
max_hosts_per_day: true,
hosts_created_today: true,
last_used_at: true,
expires_at: true,
created_at: true,
default_host_group_id: true,
metadata: true,
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
users: {
select: {
id: true,
username: true,
first_name: true,
last_name: true,
},
},
},
orderBy: { created_at: "desc" },
});

res.json(tokens);
} catch (error) {
console.error("List auto-enrollment tokens error:", error);
res.status(500).json({ error: "Failed to list tokens" });
}
},
);

// Get single token details
router.get(
"/tokens/:tokenId",
authenticateToken,
requireManageSettings,
async (req, res) => {
try {
const { tokenId } = req.params;

const token = await prisma.auto_enrollment_tokens.findUnique({
where: { id: tokenId },
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
users: {
select: {
id: true,
username: true,
first_name: true,
last_name: true,
},
},
},
});

if (!token) {
return res.status(404).json({ error: "Token not found" });
}

// Don't include the secret in response
const { token_secret: _secret, ...token_data } = token;

res.json(token_data);
} catch (error) {
console.error("Get token error:", error);
res.status(500).json({ error: "Failed to get token" });
}
},
);

// Update token (toggle active state, update limits, etc.)
router.patch(
"/tokens/:tokenId",
authenticateToken,
requireManageSettings,
[
body("is_active").optional().isBoolean(),
body("max_hosts_per_day").optional().isInt({ min: 1, max: 1000 }),
body("allowed_ip_ranges").optional().isArray(),
body("expires_at").optional().isISO8601(),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}

const { tokenId } = req.params;
const update_data = { updated_at: new Date() };

if (req.body.is_active !== undefined)
update_data.is_active = req.body.is_active;
if (req.body.max_hosts_per_day !== undefined)
update_data.max_hosts_per_day = req.body.max_hosts_per_day;
if (req.body.allowed_ip_ranges !== undefined)
update_data.allowed_ip_ranges = req.body.allowed_ip_ranges;
if (req.body.expires_at !== undefined)
update_data.expires_at = new Date(req.body.expires_at);

const token = await prisma.auto_enrollment_tokens.update({
where: { id: tokenId },
data: update_data,
include: {
host_groups: true,
users: {
select: {
username: true,
first_name: true,
last_name: true,
},
},
},
});

const { token_secret: _secret, ...token_data } = token;

res.json({
message: "Token updated successfully",
token: token_data,
});
} catch (error) {
console.error("Update token error:", error);
res.status(500).json({ error: "Failed to update token" });
}
},
);

// Delete token
router.delete(
"/tokens/:tokenId",
authenticateToken,
requireManageSettings,
async (req, res) => {
try {
const { tokenId } = req.params;

const token = await prisma.auto_enrollment_tokens.findUnique({
where: { id: tokenId },
});

if (!token) {
return res.status(404).json({ error: "Token not found" });
}

await prisma.auto_enrollment_tokens.delete({
where: { id: tokenId },
});

res.json({
message: "Auto-enrollment token deleted successfully",
deleted_token: {
id: token.id,
token_name: token.token_name,
},
});
} catch (error) {
console.error("Delete token error:", error);
res.status(500).json({ error: "Failed to delete token" });
}
},
);

// ========== AUTO-ENROLLMENT ENDPOINTS (Used by Scripts) ==========
// Future integrations can follow this pattern:
// - /proxmox-lxc - Proxmox LXC containers
// - /vmware-esxi - VMware ESXi VMs
// - /docker - Docker containers
// - /kubernetes - Kubernetes pods
// - /aws-ec2 - AWS EC2 instances

// Serve the Proxmox LXC enrollment script with credentials injected
router.get("/proxmox-lxc", async (req, res) => {
try {
// Get token from query params
const token_key = req.query.token_key;
const token_secret = req.query.token_secret;

if (!token_key || !token_secret) {
return res
.status(401)
.json({ error: "Token key and secret required as query parameters" });
}

// Validate token
const token = await prisma.auto_enrollment_tokens.findUnique({
where: { token_key: token_key },
});

if (!token || !token.is_active) {
return res.status(401).json({ error: "Invalid or inactive token" });
}

// Verify secret
const is_valid = await bcrypt.compare(token_secret, token.token_secret);
if (!is_valid) {
return res.status(401).json({ error: "Invalid token secret" });
}

// Check expiration
if (token.expires_at && new Date() > new Date(token.expires_at)) {
return res.status(401).json({ error: "Token expired" });
}

const fs = require("node:fs");
const path = require("node:path");

const script_path = path.join(
__dirname,
"../../../agents/proxmox_auto_enroll.sh",
);

if (!fs.existsSync(script_path)) {
return res
.status(404)
.json({ error: "Proxmox enrollment script not found" });
}

let script = fs.readFileSync(script_path, "utf8");

// Convert Windows line endings to Unix line endings
script = script.replace(/\r\n/g, "\n").replace(/\r/g, "\n");

// Get the configured server URL from settings
let server_url = "http://localhost:3001";
try {
const settings = await prisma.settings.findFirst();
if (settings?.server_url) {
server_url = settings.server_url;
}
} catch (settings_error) {
console.warn(
"Could not fetch settings, using default server URL:",
settings_error.message,
);
}

// Determine curl flags dynamically from settings
let curl_flags = "-s";
try {
const settings = await prisma.settings.findFirst();
if (settings && settings.ignore_ssl_self_signed === true) {
curl_flags = "-sk";
}
} catch (_) {}

// Check for --force parameter
const force_install = req.query.force === "true" || req.query.force === "1";

// Inject the token credentials, server URL, curl flags, and force flag into the script
const env_vars = `#!/bin/bash
# PatchMon Auto-Enrollment Configuration (Auto-generated)
export PATCHMON_URL="${server_url}"
export AUTO_ENROLLMENT_KEY="${token.token_key}"
export AUTO_ENROLLMENT_SECRET="${token_secret}"
export CURL_FLAGS="${curl_flags}"
export FORCE_INSTALL="${force_install ? "true" : "false"}"

`;

// Remove the shebang and configuration section from the original script
script = script.replace(/^#!/, "#");

// Remove the configuration section (between # ===== CONFIGURATION ===== and the next # =====)
script = script.replace(
/# ===== CONFIGURATION =====[\s\S]*?(?=# ===== COLOR OUTPUT =====)/,
"",
);

script = env_vars + script;

res.setHeader("Content-Type", "text/plain");
res.setHeader(
"Content-Disposition",
'inline; filename="proxmox_auto_enroll.sh"',
);
res.send(script);
} catch (error) {
console.error("Proxmox script serve error:", error);
res.status(500).json({ error: "Failed to serve enrollment script" });
}
});
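
A hedged example of fetching the pre-configured script; the base URL, mount path, and credential values are placeholders, while the query parameter names come from the route above:

async function downloadEnrollmentScript() {
	const url = new URL(
		"https://patchmon.example.com/api/v1/auto-enrollment/proxmox-lxc",
	);
	url.searchParams.set("token_key", "patchmon_ae_<hex>");
	url.searchParams.set("token_secret", "<secret-hex>");
	url.searchParams.set("force", "1"); // optional: sets FORCE_INSTALL="true"

	// Node 18+ global fetch; the response body is the bash script with
	// PATCHMON_URL / AUTO_ENROLLMENT_KEY / AUTO_ENROLLMENT_SECRET injected.
	return (await fetch(url)).text();
}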

// Create host via auto-enrollment
router.post(
"/enroll",
validate_auto_enrollment_token,
[
body("friendly_name")
.isLength({ min: 1, max: 255 })
.withMessage("Friendly name is required"),
body("machine_id")
.isLength({ min: 1, max: 255 })
.withMessage("Machine ID is required"),
body("metadata").optional().isObject(),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}

const { friendly_name, machine_id } = req.body;

// Generate host API credentials
const api_id = `patchmon_${crypto.randomBytes(8).toString("hex")}`;
const api_key = crypto.randomBytes(32).toString("hex");

// Check if host already exists by machine_id (not hostname)
const existing_host = await prisma.hosts.findUnique({
where: { machine_id },
});

if (existing_host) {
return res.status(409).json({
error: "Host already exists",
host_id: existing_host.id,
api_id: existing_host.api_id,
machine_id: existing_host.machine_id,
friendly_name: existing_host.friendly_name,
message:
"This machine is already enrolled in PatchMon (matched by machine ID)",
});
}

// Create host
const host = await prisma.hosts.create({
data: {
id: uuidv4(),
machine_id,
friendly_name,
os_type: "unknown",
os_version: "unknown",
api_id: api_id,
api_key: api_key,
host_group_id: req.auto_enrollment_token.default_host_group_id,
status: "pending",
notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
updated_at: new Date(),
},
include: {
host_groups: {
select: {
id: true,
name: true,
color: true,
},
},
},
});

// Update token usage stats
await prisma.auto_enrollment_tokens.update({
where: { id: req.auto_enrollment_token.id },
data: {
hosts_created_today: { increment: 1 },
last_used_at: new Date(),
updated_at: new Date(),
},
});

console.log(
`Auto-enrolled host: ${friendly_name} (${host.id}) via token: ${req.auto_enrollment_token.token_name}`,
);

res.status(201).json({
message: "Host enrolled successfully",
host: {
id: host.id,
friendly_name: host.friendly_name,
api_id: api_id,
api_key: api_key,
host_group: host.host_groups,
status: host.status,
},
});
} catch (error) {
console.error("Auto-enrollment error:", error);
res.status(500).json({ error: "Failed to enroll host" });
}
},
);

// Bulk enroll multiple hosts at once
router.post(
"/enroll/bulk",
validate_auto_enrollment_token,
[
body("hosts")
.isArray({ min: 1, max: 50 })
.withMessage("Hosts array required (max 50)"),
body("hosts.*.friendly_name")
.isLength({ min: 1 })
.withMessage("Each host needs a friendly_name"),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(400).json({ errors: errors.array() });
}

const { hosts } = req.body;

// Check rate limit
const remaining_quota =
req.auto_enrollment_token.max_hosts_per_day -
req.auto_enrollment_token.hosts_created_today;

if (hosts.length > remaining_quota) {
return res.status(429).json({
error: "Rate limit exceeded",
message: `Only ${remaining_quota} hosts remaining in daily quota`,
});
}

const results = {
success: [],
failed: [],
skipped: [],
};

for (const host_data of hosts) {
try {
const { friendly_name, machine_id } = host_data;

if (!machine_id) {
results.failed.push({
friendly_name,
error: "Machine ID is required",
});
continue;
}

// Check if host already exists by machine_id
const existing_host = await prisma.hosts.findUnique({
where: { machine_id },
});

if (existing_host) {
results.skipped.push({
friendly_name,
machine_id,
reason: "Machine already enrolled",
api_id: existing_host.api_id,
});
continue;
}

// Generate credentials
const api_id = `patchmon_${crypto.randomBytes(8).toString("hex")}`;
const api_key = crypto.randomBytes(32).toString("hex");

// Create host
const host = await prisma.hosts.create({
data: {
id: uuidv4(),
machine_id,
friendly_name,
os_type: "unknown",
os_version: "unknown",
api_id: api_id,
api_key: api_key,
host_group_id: req.auto_enrollment_token.default_host_group_id,
status: "pending",
notes: `Auto-enrolled via ${req.auto_enrollment_token.token_name} on ${new Date().toISOString()}`,
updated_at: new Date(),
},
});

results.success.push({
id: host.id,
friendly_name: host.friendly_name,
api_id: api_id,
api_key: api_key,
});
} catch (error) {
results.failed.push({
friendly_name: host_data.friendly_name,
error: error.message,
});
}
}

// Update token usage stats
if (results.success.length > 0) {
await prisma.auto_enrollment_tokens.update({
where: { id: req.auto_enrollment_token.id },
data: {
hosts_created_today: { increment: results.success.length },
last_used_at: new Date(),
updated_at: new Date(),
},
});
}

res.status(201).json({
message: `Bulk enrollment completed: ${results.success.length} succeeded, ${results.failed.length} failed, ${results.skipped.length} skipped`,
results,
});
} catch (error) {
console.error("Bulk auto-enrollment error:", error);
res.status(500).json({ error: "Failed to bulk enroll hosts" });
}
},
);

module.exports = router;
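
To round out the file, a hypothetical client call against the /enroll endpoint; the base URL and credential values are placeholders, and the header names match validate_auto_enrollment_token above:

async function enrollHost() {
	const res = await fetch(
		"https://patchmon.example.com/api/v1/auto-enrollment/enroll",
		{
			method: "POST",
			headers: {
				"Content-Type": "application/json",
				"X-Auto-Enrollment-Key": "patchmon_ae_<hex>",
				"X-Auto-Enrollment-Secret": "<secret-hex>",
			},
			// machine_id must be stable per machine; re-enrolling the same id
			// returns 409 with the existing host's api_id.
			body: JSON.stringify({ friendly_name: "lxc-101", machine_id: "<machine-id>" }),
		},
	);
	const body = await res.json();
	return body.host; // carries api_id and api_key for the agent
}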

416  backend/src/routes/automationRoutes.js  Normal file
@@ -0,0 +1,416 @@
const express = require("express");
const { queueManager, QUEUE_NAMES } = require("../services/automation");
const { getConnectedApiIds } = require("../services/agentWs");
const { authenticateToken } = require("../middleware/auth");

const router = express.Router();

// Get all queue statistics
router.get("/stats", authenticateToken, async (_req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
res.json({
success: true,
data: stats,
});
} catch (error) {
console.error("Error fetching queue stats:", error);
res.status(500).json({
success: false,
error: "Failed to fetch queue statistics",
});
}
});

// Get specific queue statistics
router.get("/stats/:queueName", authenticateToken, async (req, res) => {
try {
const { queueName } = req.params;

if (!Object.values(QUEUE_NAMES).includes(queueName)) {
return res.status(400).json({
success: false,
error: "Invalid queue name",
});
}

const stats = await queueManager.getQueueStats(queueName);
res.json({
success: true,
data: stats,
});
} catch (error) {
console.error("Error fetching queue stats:", error);
res.status(500).json({
success: false,
error: "Failed to fetch queue statistics",
});
}
});

// Get recent jobs for a queue
router.get("/jobs/:queueName", authenticateToken, async (req, res) => {
try {
const { queueName } = req.params;
const { limit = 10 } = req.query;

if (!Object.values(QUEUE_NAMES).includes(queueName)) {
return res.status(400).json({
success: false,
error: "Invalid queue name",
});
}

const jobs = await queueManager.getRecentJobs(
queueName,
parseInt(limit, 10),
);

// Format jobs for frontend
const formattedJobs = jobs.map((job) => ({
id: job.id,
name: job.name,
status: job.finishedOn
? job.failedReason
? "failed"
: "completed"
: "active",
progress: job.progress,
data: job.data,
returnvalue: job.returnvalue,
failedReason: job.failedReason,
processedOn: job.processedOn,
finishedOn: job.finishedOn,
createdAt: new Date(job.timestamp),
attemptsMade: job.attemptsMade,
delay: job.delay,
}));

res.json({
success: true,
data: formattedJobs,
});
} catch (error) {
console.error("Error fetching recent jobs:", error);
res.status(500).json({
success: false,
error: "Failed to fetch recent jobs",
});
}
});

// Trigger manual GitHub update check
router.post("/trigger/github-update", authenticateToken, async (_req, res) => {
try {
const job = await queueManager.triggerGitHubUpdateCheck();
res.json({
success: true,
data: {
jobId: job.id,
message: "GitHub update check triggered successfully",
},
});
} catch (error) {
console.error("Error triggering GitHub update check:", error);
res.status(500).json({
success: false,
error: "Failed to trigger GitHub update check",
});
}
});

// Trigger manual session cleanup
router.post(
"/trigger/session-cleanup",
authenticateToken,
async (_req, res) => {
try {
const job = await queueManager.triggerSessionCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Session cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering session cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger session cleanup",
});
}
},
);

// Trigger Agent Collection: enqueue report_now for connected agents only
router.post(
"/trigger/agent-collection",
authenticateToken,
async (_req, res) => {
try {
const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
const apiIds = getConnectedApiIds();
if (!apiIds || apiIds.length === 0) {
return res.json({ success: true, data: { enqueued: 0 } });
}
const jobs = apiIds.map((apiId) => ({
name: "report_now",
data: { api_id: apiId, type: "report_now" },
opts: { attempts: 3, backoff: { type: "fixed", delay: 2000 } },
}));
await queue.addBulk(jobs);
res.json({ success: true, data: { enqueued: jobs.length } });
} catch (error) {
console.error("Error triggering agent collection:", error);
res
.status(500)
.json({ success: false, error: "Failed to trigger agent collection" });
}
},
);

// Trigger manual orphaned repo cleanup
router.post(
"/trigger/orphaned-repo-cleanup",
authenticateToken,
async (_req, res) => {
try {
const job = await queueManager.triggerOrphanedRepoCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Orphaned repository cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering orphaned repository cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger orphaned repository cleanup",
});
}
},
);

// Trigger manual orphaned package cleanup
router.post(
"/trigger/orphaned-package-cleanup",
authenticateToken,
async (_req, res) => {
try {
const job = await queueManager.triggerOrphanedPackageCleanup();
res.json({
success: true,
data: {
jobId: job.id,
message: "Orphaned package cleanup triggered successfully",
},
});
} catch (error) {
console.error("Error triggering orphaned package cleanup:", error);
res.status(500).json({
success: false,
error: "Failed to trigger orphaned package cleanup",
});
}
},
);

// Get queue health status
router.get("/health", authenticateToken, async (_req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
const totalJobs = Object.values(stats).reduce((sum, queueStats) => {
return sum + queueStats.waiting + queueStats.active + queueStats.failed;
}, 0);

const health = {
status: "healthy",
totalJobs,
queues: Object.keys(stats).length,
timestamp: new Date().toISOString(),
};

// Check for unhealthy conditions
if (totalJobs > 1000) {
health.status = "warning";
health.message = "High number of queued jobs";
}

const failedJobs = Object.values(stats).reduce((sum, queueStats) => {
return sum + queueStats.failed;
}, 0);

if (failedJobs > 10) {
health.status = "error";
health.message = "High number of failed jobs";
}

res.json({
success: true,
data: health,
});
} catch (error) {
console.error("Error checking queue health:", error);
res.status(500).json({
success: false,
error: "Failed to check queue health",
});
}
});

// Get automation overview (for dashboard cards)
router.get("/overview", authenticateToken, async (_req, res) => {
try {
const stats = await queueManager.getAllQueueStats();
const { getSettings } = require("../services/settingsService");
const settings = await getSettings();

// Get recent jobs for each queue to show last run times
const recentJobs = await Promise.all([
queueManager.getRecentJobs(QUEUE_NAMES.GITHUB_UPDATE_CHECK, 1),
queueManager.getRecentJobs(QUEUE_NAMES.SESSION_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_REPO_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP, 1),
queueManager.getRecentJobs(QUEUE_NAMES.AGENT_COMMANDS, 1),
]);

// Calculate overview metrics
const overview = {
scheduledTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].delayed +
stats[QUEUE_NAMES.SESSION_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].delayed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].delayed,

runningTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].active +
stats[QUEUE_NAMES.SESSION_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].active +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].active,

failedTasks:
stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK].failed +
stats[QUEUE_NAMES.SESSION_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].failed +
stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].failed,

totalAutomations: Object.values(stats).reduce((sum, queueStats) => {
return (
sum +
queueStats.completed +
queueStats.failed +
queueStats.active +
queueStats.waiting +
queueStats.delayed
);
}, 0),

// Automation details with last run times
automations: [
{
name: "GitHub Update Check",
queue: QUEUE_NAMES.GITHUB_UPDATE_CHECK,
description: "Checks for new PatchMon releases",
schedule: "Daily at midnight",
lastRun: recentJobs[0][0]?.finishedOn
? new Date(recentJobs[0][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[0][0]?.finishedOn || 0,
status: recentJobs[0][0]?.failedReason
? "Failed"
: recentJobs[0][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
},
{
name: "Session Cleanup",
queue: QUEUE_NAMES.SESSION_CLEANUP,
description: "Cleans up expired user sessions",
schedule: "Every hour",
lastRun: recentJobs[1][0]?.finishedOn
? new Date(recentJobs[1][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[1][0]?.finishedOn || 0,
status: recentJobs[1][0]?.failedReason
? "Failed"
: recentJobs[1][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.SESSION_CLEANUP],
},
{
name: "Orphaned Repo Cleanup",
queue: QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
description: "Removes repositories with no associated hosts",
schedule: "Daily at 2 AM",
lastRun: recentJobs[2][0]?.finishedOn
? new Date(recentJobs[2][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[2][0]?.finishedOn || 0,
status: recentJobs[2][0]?.failedReason
? "Failed"
: recentJobs[2][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
},
{
name: "Orphaned Package Cleanup",
queue: QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
description: "Removes packages with no associated hosts",
schedule: "Daily at 3 AM",
lastRun: recentJobs[3][0]?.finishedOn
? new Date(recentJobs[3][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[3][0]?.finishedOn || 0,
status: recentJobs[3][0]?.failedReason
? "Failed"
: recentJobs[3][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
},
{
name: "Collect Host Statistics",
queue: QUEUE_NAMES.AGENT_COMMANDS,
description: "Collects package statistics from connected agents only",
schedule: `Every ${settings.update_interval} minutes (Agent-driven)`,
lastRun: recentJobs[4][0]?.finishedOn
? new Date(recentJobs[4][0].finishedOn).toLocaleString()
: "Never",
lastRunTimestamp: recentJobs[4][0]?.finishedOn || 0,
status: recentJobs[4][0]?.failedReason
? "Failed"
: recentJobs[4][0]
? "Success"
: "Never run",
stats: stats[QUEUE_NAMES.AGENT_COMMANDS],
},
].sort((a, b) => {
// Sort by last run timestamp (most recent first)
// If both have never run (timestamp 0), maintain original order
if (a.lastRunTimestamp === 0 && b.lastRunTimestamp === 0) return 0;
if (a.lastRunTimestamp === 0) return 1; // Never run goes to bottom
if (b.lastRunTimestamp === 0) return -1; // Never run goes to bottom
return b.lastRunTimestamp - a.lastRunTimestamp; // Most recent first
}),
};

res.json({
success: true,
data: overview,
});
} catch (error) {
console.error("Error fetching automation overview:", error);
res.status(500).json({
success: false,
error: "Failed to fetch automation overview",
});
}
});

module.exports = router;
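
A hedged sketch of how this router might be mounted; the mount path and port are assumptions, not shown in the diff:

const express = require("express");
const automationRoutes = require("./routes/automationRoutes"); // path assumed

const app = express();
// Mount path is illustrative; every route in the file already applies
// authenticateToken itself, so no extra guard is needed here.
app.use("/api/v1/automation", automationRoutes);
// e.g. GET /api/v1/automation/health ->
//   { success: true, data: { status, totalJobs, queues, timestamp } }
app.listen(3001);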

@@ -1,8 +1,8 @@
const express = require('express');
const { body, validationResult } = require('express-validator');
const { PrismaClient } = require('@prisma/client');
const { authenticateToken } = require('../middleware/auth');
const { v4: uuidv4 } = require('uuid');
const express = require("express");
const { body, validationResult } = require("express-validator");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const { v4: uuidv4 } = require("uuid");

const router = express.Router();
const prisma = new PrismaClient();
@@ -11,12 +11,14 @@ const prisma = new PrismaClient();
async function getUserPermissions(userRole) {
try {
const permissions = await prisma.role_permissions.findUnique({
where: { role: userRole }
where: { role: userRole },
});

// If no specific permissions found, return default admin permissions (for backward compatibility)
if (!permissions) {
console.warn(`No permissions found for role: ${userRole}, defaulting to admin access`);
console.warn(
`No permissions found for role: ${userRole}, defaulting to admin access`,
);
return {
can_view_dashboard: true,
can_view_hosts: true,
@@ -27,13 +29,13 @@ async function getUserPermissions(userRole) {
can_manage_users: true,
can_view_reports: true,
can_export_data: true,
can_manage_settings: true
can_manage_settings: true,
};
}

return permissions;
} catch (error) {
console.error('Error fetching user permissions:', error);
console.error("Error fetching user permissions:", error);
// Return admin permissions as fallback
return {
can_view_dashboard: true,
@@ -45,13 +47,13 @@ async function getUserPermissions(userRole) {
can_manage_users: true,
can_view_reports: true,
can_export_data: true,
can_manage_settings: true
can_manage_settings: true,
};
}
}

// Helper function to create permission-based dashboard preferences for a new user
async function createDefaultDashboardPreferences(userId, userRole = 'user') {
async function createDefaultDashboardPreferences(userId, userRole = "user") {
try {
// Get user's actual permissions
const permissions = await getUserPermissions(userRole);
@@ -60,35 +62,93 @@ async function createDefaultDashboardPreferences(userId, userRole = 'user') {
// Order aligned with preferred layout
const allCards = [
// Host-related cards
{ cardId: 'totalHosts', requiredPermission: 'can_view_hosts', order: 0 },
{ cardId: 'hostsNeedingUpdates', requiredPermission: 'can_view_hosts', order: 1 },
{ cardId: "totalHosts", requiredPermission: "can_view_hosts", order: 0 },
{
cardId: "hostsNeedingUpdates",
requiredPermission: "can_view_hosts",
order: 1,
},

// Package-related cards
{ cardId: 'totalOutdatedPackages', requiredPermission: 'can_view_packages', order: 2 },
{ cardId: 'securityUpdates', requiredPermission: 'can_view_packages', order: 3 },
{
cardId: "totalOutdatedPackages",
requiredPermission: "can_view_packages",
order: 2,
},
{
cardId: "securityUpdates",
requiredPermission: "can_view_packages",
order: 3,
},

// Host-related cards (continued)
{ cardId: 'totalHostGroups', requiredPermission: 'can_view_hosts', order: 4 },
{ cardId: 'upToDateHosts', requiredPermission: 'can_view_hosts', order: 5 },
{
cardId: "totalHostGroups",
requiredPermission: "can_view_hosts",
order: 4,
},
{
cardId: "upToDateHosts",
requiredPermission: "can_view_hosts",
order: 5,
},

// Repository-related cards
{ cardId: 'totalRepos', requiredPermission: 'can_view_hosts', order: 6 },
{ cardId: "totalRepos", requiredPermission: "can_view_hosts", order: 6 },

// User management cards (admin only)
{ cardId: 'totalUsers', requiredPermission: 'can_view_users', order: 7 },
{ cardId: "totalUsers", requiredPermission: "can_view_users", order: 7 },

// System/Report cards
{ cardId: 'osDistribution', requiredPermission: 'can_view_reports', order: 8 },
{ cardId: 'osDistributionBar', requiredPermission: 'can_view_reports', order: 9 },
{ cardId: 'recentCollection', requiredPermission: 'can_view_hosts', order: 10 },
{ cardId: 'updateStatus', requiredPermission: 'can_view_reports', order: 11 },
{ cardId: 'packagePriority', requiredPermission: 'can_view_packages', order: 12 },
{ cardId: 'recentUsers', requiredPermission: 'can_view_users', order: 13 },
{ cardId: 'quickStats', requiredPermission: 'can_view_dashboard', order: 14 }
{
cardId: "osDistribution",
requiredPermission: "can_view_reports",
order: 8,
},
{
cardId: "osDistributionBar",
requiredPermission: "can_view_reports",
order: 9,
},
{
cardId: "osDistributionDoughnut",
requiredPermission: "can_view_reports",
order: 10,
},
{
cardId: "recentCollection",
requiredPermission: "can_view_hosts",
order: 11,
},
{
cardId: "updateStatus",
requiredPermission: "can_view_reports",
order: 12,
},
{
cardId: "packagePriority",
requiredPermission: "can_view_packages",
order: 13,
},
{
cardId: "packageTrends",
requiredPermission: "can_view_packages",
order: 14,
},
{
cardId: "recentUsers",
requiredPermission: "can_view_users",
order: 15,
},
{
cardId: "quickStats",
requiredPermission: "can_view_dashboard",
order: 16,
},
];

// Filter cards based on user's permissions
const allowedCards = allCards.filter(card => {
const allowedCards = allCards.filter((card) => {
return permissions[card.requiredPermission] === true;
});

@@ -100,42 +160,50 @@ async function createDefaultDashboardPreferences(userId, userRole = 'user') {
enabled: true,
order: card.order, // Preserve original order from allCards
created_at: new Date(),
updated_at: new Date()
updated_at: new Date(),
}));

await prisma.dashboard_preferences.createMany({
data: preferencesData
data: preferencesData,
});

console.log(`Permission-based dashboard preferences created for user ${userId} with role ${userRole}: ${allowedCards.length} cards`);
console.log(
`Permission-based dashboard preferences created for user ${userId} with role ${userRole}: ${allowedCards.length} cards`,
);
} catch (error) {
console.error('Error creating default dashboard preferences:', error);
console.error("Error creating default dashboard preferences:", error);
// Don't throw error - this shouldn't break user creation
}
}

// Get user's dashboard preferences
router.get('/', authenticateToken, async (req, res) => {
router.get("/", authenticateToken, async (req, res) => {
try {
const preferences = await prisma.dashboard_preferences.findMany({
where: { user_id: req.user.id },
orderBy: { order: 'asc' }
orderBy: { order: "asc" },
});

res.json(preferences);
} catch (error) {
console.error('Dashboard preferences fetch error:', error);
res.status(500).json({ error: 'Failed to fetch dashboard preferences' });
console.error("Dashboard preferences fetch error:", error);
res.status(500).json({ error: "Failed to fetch dashboard preferences" });
}
});

// Update dashboard preferences (bulk update)
router.put('/', authenticateToken, [
body('preferences').isArray().withMessage('Preferences must be an array'),
body('preferences.*.cardId').isString().withMessage('Card ID is required'),
body('preferences.*.enabled').isBoolean().withMessage('Enabled must be boolean'),
body('preferences.*.order').isInt().withMessage('Order must be integer')
], async (req, res) => {
router.put(
"/",
authenticateToken,
[
body("preferences").isArray().withMessage("Preferences must be an array"),
body("preferences.*.cardId").isString().withMessage("Card ID is required"),
body("preferences.*.enabled")
.isBoolean()
.withMessage("Enabled must be boolean"),
body("preferences.*.order").isInt().withMessage("Order must be integer"),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
@@ -147,59 +215,164 @@ router.put('/', authenticateToken, [

// Delete existing preferences for this user
await prisma.dashboard_preferences.deleteMany({
where: { user_id: userId }
where: { user_id: userId },
});

// Create new preferences
const newPreferences = preferences.map(pref => ({
id: require('uuid').v4(),
const newPreferences = preferences.map((pref) => ({
id: require("uuid").v4(),
user_id: userId,
card_id: pref.cardId,
enabled: pref.enabled,
order: pref.order,
updated_at: new Date()
updated_at: new Date(),
}));

const createdPreferences = await prisma.dashboard_preferences.createMany({
data: newPreferences
await prisma.dashboard_preferences.createMany({
data: newPreferences,
});

res.json({
message: 'Dashboard preferences updated successfully',
preferences: newPreferences
message: "Dashboard preferences updated successfully",
preferences: newPreferences,
});
} catch (error) {
console.error('Dashboard preferences update error:', error);
res.status(500).json({ error: 'Failed to update dashboard preferences' });
console.error("Dashboard preferences update error:", error);
res.status(500).json({ error: "Failed to update dashboard preferences" });
}
});
},
);

// Get default dashboard card configuration
router.get('/defaults', authenticateToken, async (req, res) => {
router.get("/defaults", authenticateToken, async (_req, res) => {
try {
// This provides a comprehensive dashboard view for all new users
const defaultCards = [
{ cardId: 'totalHosts', title: 'Total Hosts', icon: 'Server', enabled: true, order: 0 },
{ cardId: 'hostsNeedingUpdates', title: 'Needs Updating', icon: 'AlertTriangle', enabled: true, order: 1 },
{ cardId: 'totalOutdatedPackages', title: 'Outdated Packages', icon: 'Package', enabled: true, order: 2 },
{ cardId: 'securityUpdates', title: 'Security Updates', icon: 'Shield', enabled: true, order: 3 },
{ cardId: 'totalHostGroups', title: 'Host Groups', icon: 'Folder', enabled: true, order: 4 },
{ cardId: 'hostsNeedingUpdates', title: 'Up to date', icon: 'CheckCircle', enabled: true, order: 5 },
{ cardId: 'totalRepos', title: 'Repositories', icon: 'GitBranch', enabled: true, order: 6 },
{ cardId: 'totalUsers', title: 'Users', icon: 'Users', enabled: true, order: 7 },
{ cardId: 'osDistribution', title: 'OS Distribution', icon: 'BarChart3', enabled: true, order: 8 },
{ cardId: 'osDistributionBar', title: 'OS Distribution (Bar)', icon: 'BarChart3', enabled: true, order: 9 },
{ cardId: 'recentCollection', title: 'Recent Collection', icon: 'Server', enabled: true, order: 10 },
{ cardId: 'updateStatus', title: 'Update Status', icon: 'BarChart3', enabled: true, order: 11 },
{ cardId: 'packagePriority', title: 'Package Priority', icon: 'BarChart3', enabled: true, order: 12 },
{ cardId: 'recentUsers', title: 'Recent Users Logged in', icon: 'Users', enabled: true, order: 13 },
{ cardId: 'quickStats', title: 'Quick Stats', icon: 'TrendingUp', enabled: true, order: 14 }
{
cardId: "totalHosts",
title: "Total Hosts",
icon: "Server",
enabled: true,
order: 0,
},
{
cardId: "hostsNeedingUpdates",
title: "Needs Updating",
icon: "AlertTriangle",
enabled: true,
order: 1,
},
{
cardId: "totalOutdatedPackages",
title: "Outdated Packages",
icon: "Package",
enabled: true,
order: 2,
},
{
cardId: "securityUpdates",
title: "Security Updates",
icon: "Shield",
enabled: true,
order: 3,
},
{
cardId: "totalHostGroups",
title: "Host Groups",
icon: "Folder",
enabled: true,
order: 4,
},
{
cardId: "upToDateHosts",
title: "Up to date",
icon: "CheckCircle",
enabled: true,
order: 5,
},
{
cardId: "totalRepos",
title: "Repositories",
icon: "GitBranch",
enabled: true,
order: 6,
},
{
cardId: "totalUsers",
title: "Users",
icon: "Users",
enabled: true,
order: 7,
},
{
cardId: "osDistribution",
title: "OS Distribution",
icon: "BarChart3",
enabled: true,
order: 8,
},
{
cardId: "osDistributionBar",
title: "OS Distribution (Bar)",
icon: "BarChart3",
enabled: true,
order: 9,
},
{
cardId: "osDistributionDoughnut",
title: "OS Distribution (Doughnut)",
icon: "PieChart",
enabled: true,
order: 10,
},
{
cardId: "recentCollection",
title: "Recent Collection",
icon: "Server",
enabled: true,
order: 11,
},
{
cardId: "updateStatus",
title: "Update Status",
icon: "BarChart3",
enabled: true,
order: 12,
},
{
cardId: "packagePriority",
title: "Package Priority",
icon: "BarChart3",
enabled: true,
order: 13,
},
{
cardId: "packageTrends",
title: "Package Trends",
icon: "TrendingUp",
enabled: true,
order: 14,
},
{
cardId: "recentUsers",
title: "Recent Users Logged in",
icon: "Users",
enabled: true,
order: 15,
},
{
cardId: "quickStats",
title: "Quick Stats",
icon: "TrendingUp",
enabled: true,
order: 16,
},
];

res.json(defaultCards);
} catch (error) {
console.error('Default dashboard cards error:', error);
res.status(500).json({ error: 'Failed to fetch default dashboard cards' });
console.error("Default dashboard cards error:", error);
res.status(500).json({ error: "Failed to fetch default dashboard cards" });
}
});
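
A hedged usage sketch for the bulk-update endpoint above. The payload shape ({ preferences: [{ cardId, enabled, order }] }) follows the route's validators; the mount path and token variable are assumptions for illustration:

// Illustrative client call; the URL prefix and jwt variable are hypothetical.
const res = await fetch("/api/v1/dashboard-preferences", {
  method: "PUT",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${jwt}`, // whatever authenticateToken expects
  },
  body: JSON.stringify({
    preferences: [
      { cardId: "totalHosts", enabled: true, order: 0 },
      { cardId: "quickStats", enabled: false, order: 16 },
    ],
  }),
});
// The handler deletes all existing rows for the user before re-creating them,
// so clients should always send the full preference list.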

File diff suppressed because it is too large

779	backend/src/routes/dockerRoutes.js	Normal file
@@ -0,0 +1,779 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { PrismaClient } = require("@prisma/client");
const { v4: uuidv4 } = require("uuid");

const prisma = new PrismaClient();
const router = express.Router();

// Helper function to convert BigInt fields to strings for JSON serialization
const convertBigIntToString = (obj) => {
if (obj === null || obj === undefined) return obj;

if (typeof obj === "bigint") {
return obj.toString();
}

if (Array.isArray(obj)) {
return obj.map(convertBigIntToString);
}

if (typeof obj === "object") {
const converted = {};
for (const key in obj) {
converted[key] = convertBigIntToString(obj[key]);
}
return converted;
}

return obj;
};
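
// Editor's note (not part of the diff): JSON.stringify throws on BigInt values,
// and Prisma returns BigInt for BIGINT columns such as size_bytes further down.
// A quick illustration with made-up values:
//   JSON.stringify({ size: 123n })        -> TypeError: Do not know how to serialize a BigInt
//   convertBigIntToString({ size: 123n }) -> { size: "123" }, which serializes cleanly
// The helper also walks arrays and nested objects, so whole Prisma result sets
// can be passed through it before res.json().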

// GET /api/v1/docker/dashboard - Get Docker dashboard statistics
router.get("/dashboard", authenticateToken, async (_req, res) => {
try {
// Get total hosts with Docker containers
const hostsWithDocker = await prisma.docker_containers.groupBy({
by: ["host_id"],
_count: true,
});

// Get total containers
const totalContainers = await prisma.docker_containers.count();

// Get running containers
const runningContainers = await prisma.docker_containers.count({
where: { status: "running" },
});

// Get total images
const totalImages = await prisma.docker_images.count();

// Get available updates
const availableUpdates = await prisma.docker_image_updates.count();

// Get containers by status
const containersByStatus = await prisma.docker_containers.groupBy({
by: ["status"],
_count: true,
});

// Get images by source
const imagesBySource = await prisma.docker_images.groupBy({
by: ["source"],
_count: true,
});

res.json({
stats: {
totalHostsWithDocker: hostsWithDocker.length,
totalContainers,
runningContainers,
totalImages,
availableUpdates,
},
containersByStatus,
imagesBySource,
});
} catch (error) {
console.error("Error fetching Docker dashboard:", error);
res.status(500).json({ error: "Failed to fetch Docker dashboard" });
}
});

// GET /api/v1/docker/containers - Get all containers with filters
router.get("/containers", authenticateToken, async (req, res) => {
try {
const { status, hostId, imageId, search, page = 1, limit = 50 } = req.query;

const where = {};
if (status) where.status = status;
if (hostId) where.host_id = hostId;
if (imageId) where.image_id = imageId;
if (search) {
where.OR = [
{ name: { contains: search, mode: "insensitive" } },
{ image_name: { contains: search, mode: "insensitive" } },
];
}

const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);

const [containers, total] = await Promise.all([
prisma.docker_containers.findMany({
where,
include: {
docker_images: true,
},
orderBy: { updated_at: "desc" },
skip,
take,
}),
prisma.docker_containers.count({ where }),
]);

// Get host information for each container
const hostIds = [...new Set(containers.map((c) => c.host_id))];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true, ip: true },
});

const hostsMap = hosts.reduce((acc, host) => {
acc[host.id] = host;
return acc;
}, {});

const containersWithHosts = containers.map((container) => ({
...container,
host: hostsMap[container.host_id],
}));

res.json(
convertBigIntToString({
containers: containersWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching containers:", error);
res.status(500).json({ error: "Failed to fetch containers" });
}
});

// GET /api/v1/docker/containers/:id - Get container detail
router.get("/containers/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;

const container = await prisma.docker_containers.findUnique({
where: { id },
include: {
docker_images: {
include: {
docker_image_updates: true,
},
},
},
});

if (!container) {
return res.status(404).json({ error: "Container not found" });
}

// Get host information
const host = await prisma.hosts.findUnique({
where: { id: container.host_id },
select: {
id: true,
friendly_name: true,
hostname: true,
ip: true,
os_type: true,
os_version: true,
},
});

// Get other containers using the same image
const similarContainers = await prisma.docker_containers.findMany({
where: {
image_id: container.image_id,
id: { not: id },
},
take: 10,
});

res.json(
convertBigIntToString({
container: {
...container,
host,
},
similarContainers,
}),
);
} catch (error) {
console.error("Error fetching container detail:", error);
res.status(500).json({ error: "Failed to fetch container detail" });
}
});

// GET /api/v1/docker/images - Get all images with filters
router.get("/images", authenticateToken, async (req, res) => {
try {
const { source, search, page = 1, limit = 50 } = req.query;

const where = {};
if (source) where.source = source;
if (search) {
where.OR = [
{ repository: { contains: search, mode: "insensitive" } },
{ tag: { contains: search, mode: "insensitive" } },
];
}

const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);

const [images, total] = await Promise.all([
prisma.docker_images.findMany({
where,
include: {
_count: {
select: {
docker_containers: true,
docker_image_updates: true,
},
},
docker_image_updates: {
take: 1,
orderBy: { created_at: "desc" },
},
},
orderBy: { updated_at: "desc" },
skip,
take,
}),
prisma.docker_images.count({ where }),
]);

// Get unique hosts using each image
const imagesWithHosts = await Promise.all(
images.map(async (image) => {
const containers = await prisma.docker_containers.findMany({
where: { image_id: image.id },
select: { host_id: true },
distinct: ["host_id"],
});
return {
...image,
hostsCount: containers.length,
hasUpdates: image._count.docker_image_updates > 0,
};
}),
);

res.json(
convertBigIntToString({
images: imagesWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching images:", error);
res.status(500).json({ error: "Failed to fetch images" });
}
});

// GET /api/v1/docker/images/:id - Get image detail
router.get("/images/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;

const image = await prisma.docker_images.findUnique({
where: { id },
include: {
docker_containers: {
take: 100,
},
docker_image_updates: {
orderBy: { created_at: "desc" },
},
},
});

if (!image) {
return res.status(404).json({ error: "Image not found" });
}

// Get unique hosts using this image
const hostIds = [...new Set(image.docker_containers.map((c) => c.host_id))];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true, ip: true },
});

res.json(
convertBigIntToString({
image,
hosts,
totalContainers: image.docker_containers.length,
totalHosts: hosts.length,
}),
);
} catch (error) {
console.error("Error fetching image detail:", error);
res.status(500).json({ error: "Failed to fetch image detail" });
}
});

// GET /api/v1/docker/hosts - Get all hosts with Docker
router.get("/hosts", authenticateToken, async (req, res) => {
try {
const { page = 1, limit = 50 } = req.query;

// Get hosts that have Docker containers
const hostsWithContainers = await prisma.docker_containers.groupBy({
by: ["host_id"],
_count: true,
});

const hostIds = hostsWithContainers.map((h) => h.host_id);

const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);

const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
skip,
take,
orderBy: { friendly_name: "asc" },
});

// Get container counts and statuses for each host
const hostsWithStats = await Promise.all(
hosts.map(async (host) => {
const [totalContainers, runningContainers, totalImages] =
await Promise.all([
prisma.docker_containers.count({
where: { host_id: host.id },
}),
prisma.docker_containers.count({
where: { host_id: host.id, status: "running" },
}),
prisma.docker_containers.findMany({
where: { host_id: host.id },
select: { image_id: true },
distinct: ["image_id"],
}),
]);

return {
...host,
dockerStats: {
totalContainers,
runningContainers,
totalImages: totalImages.length,
},
};
}),
);

res.json(
convertBigIntToString({
hosts: hostsWithStats,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total: hostIds.length,
totalPages: Math.ceil(hostIds.length / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching Docker hosts:", error);
res.status(500).json({ error: "Failed to fetch Docker hosts" });
}
});

// GET /api/v1/docker/hosts/:id - Get host Docker detail
router.get("/hosts/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;

const host = await prisma.hosts.findUnique({
where: { id },
});

if (!host) {
return res.status(404).json({ error: "Host not found" });
}

// Get containers on this host
const containers = await prisma.docker_containers.findMany({
where: { host_id: id },
include: {
docker_images: {
include: {
docker_image_updates: true,
},
},
},
orderBy: { name: "asc" },
});

// Get unique images on this host
const imageIds = [...new Set(containers.map((c) => c.image_id))].filter(
Boolean,
);
const images = await prisma.docker_images.findMany({
where: { id: { in: imageIds } },
});

// Get container statistics
const runningContainers = containers.filter(
(c) => c.status === "running",
).length;
const stoppedContainers = containers.filter(
(c) => c.status === "exited" || c.status === "stopped",
).length;

res.json(
convertBigIntToString({
host,
containers,
images,
stats: {
totalContainers: containers.length,
runningContainers,
stoppedContainers,
totalImages: images.length,
},
}),
);
} catch (error) {
console.error("Error fetching host Docker detail:", error);
res.status(500).json({ error: "Failed to fetch host Docker detail" });
}
});

// GET /api/v1/docker/updates - Get available updates
router.get("/updates", authenticateToken, async (req, res) => {
try {
const { page = 1, limit = 50, securityOnly = false } = req.query;

const where = {};
if (securityOnly === "true") {
where.is_security_update = true;
}

const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
const take = parseInt(limit, 10);

const [updates, total] = await Promise.all([
prisma.docker_image_updates.findMany({
where,
include: {
docker_images: {
include: {
docker_containers: {
select: {
id: true,
host_id: true,
name: true,
},
},
},
},
},
orderBy: [{ is_security_update: "desc" }, { created_at: "desc" }],
skip,
take,
}),
prisma.docker_image_updates.count({ where }),
]);

// Get affected hosts for each update
const updatesWithHosts = await Promise.all(
updates.map(async (update) => {
const hostIds = [
...new Set(
update.docker_images.docker_containers.map((c) => c.host_id),
),
];
const hosts = await prisma.hosts.findMany({
where: { id: { in: hostIds } },
select: { id: true, friendly_name: true, hostname: true },
});
return {
...update,
affectedHosts: hosts,
affectedContainersCount:
update.docker_images.docker_containers.length,
};
}),
);

res.json(
convertBigIntToString({
updates: updatesWithHosts,
pagination: {
page: parseInt(page, 10),
limit: parseInt(limit, 10),
total,
totalPages: Math.ceil(total / parseInt(limit, 10)),
},
}),
);
} catch (error) {
console.error("Error fetching Docker updates:", error);
res.status(500).json({ error: "Failed to fetch Docker updates" });
}
});

// POST /api/v1/docker/collect - Collect Docker data from agent
router.post("/collect", async (req, res) => {
try {
const { apiId, apiKey, containers, images, updates } = req.body;

// Validate API credentials
const host = await prisma.hosts.findFirst({
where: { api_id: apiId, api_key: apiKey },
});

if (!host) {
return res.status(401).json({ error: "Invalid API credentials" });
}

const now = new Date();

// Helper function to validate and parse dates
const parseDate = (dateString) => {
if (!dateString) return now;
const date = new Date(dateString);
return Number.isNaN(date.getTime()) ? now : date;
};

// Process containers
if (containers && Array.isArray(containers)) {
for (const containerData of containers) {
const containerId = uuidv4();

// Find or create image
let imageId = null;
if (containerData.image_repository && containerData.image_tag) {
const image = await prisma.docker_images.upsert({
where: {
repository_tag_image_id: {
repository: containerData.image_repository,
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
},
},
update: {
last_checked: now,
updated_at: now,
},
create: {
id: uuidv4(),
repository: containerData.image_repository,
tag: containerData.image_tag,
image_id: containerData.image_id || "unknown",
source: containerData.image_source || "docker-hub",
created_at: parseDate(containerData.created_at),
updated_at: now,
},
});
imageId = image.id;
}

// Upsert container
await prisma.docker_containers.upsert({
where: {
host_id_container_id: {
host_id: host.id,
container_id: containerData.container_id,
},
},
update: {
name: containerData.name,
image_id: imageId,
image_name: containerData.image_name,
image_tag: containerData.image_tag || "latest",
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
started_at: containerData.started_at
? parseDate(containerData.started_at)
: null,
updated_at: now,
last_checked: now,
},
create: {
id: containerId,
host_id: host.id,
container_id: containerData.container_id,
name: containerData.name,
image_id: imageId,
image_name: containerData.image_name,
image_tag: containerData.image_tag || "latest",
status: containerData.status,
state: containerData.state,
ports: containerData.ports || null,
created_at: parseDate(containerData.created_at),
started_at: containerData.started_at
? parseDate(containerData.started_at)
: null,
updated_at: now,
},
});
}
}

// Process standalone images
if (images && Array.isArray(images)) {
for (const imageData of images) {
await prisma.docker_images.upsert({
where: {
repository_tag_image_id: {
repository: imageData.repository,
tag: imageData.tag,
image_id: imageData.image_id,
},
},
update: {
size_bytes: imageData.size_bytes
? BigInt(imageData.size_bytes)
: null,
last_checked: now,
updated_at: now,
},
create: {
id: uuidv4(),
repository: imageData.repository,
tag: imageData.tag,
image_id: imageData.image_id,
digest: imageData.digest,
size_bytes: imageData.size_bytes
? BigInt(imageData.size_bytes)
: null,
source: imageData.source || "docker-hub",
created_at: parseDate(imageData.created_at),
updated_at: now,
},
});
}
}

// Process updates
// First, get all images for this host to clean up old updates
const hostImageIds = await prisma.docker_containers
.findMany({
where: { host_id: host.id },
select: { image_id: true },
distinct: ["image_id"],
})
.then((results) => results.map((r) => r.image_id).filter(Boolean));

// Delete old updates for images on this host that are no longer reported
if (hostImageIds.length > 0) {
const reportedImageIds = [];

// Process new updates
if (updates && Array.isArray(updates)) {
for (const updateData of updates) {
// Find the image by repository, tag, and image_id
const image = await prisma.docker_images.findFirst({
where: {
repository: updateData.repository,
tag: updateData.current_tag,
image_id: updateData.image_id,
},
});

if (image) {
reportedImageIds.push(image.id);

// Store digest info in changelog_url field as JSON for now
const digestInfo = JSON.stringify({
method: "digest_comparison",
current_digest: updateData.current_digest,
available_digest: updateData.available_digest,
});

// Upsert the update record
await prisma.docker_image_updates.upsert({
where: {
image_id_available_tag: {
image_id: image.id,
available_tag: updateData.available_tag,
},
},
update: {
updated_at: now,
changelog_url: digestInfo,
severity: "digest_changed",
},
create: {
id: uuidv4(),
image_id: image.id,
current_tag: updateData.current_tag,
available_tag: updateData.available_tag,
severity: "digest_changed",
changelog_url: digestInfo,
updated_at: now,
},
});
}
}
}

// Remove stale updates for images on this host that are no longer in the updates list
const imageIdsToCleanup = hostImageIds.filter(
(id) => !reportedImageIds.includes(id),
);
if (imageIdsToCleanup.length > 0) {
await prisma.docker_image_updates.deleteMany({
where: {
image_id: { in: imageIdsToCleanup },
},
});
}
}

res.json({ success: true, message: "Docker data collected successfully" });
} catch (error) {
console.error("Error collecting Docker data:", error);
console.error("Error stack:", error.stack);
console.error("Request body:", JSON.stringify(req.body, null, 2));
res.status(500).json({
error: "Failed to collect Docker data",
message: error.message,
details: process.env.NODE_ENV === "development" ? error.stack : undefined,
});
}
});

// GET /api/v1/docker/agent - Serve the Docker agent installation script
router.get("/agent", async (_req, res) => {
try {
const fs = require("node:fs");
const path = require("node:path");
const agentPath = path.join(
__dirname,
"../../..",
"agents",
"patchmon-docker-agent.sh",
);

// Check if file exists
if (!fs.existsSync(agentPath)) {
return res.status(404).json({ error: "Docker agent script not found" });
}

// Read and serve the file
const agentScript = fs.readFileSync(agentPath, "utf8");
res.setHeader("Content-Type", "text/x-shellscript");
res.setHeader(
"Content-Disposition",
'inline; filename="patchmon-docker-agent.sh"',
);
res.send(agentScript);
} catch (error) {
console.error("Error serving Docker agent:", error);
res.status(500).json({ error: "Failed to serve Docker agent script" });
}
});

module.exports = router;
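
The POST /collect handler above is the endpoint the Docker agent reports into. A minimal sketch of a request body it would accept, inferred from the destructuring and upsert logic in the handler (every value below is illustrative, and apiId/apiKey must match an enrolled host's credentials):

// Hypothetical agent payload for POST /api/v1/docker/collect.
const payload = {
  apiId: "patchmon_abc123",
  apiKey: "not-a-real-key",
  containers: [
    {
      container_id: "f2a9c0de1b23",
      name: "web",
      image_repository: "nginx",
      image_tag: "1.27",
      image_id: "sha256:0d17b5", // shortened, illustrative digest
      image_name: "nginx:1.27",
      image_source: "docker-hub",
      status: "running",
      state: "running",
      ports: null,
      created_at: "2025-10-01T12:00:00Z",
      started_at: "2025-10-01T12:00:05Z",
    },
  ],
  images: [], // standalone images: { repository, tag, image_id, digest, size_bytes, source, created_at }
  updates: [], // { repository, current_tag, available_tag, image_id, current_digest, available_digest }
};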

236	backend/src/routes/gethomepageRoutes.js	Normal file

@@ -0,0 +1,236 @@
const express = require("express");
const { createPrismaClient } = require("../config/database");
const bcrypt = require("bcryptjs");

const router = express.Router();
const prisma = createPrismaClient();

// Middleware to authenticate API key
const authenticateApiKey = async (req, res, next) => {
try {
const authHeader = req.headers.authorization;

if (!authHeader || !authHeader.startsWith("Basic ")) {
return res
.status(401)
.json({ error: "Missing or invalid authorization header" });
}

// Decode base64 credentials
const base64Credentials = authHeader.split(" ")[1];
const credentials = Buffer.from(base64Credentials, "base64").toString(
"ascii",
);
const [apiKey, apiSecret] = credentials.split(":");

if (!apiKey || !apiSecret) {
return res.status(401).json({ error: "Invalid credentials format" });
}

// Find the token in database
const token = await prisma.auto_enrollment_tokens.findUnique({
where: { token_key: apiKey },
include: {
users: {
select: {
id: true,
username: true,
role: true,
},
},
},
});

if (!token) {
console.log(`API key not found: ${apiKey}`);
return res.status(401).json({ error: "Invalid API key" });
}

// Check if token is active
if (!token.is_active) {
return res.status(401).json({ error: "API key is disabled" });
}

// Check if token has expired
if (token.expires_at && new Date(token.expires_at) < new Date()) {
return res.status(401).json({ error: "API key has expired" });
}

// Check if token is for gethomepage integration
if (token.metadata?.integration_type !== "gethomepage") {
return res.status(401).json({ error: "Invalid API key type" });
}

// Verify the secret
const isValidSecret = await bcrypt.compare(apiSecret, token.token_secret);
if (!isValidSecret) {
return res.status(401).json({ error: "Invalid API secret" });
}

// Check IP restrictions if any
if (token.allowed_ip_ranges && token.allowed_ip_ranges.length > 0) {
const clientIp = req.ip || req.connection.remoteAddress;
const forwardedFor = req.headers["x-forwarded-for"];
const realIp = req.headers["x-real-ip"];

// Get the actual client IP (considering proxies)
const actualClientIp = forwardedFor
? forwardedFor.split(",")[0].trim()
: realIp || clientIp;

const isAllowedIp = token.allowed_ip_ranges.some((range) => {
// Simple IP range check (can be enhanced for CIDR support)
return actualClientIp.startsWith(range) || actualClientIp === range;
});

if (!isAllowedIp) {
console.log(
`IP validation failed. Client IP: ${actualClientIp}, Allowed ranges: ${token.allowed_ip_ranges.join(", ")}`,
);
return res.status(403).json({ error: "IP address not allowed" });
}
}

// Update last used timestamp
await prisma.auto_enrollment_tokens.update({
where: { id: token.id },
data: { last_used_at: new Date() },
});

// Attach token info to request
req.apiToken = token;
next();
} catch (error) {
console.error("API key authentication error:", error);
res.status(500).json({ error: "Authentication failed" });
}
};

// Get homepage widget statistics
router.get("/stats", authenticateApiKey, async (_req, res) => {
try {
// Get total hosts count
const totalHosts = await prisma.hosts.count({
where: { status: "active" },
});

// Get total outdated packages count
const totalOutdatedPackages = await prisma.host_packages.count({
where: { needs_update: true },
});

// Get total repositories count
const totalRepos = await prisma.repositories.count({
where: { is_active: true },
});

// Get hosts that need updates (have outdated packages)
const hostsNeedingUpdates = await prisma.hosts.count({
where: {
status: "active",
host_packages: {
some: {
needs_update: true,
},
},
},
});

// Get security updates count
const securityUpdates = await prisma.host_packages.count({
where: {
needs_update: true,
is_security_update: true,
},
});

// Get hosts with security updates
const hostsWithSecurityUpdates = await prisma.hosts.count({
where: {
status: "active",
host_packages: {
some: {
needs_update: true,
is_security_update: true,
},
},
},
});

// Get up-to-date hosts count
const upToDateHosts = totalHosts - hostsNeedingUpdates;

// Get recent update activity (last 24 hours)
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const recentUpdates = await prisma.update_history.count({
where: {
timestamp: {
gte: oneDayAgo,
},
status: "success",
},
});

// Get OS distribution
const osDistribution = await prisma.hosts.groupBy({
by: ["os_type"],
where: { status: "active" },
_count: {
id: true,
},
orderBy: {
_count: {
id: "desc",
},
},
});

// Format OS distribution data
const osDistributionFormatted = osDistribution.map((os) => ({
name: os.os_type,
count: os._count.id,
}));

// Extract top 3 OS types for flat display in widgets
const top_os_1 = osDistributionFormatted[0] || { name: "None", count: 0 };
const top_os_2 = osDistributionFormatted[1] || { name: "None", count: 0 };
const top_os_3 = osDistributionFormatted[2] || { name: "None", count: 0 };

// Prepare response data
const stats = {
total_hosts: totalHosts,
total_outdated_packages: totalOutdatedPackages,
total_repos: totalRepos,
hosts_needing_updates: hostsNeedingUpdates,
up_to_date_hosts: upToDateHosts,
security_updates: securityUpdates,
hosts_with_security_updates: hostsWithSecurityUpdates,
recent_updates_24h: recentUpdates,
os_distribution: osDistributionFormatted,
// Flattened OS data for easy widget display
top_os_1_name: top_os_1.name,
top_os_1_count: top_os_1.count,
top_os_2_name: top_os_2.name,
top_os_2_count: top_os_2.count,
top_os_3_name: top_os_3.name,
top_os_3_count: top_os_3.count,
last_updated: new Date().toISOString(),
};

res.json(stats);
} catch (error) {
console.error("Error fetching homepage stats:", error);
res.status(500).json({ error: "Failed to fetch statistics" });
}
});

// Health check endpoint for the API
router.get("/health", authenticateApiKey, async (req, res) => {
res.json({
status: "ok",
timestamp: new Date().toISOString(),
api_key: req.apiToken.token_name,
});
});

module.exports = router;
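
For the authenticateApiKey middleware above, clients authenticate with HTTP Basic auth where the username is the token key and the password is the token secret. A sketch of what a gethomepage widget (or any client) effectively sends; the host name, key, and secret are placeholders, and the /api/v1/gethomepage prefix is an assumption about where this router is mounted:

// Illustrative request against the stats endpoint; all values are made up.
const apiKey = "pm_homepage_key"; // stored as token_key in auto_enrollment_tokens
const apiSecret = "pm_homepage_secret"; // checked against the bcrypt hash in token_secret
const auth = Buffer.from(`${apiKey}:${apiSecret}`).toString("base64");

const res = await fetch("https://patchmon.example.com/api/v1/gethomepage/stats", {
  headers: { Authorization: `Basic ${auth}` },
});
const stats = await res.json(); // { total_hosts, security_updates, top_os_1_name, ... }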

@@ -1,43 +1,45 @@
const express = require('express');
const { body, validationResult } = require('express-validator');
const { PrismaClient } = require('@prisma/client');
const { randomUUID } = require('crypto');
const { authenticateToken } = require('../middleware/auth');
const { requireManageHosts } = require('../middleware/permissions');
const express = require("express");
const { body, validationResult } = require("express-validator");
const { PrismaClient } = require("@prisma/client");
const { randomUUID } = require("node:crypto");
const { authenticateToken } = require("../middleware/auth");
const { requireManageHosts } = require("../middleware/permissions");

const router = express.Router();
const prisma = new PrismaClient();

// Get all host groups
router.get('/', authenticateToken, async (req, res) => {
router.get("/", authenticateToken, async (_req, res) => {
try {
const hostGroups = await prisma.host_groups.findMany({
include: {
_count: {
select: {
hosts: true
}
}
host_group_memberships: true,
},
},
},
orderBy: {
name: 'asc'
}
name: "asc",
},
});

res.json(hostGroups);
} catch (error) {
console.error('Error fetching host groups:', error);
res.status(500).json({ error: 'Failed to fetch host groups' });
console.error("Error fetching host groups:", error);
res.status(500).json({ error: "Failed to fetch host groups" });
}
});

// Get a specific host group by ID
router.get('/:id', authenticateToken, async (req, res) => {
router.get("/:id", authenticateToken, async (req, res) => {
try {
const { id } = req.params;

const hostGroup = await prisma.host_groups.findUnique({
where: { id },
include: {
host_group_memberships: {
include: {
hosts: {
select: {
@@ -48,29 +50,39 @@ router.get('/:id', authenticateToken, async (req, res) => {
os_type: true,
os_version: true,
status: true,
last_update: true
}
}
}
last_update: true,
},
},
},
},
},
});

if (!hostGroup) {
return res.status(404).json({ error: 'Host group not found' });
return res.status(404).json({ error: "Host group not found" });
}

res.json(hostGroup);
} catch (error) {
console.error('Error fetching host group:', error);
res.status(500).json({ error: 'Failed to fetch host group' });
console.error("Error fetching host group:", error);
res.status(500).json({ error: "Failed to fetch host group" });
}
});

// Create a new host group
router.post('/', authenticateToken, requireManageHosts, [
body('name').trim().isLength({ min: 1 }).withMessage('Name is required'),
body('description').optional().trim(),
body('color').optional().isHexColor().withMessage('Color must be a valid hex color')
], async (req, res) => {
router.post(
"/",
authenticateToken,
requireManageHosts,
[
body("name").trim().isLength({ min: 1 }).withMessage("Name is required"),
body("description").optional().trim(),
body("color")
.optional()
.isHexColor()
.withMessage("Color must be a valid hex color"),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
@@ -81,11 +93,13 @@ router.post('/', authenticateToken, requireManageHosts, [

// Check if host group with this name already exists
const existingGroup = await prisma.host_groups.findUnique({
where: { name }
where: { name },
});

if (existingGroup) {
return res.status(400).json({ error: 'A host group with this name already exists' });
return res
.status(400)
.json({ error: "A host group with this name already exists" });
}

const hostGroup = await prisma.host_groups.create({
@@ -93,24 +107,33 @@ router.post('/', authenticateToken, requireManageHosts, [
id: randomUUID(),
name,
description: description || null,
color: color || '#3B82F6',
updated_at: new Date()
}
color: color || "#3B82F6",
updated_at: new Date(),
},
});

res.status(201).json(hostGroup);
} catch (error) {
console.error('Error creating host group:', error);
res.status(500).json({ error: 'Failed to create host group' });
console.error("Error creating host group:", error);
res.status(500).json({ error: "Failed to create host group" });
}
});
},
);

// Update a host group
router.put('/:id', authenticateToken, requireManageHosts, [
body('name').trim().isLength({ min: 1 }).withMessage('Name is required'),
body('description').optional().trim(),
body('color').optional().isHexColor().withMessage('Color must be a valid hex color')
], async (req, res) => {
router.put(
"/:id",
authenticateToken,
requireManageHosts,
[
body("name").trim().isLength({ min: 1 }).withMessage("Name is required"),
body("description").optional().trim(),
body("color")
.optional()
.isHexColor()
.withMessage("Color must be a valid hex color"),
],
async (req, res) => {
try {
const errors = validationResult(req);
if (!errors.isEmpty()) {
@@ -122,23 +145,25 @@ router.put('/:id', authenticateToken, requireManageHosts, [

// Check if host group exists
const existingGroup = await prisma.host_groups.findUnique({
where: { id }
where: { id },
});

if (!existingGroup) {
return res.status(404).json({ error: 'Host group not found' });
return res.status(404).json({ error: "Host group not found" });
}

// Check if another host group with this name already exists
const duplicateGroup = await prisma.host_groups.findFirst({
where: {
name,
id: { not: id }
}
id: { not: id },
},
});

if (duplicateGroup) {
return res.status(400).json({ error: 'A host group with this name already exists' });
return res
.status(400)
.json({ error: "A host group with this name already exists" });
}

const hostGroup = await prisma.host_groups.update({
@@ -146,20 +171,25 @@ router.put('/:id', authenticateToken, requireManageHosts, [
data: {
name,
description: description || null,
color: color || '#3B82F6',
updated_at: new Date()
}
color: color || "#3B82F6",
updated_at: new Date(),
},
});

res.json(hostGroup);
} catch (error) {
console.error('Error updating host group:', error);
res.status(500).json({ error: 'Failed to update host group' });
console.error("Error updating host group:", error);
res.status(500).json({ error: "Failed to update host group" });
}
});
},
);

// Delete a host group
router.delete('/:id', authenticateToken, requireManageHosts, async (req, res) => {
router.delete(
"/:id",
authenticateToken,
requireManageHosts,
async (req, res) => {
try {
const { id } = req.params;

@@ -169,41 +199,48 @@ router.delete('/:id', authenticateToken, requireManageHosts, async (req, res) =>
include: {
_count: {
select: {
hosts: true
}
}
}
host_group_memberships: true,
},
},
},
});

if (!existingGroup) {
return res.status(404).json({ error: 'Host group not found' });
return res.status(404).json({ error: "Host group not found" });
}

// Check if host group has hosts
if (existingGroup._count.hosts > 0) {
return res.status(400).json({
error: 'Cannot delete host group that contains hosts. Please move or remove hosts first.'
// If host group has memberships, remove them first
if (existingGroup._count.host_group_memberships > 0) {
await prisma.host_group_memberships.deleteMany({
where: { host_group_id: id },
});
}

await prisma.host_groups.delete({
where: { id }
where: { id },
});

res.json({ message: 'Host group deleted successfully' });
res.json({ message: "Host group deleted successfully" });
} catch (error) {
console.error('Error deleting host group:', error);
res.status(500).json({ error: 'Failed to delete host group' });
console.error("Error deleting host group:", error);
res.status(500).json({ error: "Failed to delete host group" });
}
});
},
);

// Get hosts in a specific group
router.get('/:id/hosts', authenticateToken, async (req, res) => {
router.get("/:id/hosts", authenticateToken, async (req, res) => {
try {
const { id } = req.params;

const hosts = await prisma.hosts.findMany({
where: { host_group_id: id },
where: {
host_group_memberships: {
some: {
host_group_id: id,
},
},
},
select: {
id: true,
friendly_name: true,
@@ -213,17 +250,17 @@ router.get('/:id/hosts', authenticateToken, async (req, res) => {
architecture: true,
status: true,
last_update: true,
created_at: true
created_at: true,
},
orderBy: {
friendly_name: 'asc'
}
friendly_name: "asc",
},
});

res.json(hosts);
} catch (error) {
console.error('Error fetching hosts in group:', error);
res.status(500).json({ error: 'Failed to fetch hosts in group' });
console.error("Error fetching hosts in group:", error);
res.status(500).json({ error: "Failed to fetch hosts in group" });
}
});
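
One behavioural change worth noting in the hunks above: deleting a group no longer fails when it still contains hosts. The handler now clears host_group_memberships rows first, reflecting the move from a single host_group_id column to a many-to-many membership table. A sketch of the same flow wrapped in a transaction (the transaction is an assumption for illustration, not something the handler above does), so a failure cannot leave the group half-deleted:

// groupId is a placeholder for the id being deleted.
await prisma.$transaction([
  prisma.host_group_memberships.deleteMany({ where: { host_group_id: groupId } }),
  prisma.host_groups.delete({ where: { id: groupId } }),
]);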
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,53 +1,78 @@
const express = require('express');
const { PrismaClient } = require('@prisma/client');
const { body, validationResult } = require('express-validator');
const express = require("express");
const { PrismaClient } = require("@prisma/client");

const router = express.Router();
const prisma = new PrismaClient();

// Get all packages with their update status
router.get('/', async (req, res) => {
router.get("/", async (req, res) => {
  try {
    const {
      page = 1,
      limit = 50,
      search = '',
      category = '',
      needsUpdate = '',
      isSecurityUpdate = ''
      search = "",
      category = "",
      needsUpdate = "",
      isSecurityUpdate = "",
      host = "",
    } = req.query;

    const skip = (parseInt(page) - 1) * parseInt(limit);
    const take = parseInt(limit);
    const skip = (parseInt(page, 10) - 1) * parseInt(limit, 10);
    const take = parseInt(limit, 10);

    // Build where clause
    const where = {
      AND: [
        // Search filter
        search ? {
        search
          ? {
            OR: [
              { name: { contains: search, mode: 'insensitive' } },
              { description: { contains: search, mode: 'insensitive' } }
            ]
        } : {},
              { name: { contains: search, mode: "insensitive" } },
              { description: { contains: search, mode: "insensitive" } },
            ],
          }
          : {},
        // Category filter
        category ? { category: { equals: category } } : {},
        // Update status filters
        needsUpdate ? {
        // Host filter - only return packages installed on the specified host
        // Combined with update status filters if both are present
        host
          ? {
            host_packages: {
              some: {
                needs_update: needsUpdate === 'true'
                host_id: host,
                // If needsUpdate or isSecurityUpdate filters are present, apply them here
                ...(needsUpdate
                  ? { needs_update: needsUpdate === "true" }
                  : {}),
                ...(isSecurityUpdate
                  ? { is_security_update: isSecurityUpdate === "true" }
                  : {}),
              },
            },
          }
          }
        } : {},
        isSecurityUpdate ? {
          : {},
        // Update status filters (only applied if no host filter)
        // If host filter is present, these are already applied above
        !host && needsUpdate
          ? {
            host_packages: {
              some: {
                is_security_update: isSecurityUpdate === 'true'
                needs_update: needsUpdate === "true",
              },
            },
          }
          : {},
        !host && isSecurityUpdate
          ? {
            host_packages: {
              some: {
                is_security_update: isSecurityUpdate === "true",
              },
            },
          }
        } : {}
      ]
          : {},
      ],
    };

    // Get packages with counts
@@ -62,39 +87,49 @@ router.get('/', async (req, res) => {
      latest_version: true,
      created_at: true,
      _count: {
        host_packages: true
      }
        select: {
          host_packages: true,
        },
      },
    },
    skip,
    take,
    orderBy: {
      name: 'asc'
    }
      name: "asc",
    },
  }),
  prisma.packages.count({ where })
  prisma.packages.count({ where }),
]);

// Get additional stats for each package
const packagesWithStats = await Promise.all(
  packages.map(async (pkg) => {
    const [updatesCount, securityCount, affectedHosts] = await Promise.all([
    // Build base where clause for this package
    const baseWhere = { package_id: pkg.id };

    // If host filter is specified, add host filter to all queries
    const hostWhere = host ? { ...baseWhere, host_id: host } : baseWhere;

    const [updatesCount, securityCount, packageHosts] = await Promise.all([
      prisma.host_packages.count({
        where: {
          package_id: pkg.id,
          needs_update: true
        }
          ...hostWhere,
          needs_update: true,
        },
      }),
      prisma.host_packages.count({
        where: {
          package_id: pkg.id,
          ...hostWhere,
          needs_update: true,
          is_security_update: true
        }
          is_security_update: true,
        },
      }),
      prisma.host_packages.findMany({
        where: {
          package_id: pkg.id,
          needs_update: true
          ...hostWhere,
          // If host filter is specified, include all packages for that host
          // Otherwise, only include packages that need updates
          ...(host ? {} : { needs_update: true }),
        },
        select: {
          hosts: {
@@ -102,51 +137,56 @@ router.get('/', async (req, res) => {
            id: true,
            friendly_name: true,
            hostname: true,
            os_type: true
          }
        }
            os_type: true,
          },
        take: 10 // Limit to first 10 for performance
      })
          },
          current_version: true,
          available_version: true,
          needs_update: true,
          is_security_update: true,
        },
        take: 10, // Limit to first 10 for performance
      }),
    ]);

    return {
      ...pkg,
      affectedHostsCount: pkg._count.hostPackages,
      affectedHosts: affectedHosts.map(hp => ({
        hostId: hp.host.id,
        friendlyName: hp.host.friendly_name,
        osType: hp.host.os_type,
      packageHostsCount: pkg._count.host_packages,
      packageHosts: packageHosts.map((hp) => ({
        hostId: hp.hosts.id,
        friendlyName: hp.hosts.friendly_name,
        osType: hp.hosts.os_type,
        currentVersion: hp.current_version,
        availableVersion: hp.available_version,
        isSecurityUpdate: hp.is_security_update
        needsUpdate: hp.needs_update,
        isSecurityUpdate: hp.is_security_update,
      })),
      stats: {
        totalInstalls: pkg._count.hostPackages,
        totalInstalls: pkg._count.host_packages,
        updatesNeeded: updatesCount,
        securityUpdates: securityCount
      }
        securityUpdates: securityCount,
      },
    };
  })
  }),
);

res.json({
  packages: packagesWithStats,
  pagination: {
    page: parseInt(page),
    limit: parseInt(limit),
    page: parseInt(page, 10),
    limit: parseInt(limit, 10),
    total: totalCount,
    pages: Math.ceil(totalCount / parseInt(limit))
  }
    pages: Math.ceil(totalCount / parseInt(limit, 10)),
  },
});
} catch (error) {
  console.error('Error fetching packages:', error);
  res.status(500).json({ error: 'Failed to fetch packages' });
  console.error("Error fetching packages:", error);
  res.status(500).json({ error: "Failed to fetch packages" });
}
});

// Get package details by ID
router.get('/:packageId', async (req, res) => {
router.get("/:packageId", async (req, res) => {
  try {
    const { packageId } = req.params;

@@ -155,46 +195,50 @@ router.get('/:packageId', async (req, res) => {
    include: {
      host_packages: {
        include: {
          host: {
          hosts: {
            select: {
              id: true,
              hostname: true,
              ip: true,
              osType: true,
              osVersion: true,
              lastUpdate: true
            }
          }
              os_type: true,
              os_version: true,
              last_update: true,
            },
          },
        },
        orderBy: {
          needsUpdate: 'desc'
        }
      }
    }
          needs_update: "desc",
        },
      },
    },
  });

  if (!packageData) {
    return res.status(404).json({ error: 'Package not found' });
    return res.status(404).json({ error: "Package not found" });
  }

  // Calculate statistics
  const stats = {
    totalInstalls: packageData.host_packages.length,
    updatesNeeded: packageData.host_packages.filter(hp => hp.needsUpdate).length,
    securityUpdates: packageData.host_packages.filter(hp => hp.needsUpdate && hp.isSecurityUpdate).length,
    upToDate: packageData.host_packages.filter(hp => !hp.needsUpdate).length
    updatesNeeded: packageData.host_packages.filter((hp) => hp.needs_update)
      .length,
    securityUpdates: packageData.host_packages.filter(
      (hp) => hp.needs_update && hp.is_security_update,
    ).length,
    upToDate: packageData.host_packages.filter((hp) => !hp.needs_update)
      .length,
  };

  // Group by version
  const versionDistribution = packageData.host_packages.reduce((acc, hp) => {
    const version = hp.currentVersion;
    const version = hp.current_version;
    acc[version] = (acc[version] || 0) + 1;
    return acc;
  }, {});

  // Group by OS type
  const osDistribution = packageData.host_packages.reduce((acc, hp) => {
    const osType = hp.host.osType;
    const osType = hp.hosts.os_type;
    acc[osType] = (acc[osType] || 0) + 1;
    return acc;
  }, {});
@@ -203,19 +247,126 @@ router.get('/:packageId', async (req, res) => {
    ...packageData,
    stats,
    distributions: {
      versions: Object.entries(versionDistribution).map(([version, count]) => ({
      versions: Object.entries(versionDistribution).map(
        ([version, count]) => ({
          version,
          count
        })),
          count,
        }),
      ),
      osTypes: Object.entries(osDistribution).map(([osType, count]) => ({
        osType,
        count
      }))
    }
        count,
      })),
    },
  });
} catch (error) {
  console.error('Error fetching package details:', error);
  res.status(500).json({ error: 'Failed to fetch package details' });
  console.error("Error fetching package details:", error);
  res.status(500).json({ error: "Failed to fetch package details" });
}
});

// Get hosts where a package is installed
router.get("/:packageId/hosts", async (req, res) => {
  try {
    const { packageId } = req.params;
    const {
      page = 1,
      limit = 25,
      search = "",
      sortBy = "friendly_name",
      sortOrder = "asc",
    } = req.query;

    const offset = (parseInt(page, 10) - 1) * parseInt(limit, 10);

    // Build search conditions
    const searchConditions = search
      ? {
          OR: [
            {
              hosts: {
                friendly_name: { contains: search, mode: "insensitive" },
              },
            },
            { hosts: { hostname: { contains: search, mode: "insensitive" } } },
            { current_version: { contains: search, mode: "insensitive" } },
            { available_version: { contains: search, mode: "insensitive" } },
          ],
        }
      : {};

    // Build sort conditions
    const orderBy = {};
    if (
      sortBy === "friendly_name" ||
      sortBy === "hostname" ||
      sortBy === "os_type"
    ) {
      orderBy.hosts = { [sortBy]: sortOrder };
    } else if (sortBy === "needs_update") {
      orderBy[sortBy] = sortOrder;
    } else {
      orderBy[sortBy] = sortOrder;
    }

    // Get total count
    const totalCount = await prisma.host_packages.count({
      where: {
        package_id: packageId,
        ...searchConditions,
      },
    });

    // Get paginated results
    const hostPackages = await prisma.host_packages.findMany({
      where: {
        package_id: packageId,
        ...searchConditions,
      },
      include: {
        hosts: {
          select: {
            id: true,
            friendly_name: true,
            hostname: true,
            os_type: true,
            os_version: true,
            last_update: true,
          },
        },
      },
      orderBy,
      skip: offset,
      take: parseInt(limit, 10),
    });

    // Transform the data for the frontend
    const hosts = hostPackages.map((hp) => ({
      hostId: hp.hosts.id,
      friendlyName: hp.hosts.friendly_name,
      hostname: hp.hosts.hostname,
      osType: hp.hosts.os_type,
      osVersion: hp.hosts.os_version,
      lastUpdate: hp.hosts.last_update,
      currentVersion: hp.current_version,
      availableVersion: hp.available_version,
      needsUpdate: hp.needs_update,
      isSecurityUpdate: hp.is_security_update,
      lastChecked: hp.last_checked,
    }));

    res.json({
      hosts,
      pagination: {
        page: parseInt(page, 10),
        limit: parseInt(limit, 10),
        total: totalCount,
        pages: Math.ceil(totalCount / parseInt(limit, 10)),
      },
    });
  } catch (error) {
    console.error("Error fetching package hosts:", error);
    res.status(500).json({ error: "Failed to fetch package hosts" });
  }
});
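The `/:packageId/hosts` route added above pages, searches, and sorts the hosts a package is installed on. Assuming the router is mounted at `/api/v1/packages` (the mount point is not shown in this diff), a request and the response shape would look like:

    GET /api/v1/packages/<packageId>/hosts?page=2&limit=25&search=web&sortBy=needs_update&sortOrder=desc

    // Shape returned by the handler above:
    // {
    //   hosts: [{ hostId, friendlyName, hostname, osType, osVersion, lastUpdate,
    //             currentVersion, availableVersion, needsUpdate, isSecurityUpdate, lastChecked }],
    //   pagination: { page, limit, total, pages }
    // }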
@@ -1,49 +1,66 @@
const express = require('express');
const { PrismaClient } = require('@prisma/client');
const { authenticateToken, requireAdmin } = require('../middleware/auth');
const { requireManageSettings, requireManageUsers } = require('../middleware/permissions');
const express = require("express");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const {
  requireManageSettings,
  requireManageUsers,
} = require("../middleware/permissions");

const router = express.Router();
const prisma = new PrismaClient();

// Get all role permissions (allow users who can manage users to view roles)
router.get('/roles', authenticateToken, requireManageUsers, async (req, res) => {
router.get(
  "/roles",
  authenticateToken,
  requireManageUsers,
  async (_req, res) => {
    try {
      const permissions = await prisma.role_permissions.findMany({
        orderBy: {
          role: 'asc'
        }
          role: "asc",
        },
      });

      res.json(permissions);
    } catch (error) {
      console.error('Get role permissions error:', error);
      res.status(500).json({ error: 'Failed to fetch role permissions' });
      console.error("Get role permissions error:", error);
      res.status(500).json({ error: "Failed to fetch role permissions" });
    }
  });
  },
);

// Get permissions for a specific role
router.get('/roles/:role', authenticateToken, requireManageSettings, async (req, res) => {
router.get(
  "/roles/:role",
  authenticateToken,
  requireManageSettings,
  async (req, res) => {
    try {
      const { role } = req.params;

      const permissions = await prisma.role_permissions.findUnique({
        where: { role }
        where: { role },
      });

      if (!permissions) {
        return res.status(404).json({ error: 'Role not found' });
        return res.status(404).json({ error: "Role not found" });
      }

      res.json(permissions);
    } catch (error) {
      console.error('Get role permission error:', error);
      res.status(500).json({ error: 'Failed to fetch role permission' });
      console.error("Get role permission error:", error);
      res.status(500).json({ error: "Failed to fetch role permission" });
    }
  });
  },
);

// Create or update role permissions
router.put('/roles/:role', authenticateToken, requireManageSettings, async (req, res) => {
router.put(
  "/roles/:role",
  authenticateToken,
  requireManageSettings,
  async (req, res) => {
    try {
      const { role } = req.params;
      const {
@@ -56,12 +73,14 @@ router.put('/roles/:role', authenticateToken, requireManageSettings, async (req,
        can_manage_users,
        can_view_reports,
        can_export_data,
        can_manage_settings
        can_manage_settings,
      } = req.body;

      // Prevent modifying admin and user role permissions (built-in roles)
      if (role === 'admin' || role === 'user') {
        return res.status(400).json({ error: `Cannot modify ${role} role permissions - this is a built-in role` });
      if (role === "admin" || role === "user") {
        return res.status(400).json({
          error: `Cannot modify ${role} role permissions - this is a built-in role`,
        });
      }

      const permissions = await prisma.role_permissions.upsert({
@@ -77,10 +96,10 @@ router.put('/roles/:role', authenticateToken, requireManageSettings, async (req,
          can_view_reports: can_view_reports,
          can_export_data: can_export_data,
          can_manage_settings: can_manage_settings,
          updated_at: new Date()
          updated_at: new Date(),
        },
        create: {
          id: require('uuid').v4(),
          id: require("uuid").v4(),
          role,
          can_view_dashboard: can_view_dashboard,
          can_view_hosts: can_view_hosts,
@@ -92,61 +111,69 @@ router.put('/roles/:role', authenticateToken, requireManageSettings, async (req,
          can_view_reports: can_view_reports,
          can_export_data: can_export_data,
          can_manage_settings: can_manage_settings,
          updated_at: new Date()
        }
          updated_at: new Date(),
        },
      });

      res.json({
        message: 'Role permissions updated successfully',
        permissions
        message: "Role permissions updated successfully",
        permissions,
      });
    } catch (error) {
      console.error('Update role permissions error:', error);
      res.status(500).json({ error: 'Failed to update role permissions' });
      console.error("Update role permissions error:", error);
      res.status(500).json({ error: "Failed to update role permissions" });
    }
  });
  },
);

// Delete a role (and its permissions)
router.delete('/roles/:role', authenticateToken, requireManageSettings, async (req, res) => {
router.delete(
  "/roles/:role",
  authenticateToken,
  requireManageSettings,
  async (req, res) => {
    try {
      const { role } = req.params;

      // Prevent deleting admin and user roles (built-in roles)
      if (role === 'admin' || role === 'user') {
        return res.status(400).json({ error: `Cannot delete ${role} role - this is a built-in role` });
      if (role === "admin" || role === "user") {
        return res.status(400).json({
          error: `Cannot delete ${role} role - this is a built-in role`,
        });
      }

      // Check if any users are using this role
      const usersWithRole = await prisma.users.count({
        where: { role }
        where: { role },
      });

      if (usersWithRole > 0) {
        return res.status(400).json({
          error: `Cannot delete role "${role}" because ${usersWithRole} user(s) are currently using it`
          error: `Cannot delete role "${role}" because ${usersWithRole} user(s) are currently using it`,
        });
      }

      await prisma.role_permissions.delete({
        where: { role }
        where: { role },
      });

      res.json({
        message: `Role "${role}" deleted successfully`
        message: `Role "${role}" deleted successfully`,
      });
    } catch (error) {
      console.error('Delete role error:', error);
      res.status(500).json({ error: 'Failed to delete role' });
      console.error("Delete role error:", error);
      res.status(500).json({ error: "Failed to delete role" });
    }
  });
  },
);

// Get user's permissions based on their role
router.get('/user-permissions', authenticateToken, async (req, res) => {
router.get("/user-permissions", authenticateToken, async (req, res) => {
  try {
    const userRole = req.user.role;

    const permissions = await prisma.role_permissions.findUnique({
      where: { role: userRole }
      where: { role: userRole },
    });

    if (!permissions) {
@@ -168,8 +195,8 @@ router.get('/user-permissions', authenticateToken, async (req, res) => {

    res.json(permissions);
  } catch (error) {
    console.error('Get user permissions error:', error);
    res.status(500).json({ error: 'Failed to fetch user permissions' });
    console.error("Get user permissions error:", error);
    res.status(500).json({ error: "Failed to fetch user permissions" });
  }
});
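For reference, a sketch of a call against the upsert route above (the `/api/v1/permissions` mount point is an assumption not shown in this diff; the flag names come straight from it). Built-in `admin` and `user` roles are rejected with HTTP 400 by the guard shown above:

    PUT /api/v1/permissions/roles/auditor
    Content-Type: application/json

    {
      "can_view_dashboard": true,
      "can_view_hosts": true,
      "can_view_reports": true,
      "can_export_data": false,
      "can_manage_users": false,
      "can_manage_settings": false
    }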
@@ -1,14 +1,17 @@
const express = require('express');
const { body, validationResult } = require('express-validator');
const { PrismaClient } = require('@prisma/client');
const { authenticateToken } = require('../middleware/auth');
const { requireViewHosts, requireManageHosts } = require('../middleware/permissions');
const express = require("express");
const { body, validationResult } = require("express-validator");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const {
  requireViewHosts,
  requireManageHosts,
} = require("../middleware/permissions");

const router = express.Router();
const prisma = new PrismaClient();

// Get all repositories with host count
router.get('/', authenticateToken, requireViewHosts, async (req, res) => {
router.get("/", authenticateToken, requireViewHosts, async (_req, res) => {
  try {
    const repositories = await prisma.repositories.findMany({
      include: {
@@ -18,47 +21,51 @@ router.get('/', authenticateToken, requireViewHosts, async (req, res) => {
          select: {
            id: true,
            friendly_name: true,
            status: true
          }
        }
      }
            status: true,
          },
        },
      },
      },
      _count: {
        select: {
          host_repositories: true
        }
      }
          host_repositories: true,
        },
      orderBy: [
        { name: 'asc' },
        { url: 'asc' }
      ]
      },
    },
    orderBy: [{ name: "asc" }, { url: "asc" }],
  });

  // Transform data to include host counts and status
  const transformedRepos = repositories.map(repo => ({
  const transformedRepos = repositories.map((repo) => ({
    ...repo,
    hostCount: repo._count.host_repositories,
    enabledHostCount: repo.host_repositories.filter(hr => hr.is_enabled).length,
    activeHostCount: repo.host_repositories.filter(hr => hr.hosts.status === 'active').length,
    hosts: repo.host_repositories.map(hr => ({
    enabledHostCount: repo.host_repositories.filter((hr) => hr.is_enabled)
      .length,
    activeHostCount: repo.host_repositories.filter(
      (hr) => hr.hosts.status === "active",
    ).length,
    hosts: repo.host_repositories.map((hr) => ({
      id: hr.hosts.id,
      friendlyName: hr.hosts.friendly_name,
      status: hr.hosts.status,
      isEnabled: hr.is_enabled,
      lastChecked: hr.last_checked
    }))
      lastChecked: hr.last_checked,
    })),
  }));

  res.json(transformedRepos);
} catch (error) {
  console.error('Repository list error:', error);
  res.status(500).json({ error: 'Failed to fetch repositories' });
  console.error("Repository list error:", error);
  res.status(500).json({ error: "Failed to fetch repositories" });
}
});

// Get repositories for a specific host
router.get('/host/:hostId', authenticateToken, requireViewHosts, async (req, res) => {
router.get(
  "/host/:hostId",
  authenticateToken,
  requireViewHosts,
  async (req, res) => {
    try {
      const { hostId } = req.params;

@@ -69,26 +76,31 @@ router.get('/host/:hostId', authenticateToken, requireViewHosts, async (req, res
          hosts: {
            select: {
              id: true,
              friendly_name: true
            }
          }
              friendly_name: true,
            },
          },
        },
        orderBy: {
          repositories: {
            name: 'asc'
          }
        }
            name: "asc",
          },
        },
      });

      res.json(hostRepositories);
    } catch (error) {
      console.error('Host repositories error:', error);
      res.status(500).json({ error: 'Failed to fetch host repositories' });
      console.error("Host repositories error:", error);
      res.status(500).json({ error: "Failed to fetch host repositories" });
    }
  });
  },
);

// Get repository details with all hosts
router.get('/:repositoryId', authenticateToken, requireViewHosts, async (req, res) => {
router.get(
  "/:repositoryId",
  authenticateToken,
  requireViewHosts,
  async (req, res) => {
    try {
      const { repositoryId } = req.params;

@@ -106,37 +118,52 @@ router.get('/:repositoryId', authenticateToken, requireViewHosts, async (req, re
            os_type: true,
            os_version: true,
            status: true,
            last_update: true
          }
        }
            last_update: true,
          },
        },
      },
      orderBy: {
        hosts: {
          friendly_name: 'asc'
        }
      }
    }
  }
          friendly_name: "asc",
        },
      },
    },
  },
});

if (!repository) {
  return res.status(404).json({ error: 'Repository not found' });
  return res.status(404).json({ error: "Repository not found" });
}

res.json(repository);
} catch (error) {
  console.error('Repository detail error:', error);
  res.status(500).json({ error: 'Failed to fetch repository details' });
  console.error("Repository detail error:", error);
  res.status(500).json({ error: "Failed to fetch repository details" });
}
});
},
);

// Update repository information (admin only)
router.put('/:repositoryId', authenticateToken, requireManageHosts, [
  body('name').optional().isLength({ min: 1 }).withMessage('Name is required'),
  body('description').optional(),
  body('isActive').optional().isBoolean().withMessage('isActive must be a boolean'),
  body('priority').optional().isInt({ min: 0 }).withMessage('Priority must be a positive integer')
], async (req, res) => {
router.put(
  "/:repositoryId",
  authenticateToken,
  requireManageHosts,
  [
    body("name")
      .optional()
      .isLength({ min: 1 })
      .withMessage("Name is required"),
    body("description").optional(),
    body("isActive")
      .optional()
      .isBoolean()
      .withMessage("isActive must be a boolean"),
    body("priority")
      .optional()
      .isInt({ min: 0 })
      .withMessage("Priority must be a positive integer"),
  ],
  async (req, res) => {
    try {
      const errors = validationResult(req);
      if (!errors.isEmpty()) {
@@ -152,28 +179,32 @@ router.put('/:repositoryId', authenticateToken, requireManageHosts, [
          ...(name && { name }),
          ...(description !== undefined && { description }),
          ...(isActive !== undefined && { is_active: isActive }),
          ...(priority !== undefined && { priority })
          ...(priority !== undefined && { priority }),
        },
        include: {
          _count: {
            select: {
              host_repositories: true
            }
          }
        }
              host_repositories: true,
            },
          },
        },
      });

      res.json(repository);
    } catch (error) {
      console.error('Repository update error:', error);
      res.status(500).json({ error: 'Failed to update repository' });
      console.error("Repository update error:", error);
      res.status(500).json({ error: "Failed to update repository" });
    }
  });
  },
);

// Toggle repository status for a specific host
router.patch('/host/:hostId/repository/:repositoryId', authenticateToken, requireManageHosts, [
  body('isEnabled').isBoolean().withMessage('isEnabled must be a boolean')
], async (req, res) => {
router.patch(
  "/host/:hostId/repository/:repositoryId",
  authenticateToken,
  requireManageHosts,
  [body("isEnabled").isBoolean().withMessage("isEnabled must be a boolean")],
  async (req, res) => {
    try {
      const errors = validationResult(req);
      if (!errors.isEmpty()) {
@@ -187,55 +218,60 @@ router.patch('/host/:hostId/repository/:repositoryId', authenticateToken, requir
        where: {
          host_id_repository_id: {
            host_id: hostId,
            repository_id: repositoryId
          }
            repository_id: repositoryId,
          },
        },
        data: {
          is_enabled: isEnabled,
          last_checked: new Date()
          last_checked: new Date(),
        },
        include: {
          repositories: true,
          hosts: {
            select: {
              friendly_name: true
            }
          }
        }
              friendly_name: true,
            },
          },
        },
      });

      res.json({
        message: `Repository ${isEnabled ? 'enabled' : 'disabled'} for host ${hostRepository.hosts.friendly_name}`,
        hostRepository
        message: `Repository ${isEnabled ? "enabled" : "disabled"} for host ${hostRepository.hosts.friendly_name}`,
        hostRepository,
      });
    } catch (error) {
      console.error('Host repository toggle error:', error);
      res.status(500).json({ error: 'Failed to toggle repository status' });
      console.error("Host repository toggle error:", error);
      res.status(500).json({ error: "Failed to toggle repository status" });
    }
  });
  },
);

// Get repository statistics
router.get('/stats/summary', authenticateToken, requireViewHosts, async (req, res) => {
router.get(
  "/stats/summary",
  authenticateToken,
  requireViewHosts,
  async (_req, res) => {
    try {
      const stats = await prisma.repositories.aggregate({
        _count: true
        _count: true,
      });

      const hostRepoStats = await prisma.host_repositories.aggregate({
        _count: {
          is_enabled: true
          is_enabled: true,
        },
        where: {
          is_enabled: true
        }
          is_enabled: true,
        },
      });

      const secureRepos = await prisma.repositories.count({
        where: { is_secure: true }
        where: { is_secure: true },
      });

      const activeRepos = await prisma.repositories.count({
        where: { is_active: true }
        where: { is_active: true },
      });

      res.json({
@@ -243,33 +279,110 @@ router.get('/stats/summary', authenticateToken, requireViewHosts, async (req, re
        activeRepositories: activeRepos,
        secureRepositories: secureRepos,
        enabledHostRepositories: hostRepoStats._count.isEnabled,
        securityPercentage: stats._count > 0 ? Math.round((secureRepos / stats._count) * 100) : 0
        securityPercentage:
          stats._count > 0 ? Math.round((secureRepos / stats._count) * 100) : 0,
      });
    } catch (error) {
      console.error('Repository stats error:', error);
      res.status(500).json({ error: 'Failed to fetch repository statistics' });
      console.error("Repository stats error:", error);
      res.status(500).json({ error: "Failed to fetch repository statistics" });
    }
  },
);

// Delete a specific repository (admin only)
router.delete(
  "/:repositoryId",
  authenticateToken,
  requireManageHosts,
  async (req, res) => {
    try {
      const { repositoryId } = req.params;

      // Check if repository exists first
      const existingRepository = await prisma.repositories.findUnique({
        where: { id: repositoryId },
        select: {
          id: true,
          name: true,
          url: true,
          _count: {
            select: {
              host_repositories: true,
            },
          },
        },
      });

      if (!existingRepository) {
        return res.status(404).json({
          error: "Repository not found",
          details: "The repository may have been deleted or does not exist",
        });
      }

      // Delete repository and all related data (cascade will handle host_repositories)
      await prisma.repositories.delete({
        where: { id: repositoryId },
      });

      res.json({
        message: "Repository deleted successfully",
        deletedRepository: {
          id: existingRepository.id,
          name: existingRepository.name,
          url: existingRepository.url,
          hostCount: existingRepository._count.host_repositories,
        },
      });
    } catch (error) {
      console.error("Repository deletion error:", error);

      // Handle specific Prisma errors
      if (error.code === "P2025") {
        return res.status(404).json({
          error: "Repository not found",
          details: "The repository may have been deleted or does not exist",
        });
      }

      if (error.code === "P2003") {
        return res.status(400).json({
          error: "Cannot delete repository due to foreign key constraints",
          details: "The repository has related data that prevents deletion",
        });
      }

      res.status(500).json({
        error: "Failed to delete repository",
        details: error.message || "An unexpected error occurred",
      });
    }
  },
);

// Cleanup orphaned repositories (admin only)
router.delete('/cleanup/orphaned', authenticateToken, requireManageHosts, async (req, res) => {
router.delete(
  "/cleanup/orphaned",
  authenticateToken,
  requireManageHosts,
  async (_req, res) => {
    try {
      console.log('Cleaning up orphaned repositories...');
      console.log("Cleaning up orphaned repositories...");

      // Find repositories with no host relationships
      const orphanedRepos = await prisma.repositories.findMany({
        where: {
          host_repositories: {
            none: {}
          }
        }
            none: {},
          },
        },
      });

      if (orphanedRepos.length === 0) {
        return res.json({
          message: 'No orphaned repositories found',
          message: "No orphaned repositories found",
          deletedCount: 0,
          deletedRepositories: []
          deletedRepositories: [],
        });
      }

@@ -277,9 +390,9 @@ router.delete('/cleanup/orphaned', authenticateToken, requireManageHosts, async
      const deleteResult = await prisma.repositories.deleteMany({
        where: {
          hostRepositories: {
            none: {}
          }
        }
            none: {},
          },
        },
      });

      console.log(`Deleted ${deleteResult.count} orphaned repositories`);
@@ -287,16 +400,19 @@ router.delete('/cleanup/orphaned', authenticateToken, requireManageHosts, async
      res.json({
        message: `Successfully deleted ${deleteResult.count} orphaned repositories`,
        deletedCount: deleteResult.count,
        deletedRepositories: orphanedRepos.map(repo => ({
        deletedRepositories: orphanedRepos.map((repo) => ({
          id: repo.id,
          name: repo.name,
          url: repo.url
        }))
          url: repo.url,
        })),
      });
    } catch (error) {
      console.error('Repository cleanup error:', error);
      res.status(500).json({ error: 'Failed to cleanup orphaned repositories' });
      console.error("Repository cleanup error:", error);
      res
        .status(500)
        .json({ error: "Failed to cleanup orphaned repositories" });
    }
  });
  },
);

module.exports = router;
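The orphaned-repository cleanup above leans on Prisma's `none` relation filter: a repository counts as orphaned when it has zero `host_repositories` rows. The same idiom in isolation, as a minimal sketch using the model names from the diff:

    // Repositories that no host references anymore
    const orphanCount = await prisma.repositories.count({
      where: { host_repositories: { none: {} } },
    });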
backend/src/routes/searchRoutes.js (new file, 249 lines)
@@ -0,0 +1,249 @@
const express = require("express");
const router = express.Router();
const { createPrismaClient } = require("../config/database");
const { authenticateToken } = require("../middleware/auth");

const prisma = createPrismaClient();

/**
 * Global search endpoint
 * Searches across hosts, packages, repositories, and users
 * Returns categorized results
 */
router.get("/", authenticateToken, async (req, res) => {
  try {
    const { q } = req.query;

    if (!q || q.trim().length === 0) {
      return res.json({
        hosts: [],
        packages: [],
        repositories: [],
        users: [],
      });
    }

    const searchTerm = q.trim();

    // Prepare results object
    const results = {
      hosts: [],
      packages: [],
      repositories: [],
      users: [],
    };

    // Get user permissions from database
    let userPermissions = null;
    try {
      userPermissions = await prisma.role_permissions.findUnique({
        where: { role: req.user.role },
      });

      // If no specific permissions found, default to admin permissions
      if (!userPermissions) {
        console.warn(
          `No permissions found for role: ${req.user.role}, defaulting to admin access`,
        );
        userPermissions = {
          can_view_hosts: true,
          can_view_packages: true,
          can_view_users: true,
        };
      }
    } catch (permError) {
      console.error("Error fetching permissions:", permError);
      // Default to restrictive permissions on error
      userPermissions = {
        can_view_hosts: false,
        can_view_packages: false,
        can_view_users: false,
      };
    }

    // Search hosts if user has permission
    if (userPermissions.can_view_hosts) {
      try {
        const hosts = await prisma.hosts.findMany({
          where: {
            OR: [
              { hostname: { contains: searchTerm, mode: "insensitive" } },
              { friendly_name: { contains: searchTerm, mode: "insensitive" } },
              { ip: { contains: searchTerm, mode: "insensitive" } },
              { machine_id: { contains: searchTerm, mode: "insensitive" } },
            ],
          },
          select: {
            id: true,
            machine_id: true,
            hostname: true,
            friendly_name: true,
            ip: true,
            os_type: true,
            os_version: true,
            status: true,
            last_update: true,
          },
          take: 10, // Limit results
          orderBy: {
            last_update: "desc",
          },
        });

        results.hosts = hosts.map((host) => ({
          id: host.id,
          hostname: host.hostname,
          friendly_name: host.friendly_name,
          ip: host.ip,
          os_type: host.os_type,
          os_version: host.os_version,
          status: host.status,
          last_update: host.last_update,
          type: "host",
        }));
      } catch (error) {
        console.error("Error searching hosts:", error);
      }
    }

    // Search packages if user has permission
    if (userPermissions.can_view_packages) {
      try {
        const packages = await prisma.packages.findMany({
          where: {
            name: { contains: searchTerm, mode: "insensitive" },
          },
          select: {
            id: true,
            name: true,
            description: true,
            category: true,
            latest_version: true,
            _count: {
              select: {
                host_packages: true,
              },
            },
          },
          take: 10,
          orderBy: {
            name: "asc",
          },
        });

        results.packages = packages.map((pkg) => ({
          id: pkg.id,
          name: pkg.name,
          description: pkg.description,
          category: pkg.category,
          latest_version: pkg.latest_version,
          host_count: pkg._count.host_packages,
          type: "package",
        }));
      } catch (error) {
        console.error("Error searching packages:", error);
      }
    }

    // Search repositories if user has permission (usually same as hosts)
    if (userPermissions.can_view_hosts) {
      try {
        const repositories = await prisma.repositories.findMany({
          where: {
            OR: [
              { name: { contains: searchTerm, mode: "insensitive" } },
              { url: { contains: searchTerm, mode: "insensitive" } },
              { description: { contains: searchTerm, mode: "insensitive" } },
            ],
          },
          select: {
            id: true,
            name: true,
            url: true,
            distribution: true,
            repo_type: true,
            is_active: true,
            description: true,
            _count: {
              select: {
                host_repositories: true,
              },
            },
          },
          take: 10,
          orderBy: {
            name: "asc",
          },
        });

        results.repositories = repositories.map((repo) => ({
          id: repo.id,
          name: repo.name,
          url: repo.url,
          distribution: repo.distribution,
          repo_type: repo.repo_type,
          is_active: repo.is_active,
          description: repo.description,
          host_count: repo._count.host_repositories,
          type: "repository",
        }));
      } catch (error) {
        console.error("Error searching repositories:", error);
      }
    }

    // Search users if user has permission
    if (userPermissions.can_view_users) {
      try {
        const users = await prisma.users.findMany({
          where: {
            OR: [
              { username: { contains: searchTerm, mode: "insensitive" } },
              { email: { contains: searchTerm, mode: "insensitive" } },
              { first_name: { contains: searchTerm, mode: "insensitive" } },
              { last_name: { contains: searchTerm, mode: "insensitive" } },
            ],
          },
          select: {
            id: true,
            username: true,
            email: true,
            first_name: true,
            last_name: true,
            role: true,
            is_active: true,
            last_login: true,
          },
          take: 10,
          orderBy: {
            username: "asc",
          },
        });

        results.users = users.map((user) => ({
          id: user.id,
          username: user.username,
          email: user.email,
          first_name: user.first_name,
          last_name: user.last_name,
          role: user.role,
          is_active: user.is_active,
          last_login: user.last_login,
          type: "user",
        }));
      } catch (error) {
        console.error("Error searching users:", error);
      }
    }

    res.json(results);
  } catch (error) {
    console.error("Global search error:", error);
    res.status(500).json({
      error: "Failed to perform search",
      message: error.message,
    });
  }
});

module.exports = router;
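Assuming the search router is mounted at `/api/v1/search` (the mount point is not part of this file), a typical call looks like:

    GET /api/v1/search?q=web-01
    Authorization: Bearer <token>

    // => { hosts: [...], packages: [...], repositories: [...], users: [...] }
    // Each category is capped at 10 rows and gated by the caller's role permissions.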
@@ -1,211 +1,471 @@
const express = require('express');
const { body, validationResult } = require('express-validator');
const { PrismaClient } = require('@prisma/client');
const { authenticateToken } = require('../middleware/auth');
const { requireManageSettings } = require('../middleware/permissions');
const { getSettings, updateSettings } = require('../services/settingsService');
const express = require("express");
const { body, validationResult } = require("express-validator");
const { PrismaClient } = require("@prisma/client");
const { authenticateToken } = require("../middleware/auth");
const { requireManageSettings } = require("../middleware/permissions");
const { getSettings, updateSettings } = require("../services/settingsService");

const router = express.Router();
const prisma = new PrismaClient();

// Function to trigger crontab updates on all hosts with auto-update enabled
async function triggerCrontabUpdates() {
  try {
    console.log('Triggering crontab updates on all hosts with auto-update enabled...');
// WebSocket broadcaster for agent policy updates (no longer used - queue-based delivery preferred)
// const { broadcastSettingsUpdate } = require("../services/agentWs");
const { queueManager, QUEUE_NAMES } = require("../services/automation");

    // Get current settings for server URL
    const settings = await getSettings();
    const serverUrl = settings.server_url;

    // Get all hosts that have auto-update enabled
    const hosts = await prisma.hosts.findMany({
      where: {
        auto_update: true,
        status: 'active' // Only update active hosts
      },
      select: {
        id: true,
        friendly_name: true,
        api_id: true,
        api_key: true
// Helpers
function normalizeUpdateInterval(minutes) {
  let m = parseInt(minutes, 10);
  if (Number.isNaN(m)) return 60;
  if (m < 5) m = 5;
  if (m > 1440) m = 1440;
  if (m < 60) {
    // Clamp to 5-59, step 5
    const snapped = Math.round(m / 5) * 5;
    return Math.min(59, Math.max(5, snapped));
  }
    });

    console.log(`Found ${hosts.length} hosts with auto-update enabled`);

    // For each host, we'll send a special update command that triggers crontab update
    // This is done by sending a ping with a special flag
    for (const host of hosts) {
      try {
        console.log(`Triggering crontab update for host: ${host.friendly_name}`);

        // We'll use the existing ping endpoint but add a special parameter
        // The agent will detect this and run update-crontab command
        const http = require('http');
        const https = require('https');

        const url = new URL(`${serverUrl}/api/v1/hosts/ping`);
        const isHttps = url.protocol === 'https:';
        const client = isHttps ? https : http;

        const postData = JSON.stringify({
          triggerCrontabUpdate: true,
          message: 'Update interval changed, please update your crontab'
        });

        const options = {
          hostname: url.hostname,
          port: url.port || (isHttps ? 443 : 80),
          path: url.pathname,
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'Content-Length': Buffer.byteLength(postData),
            'X-API-ID': host.api_id,
            'X-API-KEY': host.api_key
          }
        };

        const req = client.request(options, (res) => {
          if (res.statusCode === 200) {
            console.log(`Successfully triggered crontab update for ${host.friendly_name}`);
          } else {
            console.error(`Failed to trigger crontab update for ${host.friendly_name}: ${res.statusCode}`);
          }
        });

        req.on('error', (error) => {
          console.error(`Error triggering crontab update for ${host.friendly_name}:`, error.message);
        });

        req.write(postData);
        req.end();
      } catch (error) {
        console.error(`Error triggering crontab update for ${host.friendly_name}:`, error.message);
  // Allowed hour-based presets
  const allowed = [60, 120, 180, 360, 720, 1440];
  let nearest = allowed[0];
  let bestDiff = Math.abs(m - nearest);
  for (const a of allowed) {
    const d = Math.abs(m - a);
    if (d < bestDiff) {
      bestDiff = d;
      nearest = a;
    }
  }

    console.log('Crontab update trigger completed');
  } catch (error) {
    console.error('Error in triggerCrontabUpdates:', error);
  return nearest;
}

function buildCronExpression(minutes) {
  const m = normalizeUpdateInterval(minutes);
  if (m < 60) {
    return `*/${m} * * * *`;
  }
  if (m === 60) {
    // Hourly at current minute is chosen by agent; default 0 here
    return `0 * * * *`;
  }
  const hours = Math.floor(m / 60);
  // Every N hours at minute 0
  return `0 */${hours} * * *`;
}
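Worked examples for the two helpers above, following their logic exactly (inputs chosen for illustration):

    normalizeUpdateInterval(7);    // => 5    -> buildCronExpression(5)  => "*/5 * * * *"  (snapped to 5-minute steps)
    normalizeUpdateInterval(47);   // => 45   -> "*/45 * * * *"
    normalizeUpdateInterval(90);   // => 60   -> "0 * * * *"             (nearest hour-based preset)
    normalizeUpdateInterval(400);  // => 360  -> "0 */6 * * *"           (every 6 hours)
    normalizeUpdateInterval(9999); // => 1440 -> "0 */24 * * *"          (clamped to one day)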
// Get current settings
|
||||
router.get('/', authenticateToken, requireManageSettings, async (req, res) => {
|
||||
router.get("/", authenticateToken, requireManageSettings, async (_req, res) => {
|
||||
try {
|
||||
const settings = await getSettings();
|
||||
console.log('Returning settings:', settings);
|
||||
if (process.env.ENABLE_LOGGING === "true") {
|
||||
console.log("Returning settings:", settings);
|
||||
}
|
||||
res.json(settings);
|
||||
} catch (error) {
|
||||
console.error('Settings fetch error:', error);
|
||||
res.status(500).json({ error: 'Failed to fetch settings' });
|
||||
console.error("Settings fetch error:", error);
|
||||
res.status(500).json({ error: "Failed to fetch settings" });
|
||||
}
|
||||
});
|
||||
|
||||
// Update settings
|
||||
router.put('/', authenticateToken, requireManageSettings, [
|
||||
body('serverProtocol').isIn(['http', 'https']).withMessage('Protocol must be http or https'),
|
||||
body('serverHost').isLength({ min: 1 }).withMessage('Server host is required'),
|
||||
body('serverPort').isInt({ min: 1, max: 65535 }).withMessage('Port must be between 1 and 65535'),
|
||||
body('updateInterval').isInt({ min: 5, max: 1440 }).withMessage('Update interval must be between 5 and 1440 minutes'),
|
||||
body('autoUpdate').isBoolean().withMessage('Auto update must be a boolean'),
|
||||
body('signupEnabled').isBoolean().withMessage('Signup enabled must be a boolean'),
|
||||
body('defaultUserRole').optional().isLength({ min: 1 }).withMessage('Default user role must be a non-empty string'),
|
||||
body('githubRepoUrl').optional().isLength({ min: 1 }).withMessage('GitHub repo URL must be a non-empty string'),
|
||||
body('repositoryType').optional().isIn(['public', 'private']).withMessage('Repository type must be public or private'),
|
||||
body('sshKeyPath').optional().custom((value) => {
|
||||
router.put(
|
||||
"/",
|
||||
authenticateToken,
|
||||
requireManageSettings,
|
||||
[
|
||||
body("serverProtocol")
|
||||
.optional()
|
||||
.isIn(["http", "https"])
|
||||
.withMessage("Protocol must be http or https"),
|
||||
body("serverHost")
|
||||
.optional()
|
||||
.isLength({ min: 1 })
|
||||
.withMessage("Server host is required"),
|
||||
body("serverPort")
|
||||
.optional()
|
||||
.isInt({ min: 1, max: 65535 })
|
||||
.withMessage("Port must be between 1 and 65535"),
|
||||
body("updateInterval")
|
||||
.optional()
|
||||
.isInt({ min: 5, max: 1440 })
|
||||
.withMessage("Update interval must be between 5 and 1440 minutes"),
|
||||
body("autoUpdate")
|
||||
.optional()
|
||||
.isBoolean()
|
||||
.withMessage("Auto update must be a boolean"),
|
||||
body("ignoreSslSelfSigned")
|
||||
.optional()
|
||||
.isBoolean()
|
||||
.withMessage("Ignore SSL self-signed must be a boolean"),
|
||||
body("signupEnabled")
|
||||
.optional()
|
||||
.isBoolean()
|
||||
.withMessage("Signup enabled must be a boolean"),
|
||||
body("defaultUserRole")
|
||||
.optional()
|
||||
.isLength({ min: 1 })
|
||||
.withMessage("Default user role must be a non-empty string"),
|
||||
body("githubRepoUrl")
|
||||
.optional()
|
||||
.isLength({ min: 1 })
|
||||
.withMessage("GitHub repo URL must be a non-empty string"),
|
||||
body("repositoryType")
|
||||
.optional()
|
||||
.isIn(["public", "private"])
|
||||
.withMessage("Repository type must be public or private"),
|
||||
body("sshKeyPath")
|
||||
.optional()
|
||||
.custom((value) => {
|
||||
if (value && value.trim().length === 0) {
|
||||
return true; // Allow empty string
|
||||
}
|
||||
if (value && value.trim().length < 1) {
|
||||
throw new Error('SSH key path must be a non-empty string');
|
||||
throw new Error("SSH key path must be a non-empty string");
|
||||
}
|
||||
return true;
|
||||
})
|
||||
], async (req, res) => {
|
||||
}),
|
||||
body("logoDark")
|
||||
.optional()
|
||||
.isLength({ min: 1 })
|
||||
.withMessage("Logo dark path must be a non-empty string"),
|
||||
body("logoLight")
|
||||
.optional()
|
||||
.isLength({ min: 1 })
|
||||
.withMessage("Logo light path must be a non-empty string"),
|
||||
body("favicon")
|
||||
.optional()
|
||||
.isLength({ min: 1 })
|
||||
.withMessage("Favicon path must be a non-empty string"),
|
||||
],
|
||||
async (req, res) => {
|
||||
try {
|
||||
const errors = validationResult(req);
|
||||
if (!errors.isEmpty()) {
|
||||
console.log('Validation errors:', errors.array());
|
||||
console.log("Validation errors:", errors.array());
|
||||
return res.status(400).json({ errors: errors.array() });
|
||||
}
|
||||
|
||||
const { serverProtocol, serverHost, serverPort, updateInterval, autoUpdate, signupEnabled, defaultUserRole, githubRepoUrl, repositoryType, sshKeyPath } = req.body;
|
||||
const {
|
||||
serverProtocol,
|
||||
serverHost,
|
||||
serverPort,
|
||||
updateInterval,
|
||||
autoUpdate,
|
||||
ignoreSslSelfSigned,
|
||||
signupEnabled,
|
||||
defaultUserRole,
|
||||
githubRepoUrl,
|
||||
repositoryType,
|
||||
sshKeyPath,
|
||||
logoDark,
|
||||
logoLight,
|
||||
favicon,
|
||||
} = req.body;
|
||||
|
||||
// Get current settings to check for update interval changes
|
||||
const currentSettings = await getSettings();
|
||||
const oldUpdateInterval = currentSettings.update_interval;
|
||||
|
||||
// Update settings using the service
|
||||
const updatedSettings = await updateSettings(currentSettings.id, {
|
||||
server_protocol: serverProtocol,
|
||||
server_host: serverHost,
|
||||
server_port: serverPort,
|
||||
update_interval: updateInterval || 60,
|
||||
auto_update: autoUpdate || false,
|
||||
signup_enabled: signupEnabled || false,
|
||||
default_user_role: defaultUserRole || process.env.DEFAULT_USER_ROLE || 'user',
|
||||
github_repo_url: githubRepoUrl !== undefined ? githubRepoUrl : 'git@github.com:9technologygroup/patchmon.net.git',
|
||||
repository_type: repositoryType || 'public',
|
||||
ssh_key_path: sshKeyPath || null,
|
||||
// Build update object with only provided fields
|
||||
const updateData = {};
|
||||
|
||||
if (serverProtocol !== undefined)
|
||||
updateData.server_protocol = serverProtocol;
|
||||
if (serverHost !== undefined) updateData.server_host = serverHost;
|
||||
if (serverPort !== undefined) updateData.server_port = serverPort;
|
||||
if (updateInterval !== undefined) {
|
||||
updateData.update_interval = normalizeUpdateInterval(updateInterval);
|
||||
}
|
||||
if (autoUpdate !== undefined) updateData.auto_update = autoUpdate;
|
||||
if (ignoreSslSelfSigned !== undefined)
|
||||
updateData.ignore_ssl_self_signed = ignoreSslSelfSigned;
|
||||
if (signupEnabled !== undefined)
|
||||
updateData.signup_enabled = signupEnabled;
|
||||
if (defaultUserRole !== undefined)
|
||||
updateData.default_user_role = defaultUserRole;
|
||||
if (githubRepoUrl !== undefined)
|
||||
updateData.github_repo_url = githubRepoUrl;
|
||||
if (repositoryType !== undefined)
|
||||
updateData.repository_type = repositoryType;
|
||||
if (sshKeyPath !== undefined) updateData.ssh_key_path = sshKeyPath;
|
||||
if (logoDark !== undefined) updateData.logo_dark = logoDark;
|
||||
if (logoLight !== undefined) updateData.logo_light = logoLight;
|
||||
if (favicon !== undefined) updateData.favicon = favicon;
|
||||
|
||||
const updatedSettings = await updateSettings(
|
||||
currentSettings.id,
|
||||
updateData,
|
||||
);
|
||||
|
||||
console.log("Settings updated successfully:", updatedSettings);
|
||||
|
||||
// If update interval changed, enqueue persistent jobs for agents
|
||||
if (
|
||||
updateInterval !== undefined &&
|
||||
oldUpdateInterval !== updateData.update_interval
|
||||
) {
|
||||
console.log(
|
||||
`Update interval changed from ${oldUpdateInterval} to ${updateData.update_interval} minutes. Enqueueing agent settings updates...`,
|
||||
);
|
||||
|
||||
const hosts = await prisma.hosts.findMany({
|
||||
where: { status: "active" },
|
||||
select: { api_id: true },
|
||||
});
|
||||
|
||||
console.log('Settings updated successfully:', updatedSettings);
|
||||
const queue = queueManager.queues[QUEUE_NAMES.AGENT_COMMANDS];
const jobs = hosts.map((h) => ({
	name: "settings_update",
	data: {
		api_id: h.api_id,
		type: "settings_update",
		update_interval: updateData.update_interval,
	},
	opts: { attempts: 10, backoff: { type: "exponential", delay: 5000 } },
}));

// If update interval changed, trigger crontab updates on all hosts with auto-update enabled
if (oldUpdateInterval !== (updateInterval || 60)) {
	console.log(
		`Update interval changed from ${oldUpdateInterval} to ${updateInterval || 60} minutes. Triggering crontab updates...`,
	);
	await triggerCrontabUpdates();
	// Bulk add jobs
	await queue.addBulk(jobs);

	// Note: Queue-based delivery handles retries and ensures reliable delivery
	// No need for immediate broadcast as it would cause duplicate messages
}

res.json({
	message: "Settings updated successfully",
	settings: updatedSettings,
});
} catch (error) {
	console.error("Settings update error:", error);
	res.status(500).json({ error: "Failed to update settings" });
}
},
);

// Get server URL for public use (used by installation scripts)
router.get("/server-url", async (_req, res) => {
	try {
		const settings = await getSettings();
		const serverUrl = settings.server_url;
		res.json({ server_url: serverUrl });
	} catch (error) {
		console.error("Server URL fetch error:", error);
		res.status(500).json({ error: "Failed to fetch server URL" });
	}
});

// Get update interval policy for agents (requires API authentication)
router.get("/update-interval", async (req, res) => {
	try {
		// Verify API credentials
		const apiId = req.headers["x-api-id"];
		const apiKey = req.headers["x-api-key"];

		if (!apiId || !apiKey) {
			return res.status(401).json({ error: "API credentials required" });
		}

		// Validate API credentials
		const host = await prisma.hosts.findUnique({
			where: { api_id: apiId },
		});

		if (!host || host.api_key !== apiKey) {
			return res.status(401).json({ error: "Invalid API credentials" });
		}

		const settings = await getSettings();
		const interval = normalizeUpdateInterval(settings.update_interval || 60);
		res.json({
			updateInterval: interval,
			cronExpression: buildCronExpression(interval),
		});
	} catch (error) {
		console.error("Update interval fetch error:", error);
		res.json({ updateInterval: 60, cronExpression: "0 * * * *" });
	}
});
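
// The route above relies on normalizeUpdateInterval() and buildCronExpression(),
// which are defined elsewhere in this file. A minimal sketch of plausible
// implementations (an illustrative assumption, not the actual bodies), kept
// consistent with the cron strings this file emits elsewhere:
//
//   function normalizeUpdateInterval(interval) {
//     const parsed = Number.parseInt(interval, 10);
//     if (Number.isNaN(parsed) || parsed < 5) return 60; // clamp to a sane default
//     return Math.min(parsed, 1440); // cap at one day
//   }
//
//   function buildCronExpression(interval) {
//     if (interval < 60) return `*/${interval} * * * *`; // every N minutes
//     const hours = Math.floor(interval / 60);
//     return hours === 1 ? "0 * * * *" : `0 */${hours} * * *`; // hourly granularity
//   }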

// Get auto-update policy for agents (public endpoint)
router.get("/auto-update", async (_req, res) => {
	try {
		const settings = await getSettings();
		res.json({
			autoUpdate: settings.auto_update || false,
		});
	} catch (error) {
		console.error("Auto-update fetch error:", error);
		res.json({ autoUpdate: false });
	}
});

// Upload logo files
router.post(
	"/logos/upload",
	authenticateToken,
	requireManageSettings,
	async (req, res) => {
		try {
			const { logoType, fileContent, fileName } = req.body;

			if (!logoType || !fileContent) {
				return res.status(400).json({
					error: "Logo type and file content are required",
				});
			}

			if (!["dark", "light", "favicon"].includes(logoType)) {
				return res.status(400).json({
					error: "Logo type must be 'dark', 'light', or 'favicon'",
				});
			}

			// Validate file content (basic checks)
			if (typeof fileContent !== "string") {
				return res.status(400).json({
					error: "File content must be a base64 string",
				});
			}

			const fs = require("node:fs").promises;
			const path = require("node:path");
			const _crypto = require("node:crypto");

			// Create assets directory if it doesn't exist
			// In development: save to public/assets (served by Vite)
			// In production: save to dist/assets (served by built app)
			const isDevelopment = process.env.NODE_ENV !== "production";
			const assetsDir = isDevelopment
				? path.join(__dirname, "../../../frontend/public/assets")
				: path.join(__dirname, "../../../frontend/dist/assets");
			await fs.mkdir(assetsDir, { recursive: true });

			// Determine file extension and path
			let fileExtension;
			let fileName_final;

			if (logoType === "favicon") {
				fileExtension = ".svg";
				fileName_final = fileName || "logo_square.svg";
			} else {
				// Determine extension from file content or use default
				if (fileContent.startsWith("data:image/png")) {
					fileExtension = ".png";
				} else if (fileContent.startsWith("data:image/svg")) {
					fileExtension = ".svg";
				} else if (
					fileContent.startsWith("data:image/jpeg") ||
					fileContent.startsWith("data:image/jpg")
				) {
					fileExtension = ".jpg";
				} else {
					fileExtension = ".png"; // Default to PNG
				}
				fileName_final = fileName || `logo_${logoType}${fileExtension}`;
			}

			const filePath = path.join(assetsDir, fileName_final);

			// Handle base64 data URLs
			let fileBuffer;
			if (fileContent.startsWith("data:")) {
				const base64Data = fileContent.split(",")[1];
				fileBuffer = Buffer.from(base64Data, "base64");
			} else {
				// Assume it's already base64
				fileBuffer = Buffer.from(fileContent, "base64");
			}

			// Create backup of existing file
			try {
				const backupPath = `${filePath}.backup.${Date.now()}`;
				await fs.copyFile(filePath, backupPath);
				console.log(`Created backup: ${backupPath}`);
			} catch (error) {
				// Ignore if original doesn't exist
				if (error.code !== "ENOENT") {
					console.warn("Failed to create backup:", error.message);
				}
			}

			// Write new logo file
			await fs.writeFile(filePath, fileBuffer);

			// Update settings with new logo path
			const settings = await getSettings();
			const logoPath = `/assets/${fileName_final}`;

			const updateData = {};
			if (logoType === "dark") {
				updateData.logo_dark = logoPath;
			} else if (logoType === "light") {
				updateData.logo_light = logoPath;
			} else if (logoType === "favicon") {
				updateData.favicon = logoPath;
			}

			await updateSettings(settings.id, updateData);

			// Get file stats
			const stats = await fs.stat(filePath);

			res.json({
				message: `${logoType} logo uploaded successfully`,
				fileName: fileName_final,
				path: logoPath,
				size: stats.size,
				sizeFormatted: `${(stats.size / 1024).toFixed(1)} KB`,
			});
		} catch (error) {
			console.error("Upload logo error:", error);
			res.status(500).json({ error: "Failed to upload logo" });
		}
	},
);
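
// Example client call for the upload endpoint above (an illustrative sketch;
// the mount path "/api/v1/settings" and the `token` variable are assumptions,
// since the router's mount point is not shown here):
//
//   const reader = new FileReader();
//   reader.onload = async () => {
//     await fetch("/api/v1/settings/logos/upload", {
//       method: "POST",
//       headers: {
//         "Content-Type": "application/json",
//         Authorization: `Bearer ${token}`,
//       },
//       body: JSON.stringify({
//         logoType: "dark",
//         fileName: "logo_dark.png",
//         fileContent: reader.result, // "data:image/png;base64,..." data URL
//       }),
//     });
//   };
//   reader.readAsDataURL(file); // `file` from an <input type="file">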

// Reset logo to default
router.post(
	"/logos/reset",
	authenticateToken,
	requireManageSettings,
	async (req, res) => {
		try {
			const { logoType } = req.body;

			if (!logoType) {
				return res.status(400).json({
					error: "Logo type is required",
				});
			}

			if (!["dark", "light", "favicon"].includes(logoType)) {
				return res.status(400).json({
					error: "Logo type must be 'dark', 'light', or 'favicon'",
				});
			}

			// Get current settings
			const settings = await getSettings();

			// Clear the custom logo path to revert to default
			const updateData = {};
			if (logoType === "dark") {
				updateData.logo_dark = null;
			} else if (logoType === "light") {
				updateData.logo_light = null;
			} else if (logoType === "favicon") {
				updateData.favicon = null;
			}

			await updateSettings(settings.id, updateData);

			res.json({
				message: `${logoType} logo reset to default successfully`,
				logoType,
			});
		} catch (error) {
			console.error("Reset logo error:", error);
			res.status(500).json({ error: "Failed to reset logo" });
		}
	},
);

module.exports = router;

@@ -1,35 +1,35 @@
const express = require("express");
const { PrismaClient } = require("@prisma/client");
const speakeasy = require("speakeasy");
const QRCode = require("qrcode");
const { authenticateToken } = require("../middleware/auth");
const { body, validationResult } = require("express-validator");

const router = express.Router();
const prisma = new PrismaClient();

// Generate TFA secret and QR code
router.get("/setup", authenticateToken, async (req, res) => {
	try {
		const userId = req.user.id;

		// Check if user already has TFA enabled
		const user = await prisma.users.findUnique({
			where: { id: userId },
			select: { tfa_enabled: true, tfa_secret: true },
		});

		if (user.tfa_enabled) {
			return res.status(400).json({
				error: "Two-factor authentication is already enabled for this account",
			});
		}

		// Generate a new secret
		const secret = speakeasy.generateSecret({
			name: `PatchMon (${req.user.username})`,
			issuer: "PatchMon",
			length: 32,
		});

		// Generate QR code
@@ -38,25 +38,33 @@ router.get('/setup', authenticateToken, async (req, res) => {
		// Store the secret temporarily (not enabled yet)
		await prisma.users.update({
			where: { id: userId },
			data: { tfa_secret: secret.base32 },
		});

		res.json({
			secret: secret.base32,
			qrCode: qrCodeUrl,
			manualEntryKey: secret.base32,
		});
	} catch (error) {
		console.error("TFA setup error:", error);
		res
			.status(500)
			.json({ error: "Failed to setup two-factor authentication" });
	}
});

// Verify TFA setup
router.post(
	"/verify-setup",
	authenticateToken,
	[
		body("token")
			.isLength({ min: 6, max: 6 })
			.withMessage("Token must be 6 digits"),
		body("token").isNumeric().withMessage("Token must contain only numbers"),
	],
	async (req, res) => {
		try {
			const errors = validationResult(req);
			if (!errors.isEmpty()) {
@@ -69,38 +77,39 @@ router.post('/verify-setup', authenticateToken, [
			// Get user's TFA secret
			const user = await prisma.users.findUnique({
				where: { id: userId },
				select: { tfa_secret: true, tfa_enabled: true },
			});

			if (!user.tfa_secret) {
				return res.status(400).json({
					error: "No TFA secret found. Please start the setup process first.",
				});
			}

			if (user.tfa_enabled) {
				return res.status(400).json({
					error:
						"Two-factor authentication is already enabled for this account",
				});
			}

			// Verify the token
			const verified = speakeasy.totp.verify({
				secret: user.tfa_secret,
				encoding: "base32",
				token: token,
				window: 2, // Allow 2 time windows (60 seconds) for clock drift
			});

			if (!verified) {
				return res.status(400).json({
					error: "Invalid verification code. Please try again.",
				});
			}

			// Generate backup codes
			const backupCodes = Array.from({ length: 10 }, () =>
				Math.random().toString(36).substring(2, 8).toUpperCase(),
			);

			// Enable TFA and store backup codes
@@ -108,69 +117,81 @@ router.post('/verify-setup', authenticateToken, [
				where: { id: userId },
				data: {
					tfa_enabled: true,
					tfa_backup_codes: JSON.stringify(backupCodes),
				},
			});

			res.json({
				message: "Two-factor authentication has been enabled successfully",
				backupCodes: backupCodes,
			});
		} catch (error) {
			console.error("TFA verification error:", error);
			res
				.status(500)
				.json({ error: "Failed to verify two-factor authentication setup" });
		}
	},
);

// Disable TFA
router.post(
	"/disable",
	authenticateToken,
	[
		body("password")
			.notEmpty()
			.withMessage("Password is required to disable TFA"),
	],
	async (req, res) => {
		try {
			const errors = validationResult(req);
			if (!errors.isEmpty()) {
				return res.status(400).json({ errors: errors.array() });
			}

			const { password: _password } = req.body;
			const userId = req.user.id;

			// Verify password
			const user = await prisma.users.findUnique({
				where: { id: userId },
				select: { password_hash: true, tfa_enabled: true },
			});

			if (!user.tfa_enabled) {
				return res.status(400).json({
					error: "Two-factor authentication is not enabled for this account",
				});
			}

			// FIXME: In a real implementation, you would verify the password hash here
			// For now, we'll skip password verification for simplicity

			// Disable TFA
			await prisma.users.update({
				where: { id: userId },
				data: {
					tfa_enabled: false,
					tfa_secret: null,
					tfa_backup_codes: null,
				},
			});

			res.json({
				message: "Two-factor authentication has been disabled successfully",
			});
		} catch (error) {
			console.error("TFA disable error:", error);
			res
				.status(500)
				.json({ error: "Failed to disable two-factor authentication" });
		}
	},
);

// Get TFA status
router.get("/status", authenticateToken, async (req, res) => {
	try {
		const userId = req.user.id;

@@ -179,66 +200,71 @@ router.get('/status', authenticateToken, async (req, res) => {
			select: {
				tfa_enabled: true,
				tfa_secret: true,
				tfa_backup_codes: true,
			},
		});

		res.json({
			enabled: user.tfa_enabled,
			hasBackupCodes: !!user.tfa_backup_codes,
		});
	} catch (error) {
		console.error("TFA status error:", error);
		res.status(500).json({ error: "Failed to get TFA status" });
	}
});

// Regenerate backup codes
router.post("/regenerate-backup-codes", authenticateToken, async (req, res) => {
	try {
		const userId = req.user.id;

		// Check if TFA is enabled
		const user = await prisma.users.findUnique({
			where: { id: userId },
			select: { tfa_enabled: true },
		});

		if (!user.tfa_enabled) {
			return res.status(400).json({
				error: "Two-factor authentication is not enabled for this account",
			});
		}

		// Generate new backup codes
		const backupCodes = Array.from({ length: 10 }, () =>
			Math.random().toString(36).substring(2, 8).toUpperCase(),
		);

		// Update backup codes
		await prisma.users.update({
			where: { id: userId },
			data: {
				tfa_backup_codes: JSON.stringify(backupCodes),
			},
		});

		res.json({
			message: "Backup codes have been regenerated successfully",
			backupCodes: backupCodes,
		});
	} catch (error) {
		console.error("TFA backup codes regeneration error:", error);
		res.status(500).json({ error: "Failed to regenerate backup codes" });
	}
});

// Verify TFA token (for login)
router.post(
	"/verify",
	[
		body("username").notEmpty().withMessage("Username is required"),
		body("token")
			.isLength({ min: 6, max: 6 })
			.withMessage("Token must be 6 digits"),
		body("token").isNumeric().withMessage("Token must contain only numbers"),
	],
	async (req, res) => {
		try {
			const errors = validationResult(req);
			if (!errors.isEmpty()) {
@@ -254,56 +280,61 @@ router.post('/verify', [
				id: true,
				tfa_enabled: true,
				tfa_secret: true,
				tfa_backup_codes: true,
			},
		});

			if (!user || !user.tfa_enabled || !user.tfa_secret) {
				return res.status(400).json({
					error: "Two-factor authentication is not enabled for this account",
				});
			}

			// Check if it's a backup code
			const backupCodes = user.tfa_backup_codes
				? JSON.parse(user.tfa_backup_codes)
				: [];
			const isBackupCode = backupCodes.includes(token);

			let verified = false;

			if (isBackupCode) {
				// Remove the used backup code
				const updatedBackupCodes = backupCodes.filter((code) => code !== token);
				await prisma.users.update({
					where: { id: user.id },
					data: {
						tfa_backup_codes: JSON.stringify(updatedBackupCodes),
					},
				});
				verified = true;
			} else {
				// Verify TOTP token
				verified = speakeasy.totp.verify({
					secret: user.tfa_secret,
					encoding: "base32",
					token: token,
					window: 2,
				});
			}

			if (!verified) {
				return res.status(400).json({
					error: "Invalid verification code",
				});
			}

			res.json({
				message: "Two-factor authentication verified successfully",
				userId: user.id,
			});
		} catch (error) {
			console.error("TFA verification error:", error);
			res
				.status(500)
				.json({ error: "Failed to verify two-factor authentication" });
		}
	},
);

module.exports = router;
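
// Quick way to exercise the /verify route during development (a sketch, not
// part of the router): speakeasy can generate the current TOTP for a known
// base32 secret, which can then be POSTed as { username, token }. The
// `storedBase32Secret` variable is an assumption standing in for the value
// returned by /setup.
//
//   const speakeasy = require("speakeasy");
//   const token = speakeasy.totp({
//     secret: storedBase32Secret,
//     encoding: "base32",
//   });
//   // POST { username, token } to this router's /verify endpoint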

@@ -1,204 +1,358 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { requireManageSettings } = require("../middleware/permissions");
const { PrismaClient } = require("@prisma/client");

const prisma = new PrismaClient();

// Default GitHub repository URL
const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";

const router = express.Router();

// Helper function to get current version from package.json
function getCurrentVersion() {
	try {
		const packageJson = require("../../package.json");
		return packageJson?.version || "1.2.9";
	} catch (packageError) {
		console.warn(
			"Could not read version from package.json, using fallback:",
			packageError.message,
		);
		return "1.2.9";
	}
}

// Helper function to parse GitHub repository URL
function parseGitHubRepo(repoUrl) {
	let owner, repo;

	if (repoUrl.includes("git@github.com:")) {
		const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
		if (match) {
			[, owner, repo] = match;
		}
	} else if (repoUrl.includes("github.com/")) {
		const match = repoUrl.match(/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/);
		if (match) {
			[, owner, repo] = match;
		}
	}

	return { owner, repo };
}
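
// Example: both supported URL formats parse to the same owner/repo pair.
//   parseGitHubRepo("git@github.com:PatchMon/PatchMon.git"); // { owner: "PatchMon", repo: "PatchMon" }
//   parseGitHubRepo("https://github.com/PatchMon/PatchMon"); // { owner: "PatchMon", repo: "PatchMon" }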

// Helper function to get latest release from GitHub API
async function getLatestRelease(owner, repo) {
	try {
		const currentVersion = getCurrentVersion();
		const apiUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

		const response = await fetch(apiUrl, {
			method: "GET",
			headers: {
				Accept: "application/vnd.github.v3+json",
				"User-Agent": `PatchMon-Server/${currentVersion}`,
			},
		});

		if (!response.ok) {
			const errorText = await response.text();
			if (
				errorText.includes("rate limit") ||
				errorText.includes("API rate limit")
			) {
				throw new Error("GitHub API rate limit exceeded");
			}
			throw new Error(
				`GitHub API error: ${response.status} ${response.statusText}`,
			);
		}

		const releaseData = await response.json();
		return {
			tagName: releaseData.tag_name,
			version: releaseData.tag_name.replace("v", ""),
			publishedAt: releaseData.published_at,
			htmlUrl: releaseData.html_url,
		};
	} catch (error) {
		console.error("Error fetching latest release:", error.message);
		throw error; // Re-throw to be caught by the calling function
	}
}

// Helper function to get latest commit from main branch
async function getLatestCommit(owner, repo) {
	try {
		const currentVersion = getCurrentVersion();
		const apiUrl = `https://api.github.com/repos/${owner}/${repo}/commits/main`;

		const response = await fetch(apiUrl, {
			method: "GET",
			headers: {
				Accept: "application/vnd.github.v3+json",
				"User-Agent": `PatchMon-Server/${currentVersion}`,
			},
		});

		if (!response.ok) {
			const errorText = await response.text();
			if (
				errorText.includes("rate limit") ||
				errorText.includes("API rate limit")
			) {
				throw new Error("GitHub API rate limit exceeded");
			}
			throw new Error(
				`GitHub API error: ${response.status} ${response.statusText}`,
			);
		}

		const commitData = await response.json();
		return {
			sha: commitData.sha,
			message: commitData.commit.message,
			author: commitData.commit.author.name,
			date: commitData.commit.author.date,
			htmlUrl: commitData.html_url,
		};
	} catch (error) {
		console.error("Error fetching latest commit:", error.message);
		throw error; // Re-throw to be caught by the calling function
	}
}

// Helper function to get commit count difference
async function getCommitDifference(owner, repo, currentVersion) {
	// Try both with and without 'v' prefix for compatibility
	const versionTags = [
		currentVersion, // Try without 'v' first (new format)
		`v${currentVersion}`, // Try with 'v' prefix (old format)
	];

	for (const versionTag of versionTags) {
		try {
			// Compare main branch with the released version tag
			const apiUrl = `https://api.github.com/repos/${owner}/${repo}/compare/${versionTag}...main`;

			const response = await fetch(apiUrl, {
				method: "GET",
				headers: {
					Accept: "application/vnd.github.v3+json",
					"User-Agent": `PatchMon-Server/${getCurrentVersion()}`,
				},
			});

			if (!response.ok) {
				const errorText = await response.text();
				if (
					errorText.includes("rate limit") ||
					errorText.includes("API rate limit")
				) {
					throw new Error("GitHub API rate limit exceeded");
				}
				// If 404, try next tag format
				if (response.status === 404) {
					continue;
				}
				throw new Error(
					`GitHub API error: ${response.status} ${response.statusText}`,
				);
			}

			const compareData = await response.json();
			return {
				commitsBehind: compareData.behind_by || 0, // How many commits main is behind release
				commitsAhead: compareData.ahead_by || 0, // How many commits main is ahead of release
				totalCommits: compareData.total_commits || 0,
				branchInfo: "main branch vs release",
			};
		} catch (error) {
			// If rate limit, throw immediately
			if (error.message.includes("rate limit")) {
				throw error;
			}
		}
	}

	// If all attempts failed, throw error
	throw new Error(
		`Could not find tag '${currentVersion}' or 'v${currentVersion}' in repository`,
	);
}

// Helper function to compare version strings (semantic versioning)
function compareVersions(version1, version2) {
	const v1parts = version1.split(".").map(Number);
	const v2parts = version2.split(".").map(Number);

	const maxLength = Math.max(v1parts.length, v2parts.length);

	for (let i = 0; i < maxLength; i++) {
		const v1part = v1parts[i] || 0;
		const v2part = v2parts[i] || 0;

		if (v1part > v2part) return 1;
		if (v1part < v2part) return -1;
	}

	return 0;
}
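
// Example: returns 1, -1, or 0; missing parts are padded with zeros.
//   compareVersions("1.2.10", "1.2.9"); // 1 (10 > 9 numerically, not lexically)
//   compareVersions("1.2", "1.2.0");    // 0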

// Get current version info
router.get("/current", authenticateToken, async (_req, res) => {
	try {
		const currentVersion = getCurrentVersion();

		// Get settings with cached update info (no GitHub API calls)
		const settings = await prisma.settings.findFirst();
		const githubRepoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
		const { owner, repo } = parseGitHubRepo(githubRepoUrl);

		// Return current version and cached update information
		// The backend scheduler updates this data periodically
		res.json({
			version: currentVersion,
			latest_version: settings?.latest_version || null,
			is_update_available: settings?.is_update_available || false,
			last_update_check: settings?.last_update_check || null,
			buildDate: new Date().toISOString(),
			environment: process.env.NODE_ENV || "development",
			github: {
				repository: githubRepoUrl,
				owner: owner,
				repo: repo,
			},
		});
	} catch (error) {
		console.error("Error getting current version:", error);
		res.status(500).json({ error: "Failed to get current version" });
	}
});
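
// Illustrative response shape for GET /current (field names come from the
// handler above; the values here are examples only):
//
//   {
//     "version": "1.2.9",
//     "latest_version": "1.2.9",
//     "is_update_available": false,
//     "last_update_check": "2025-10-05T00:00:00.000Z",
//     "buildDate": "2025-10-05T12:00:00.000Z",
//     "environment": "production",
//     "github": { "repository": "...", "owner": "...", "repo": "..." }
//   }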

// Test SSH key permissions and GitHub access
router.post(
	"/test-ssh-key",
	authenticateToken,
	requireManageSettings,
	async (_req, res) => {
		res.status(410).json({
			error:
				"SSH key testing has been removed. Using default public repository.",
		});
	},
);

// Check for updates from GitHub
router.get(
	"/check-updates",
	authenticateToken,
	requireManageSettings,
	async (_req, res) => {
		try {
			// Get cached update information from settings
			const settings = await prisma.settings.findFirst();

			if (!settings) {
				return res.status(400).json({ error: "Settings not found" });
			}

			const currentVersion = getCurrentVersion();
			const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
			const { owner, repo } = parseGitHubRepo(githubRepoUrl);

			let latestRelease = null;
			let latestCommit = null;
			let commitDifference = null;

			// Fetch fresh GitHub data if we have valid owner/repo
			if (owner && repo) {
				try {
					const [releaseData, commitData, differenceData] = await Promise.all([
						getLatestRelease(owner, repo),
						getLatestCommit(owner, repo),
						getCommitDifference(owner, repo, currentVersion),
					]);

					latestRelease = releaseData;
					latestCommit = commitData;
					commitDifference = differenceData;
				} catch (githubError) {
					console.warn(
						"Failed to fetch fresh GitHub data:",
						githubError.message,
					);

					// Provide fallback data when GitHub API is rate-limited
					if (
						githubError.message.includes("rate limit") ||
						githubError.message.includes("API rate limit")
					) {
						console.log("GitHub API rate limited, providing fallback data");
						latestRelease = {
							tagName: "v1.2.8",
							version: "1.2.8",
							publishedAt: "2025-10-02T17:12:53Z",
							htmlUrl:
								"https://github.com/PatchMon/PatchMon/releases/tag/v1.2.8",
						};
						latestCommit = {
							sha: "cc89df161b8ea5d48ff95b0eb405fe69042052cd",
							message: "Update README.md\n\nAdded Documentation Links",
							author: "9 Technology Group LTD",
							date: "2025-10-04T18:38:09Z",
							htmlUrl:
								"https://github.com/PatchMon/PatchMon/commit/cc89df161b8ea5d48ff95b0eb405fe69042052cd",
						};
						commitDifference = {
							commitsBehind: 0,
							commitsAhead: 3, // Main branch is ahead of release
							totalCommits: 3,
							branchInfo: "main branch vs release",
						};
					} else {
						// Fall back to cached data for other errors
						const githubRepoUrl = settings.githubRepoUrl || DEFAULT_GITHUB_REPO;
						latestRelease = settings.latest_version
							? {
									version: settings.latest_version,
									tagName: `v${settings.latest_version}`,
									publishedAt: null, // Only use date from GitHub API, not cached data
									htmlUrl: `${githubRepoUrl.replace(/\.git$/, "")}/releases/tag/v${settings.latest_version}`,
								}
							: null;
					}
				}
			}

			const latestVersion =
				latestRelease?.version || settings.latest_version || currentVersion;
			const isUpdateAvailable = latestRelease
				? compareVersions(latestVersion, currentVersion) > 0
				: settings.update_available || false;

			res.json({
				currentVersion,
				latestVersion,
				isUpdateAvailable,
				lastUpdateCheck: settings.last_update_check || null,
				repositoryType: settings.repository_type || "public",
				github: {
					repository: githubRepoUrl,
					owner: owner,
					repo: repo,
					latestRelease: latestRelease,
					latestCommit: latestCommit,
					commitDifference: commitDifference,
				},
			});
		} catch (error) {
			console.error("Error getting update information:", error);
			res.status(500).json({ error: "Failed to get update information" });
		}
	},
);

module.exports = router;

backend/src/routes/wsRoutes.js (new file, 143 lines)
@@ -0,0 +1,143 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const {
	getConnectionInfo,
	subscribeToConnectionChanges,
} = require("../services/agentWs");
const {
	validate_session,
	update_session_activity,
} = require("../utils/session_manager");

const router = express.Router();

// Get WebSocket connection status by api_id (no database access - pure memory lookup)
router.get("/status/:apiId", authenticateToken, async (req, res) => {
	try {
		const { apiId } = req.params;

		// Direct in-memory check - no database query needed
		const connectionInfo = getConnectionInfo(apiId);

		// Minimal response for maximum speed
		res.json({
			success: true,
			data: connectionInfo,
		});
	} catch (error) {
		console.error("Error fetching WebSocket status:", error);
		res.status(500).json({
			success: false,
			error: "Failed to fetch WebSocket status",
		});
	}
});

// Server-Sent Events endpoint for real-time status updates (no polling needed!)
router.get("/status/:apiId/stream", async (req, res) => {
	try {
		const { apiId } = req.params;

		// Manual authentication for SSE (EventSource doesn't support custom headers)
		const token =
			req.query.token || req.headers.authorization?.replace("Bearer ", "");
		if (!token) {
			return res.status(401).json({ error: "Authentication required" });
		}

		// Verify token manually with session validation
		const jwt = require("jsonwebtoken");
		try {
			const decoded = jwt.verify(token, process.env.JWT_SECRET);

			// Validate session (same as regular auth middleware)
			const validation = await validate_session(decoded.sessionId, token);
			if (!validation.valid) {
				console.error("[SSE] Session validation failed:", validation.reason);
				console.error("[SSE] Invalid session for api_id:", apiId);
				return res.status(401).json({ error: "Invalid or expired session" });
			}

			// Update session activity to prevent inactivity timeout
			await update_session_activity(decoded.sessionId);

			req.user = validation.user;
		} catch (err) {
			console.error("[SSE] JWT verification failed:", err.message);
			console.error("[SSE] Invalid token for api_id:", apiId);
			return res.status(401).json({ error: "Invalid or expired token" });
		}

		console.log("[SSE] Client connected for api_id:", apiId);

		// Set headers for SSE
		res.setHeader("Content-Type", "text/event-stream");
		res.setHeader("Cache-Control", "no-cache");
		res.setHeader("Connection", "keep-alive");
		res.setHeader("X-Accel-Buffering", "no"); // Disable nginx buffering

		// Send initial status immediately
		const initialInfo = getConnectionInfo(apiId);
		res.write(`data: ${JSON.stringify(initialInfo)}\n\n`);
		res.flushHeaders(); // Ensure headers are sent immediately

		// Subscribe to connection changes for this specific api_id
		const unsubscribe = subscribeToConnectionChanges(apiId, (_connected) => {
			try {
				// Push update to client instantly when status changes
				const connectionInfo = getConnectionInfo(apiId);
				console.log(
					`[SSE] Pushing status change for ${apiId}: connected=${connectionInfo.connected} secure=${connectionInfo.secure}`,
				);
				res.write(`data: ${JSON.stringify(connectionInfo)}\n\n`);
			} catch (err) {
				console.error("[SSE] Error writing to stream:", err);
			}
		});

		// Heartbeat to keep connection alive (every 30 seconds)
		const heartbeat = setInterval(() => {
			try {
				res.write(": heartbeat\n\n");
			} catch (err) {
				console.error("[SSE] Error writing heartbeat:", err);
				clearInterval(heartbeat);
			}
		}, 30000);

		// Cleanup on client disconnect
		req.on("close", () => {
			console.log("[SSE] Client disconnected for api_id:", apiId);
			clearInterval(heartbeat);
			unsubscribe();
		});

		// Handle errors - distinguish between different error types
		req.on("error", (err) => {
			// Only log non-connection-reset errors to reduce noise
			if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
				console.error("[SSE] Request error:", err);
			} else {
				console.log("[SSE] Client connection reset for api_id:", apiId);
			}
			clearInterval(heartbeat);
			unsubscribe();
		});

		// Handle response errors
		res.on("error", (err) => {
			if (err.code !== "ECONNRESET" && err.code !== "EPIPE") {
				console.error("[SSE] Response error:", err);
			}
			clearInterval(heartbeat);
			unsubscribe();
		});
	} catch (error) {
		console.error("[SSE] Unexpected error:", error);
		if (!res.headersSent) {
			res.status(500).json({ error: "Internal server error" });
		}
	}
});

module.exports = router;
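
// Browser-side consumer sketch for the SSE stream above (EventSource cannot
// set an Authorization header, hence the token query parameter). The mount
// path "/api/v1/ws" is an assumption; only the route suffix is shown here.
//
//   const es = new EventSource(`/api/v1/ws/status/${apiId}/stream?token=${jwt}`);
//   es.onmessage = (event) => {
//     const { connected, secure } = JSON.parse(event.data);
//     console.log(`agent ${apiId}: connected=${connected} secure=${secure}`);
//   };
//   es.onerror = () => es.close(); // stop retrying on auth failure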

(File diff suppressed because it is too large.)

backend/src/services/agentWs.js (new file, 190 lines)
@@ -0,0 +1,190 @@
// Lightweight WebSocket hub for agent connections
// Auth: X-API-ID / X-API-KEY headers on the upgrade request

const WebSocket = require("ws");
const url = require("node:url");

// Connection registry by api_id
const apiIdToSocket = new Map();

// Connection metadata (secure/insecure)
// Map<api_id, { ws: WebSocket, secure: boolean }>
const connectionMetadata = new Map();

// Subscribers for connection status changes (for SSE)
// Map<api_id, Set<callback>>
const connectionChangeSubscribers = new Map();

let wss;
let prisma;

function init(server, prismaClient) {
	prisma = prismaClient;
	wss = new WebSocket.Server({ noServer: true });

	// Handle HTTP upgrade events and authenticate before accepting WS
	server.on("upgrade", async (request, socket, head) => {
		try {
			const { pathname } = url.parse(request.url);
			if (!pathname || !pathname.startsWith("/api/")) {
				socket.destroy();
				return;
			}

			// Expected path: /api/{v}/agents/ws
			const parts = pathname.split("/").filter(Boolean); // [api, v1, agents, ws]
			if (parts.length !== 4 || parts[2] !== "agents" || parts[3] !== "ws") {
				socket.destroy();
				return;
			}

			const apiId = request.headers["x-api-id"];
			const apiKey = request.headers["x-api-key"];
			if (!apiId || !apiKey) {
				socket.destroy();
				return;
			}

			// Validate credentials
			const host = await prisma.hosts.findUnique({ where: { api_id: apiId } });
			if (!host || host.api_key !== apiKey) {
				socket.destroy();
				return;
			}

			wss.handleUpgrade(request, socket, head, (ws) => {
				ws.apiId = apiId;

				// Detect if connection is secure (wss://) or not (ws://)
				const isSecure =
					socket.encrypted || request.headers["x-forwarded-proto"] === "https";

				apiIdToSocket.set(apiId, ws);
				connectionMetadata.set(apiId, { ws, secure: isSecure });

				console.log(
					`[agent-ws] connected api_id=${apiId} protocol=${isSecure ? "wss" : "ws"} total=${apiIdToSocket.size}`,
				);

				// Notify subscribers of connection
				notifyConnectionChange(apiId, true);

				ws.on("message", () => {
					// Currently we don't need to handle agent->server messages
				});

				ws.on("close", () => {
					const existing = apiIdToSocket.get(apiId);
					if (existing === ws) {
						apiIdToSocket.delete(apiId);
						connectionMetadata.delete(apiId);
						// Notify subscribers of disconnection
						notifyConnectionChange(apiId, false);
					}
					console.log(
						`[agent-ws] disconnected api_id=${apiId} total=${apiIdToSocket.size}`,
					);
				});

				// Optional: greet/ack
				safeSend(ws, JSON.stringify({ type: "connected" }));
			});
		} catch (_err) {
			try {
				socket.destroy();
			} catch {
				/* ignore */
			}
		}
	});
}

function safeSend(ws, data) {
	if (ws && ws.readyState === WebSocket.OPEN) {
		try {
			ws.send(data);
		} catch {
			/* ignore */
		}
	}
}

function broadcastSettingsUpdate(newInterval) {
	const payload = JSON.stringify({
		type: "settings_update",
		update_interval: newInterval,
	});
	for (const [, ws] of apiIdToSocket) {
		safeSend(ws, payload);
	}
}

function pushReportNow(apiId) {
	const ws = apiIdToSocket.get(apiId);
	safeSend(ws, JSON.stringify({ type: "report_now" }));
}

function pushSettingsUpdate(apiId, newInterval) {
	const ws = apiIdToSocket.get(apiId);
	safeSend(
		ws,
		JSON.stringify({ type: "settings_update", update_interval: newInterval }),
	);
}

// Notify all subscribers when connection status changes
function notifyConnectionChange(apiId, connected) {
	const subscribers = connectionChangeSubscribers.get(apiId);
	if (subscribers) {
		for (const callback of subscribers) {
			try {
				callback(connected);
			} catch (err) {
				console.error(`[agent-ws] error notifying subscriber:`, err);
			}
		}
	}
}

// Subscribe to connection status changes for a specific api_id
function subscribeToConnectionChanges(apiId, callback) {
	if (!connectionChangeSubscribers.has(apiId)) {
		connectionChangeSubscribers.set(apiId, new Set());
	}
	connectionChangeSubscribers.get(apiId).add(callback);

	// Return unsubscribe function
	return () => {
		const subscribers = connectionChangeSubscribers.get(apiId);
		if (subscribers) {
			subscribers.delete(callback);
			if (subscribers.size === 0) {
				connectionChangeSubscribers.delete(apiId);
			}
		}
	};
}

module.exports = {
	init,
	broadcastSettingsUpdate,
	pushReportNow,
	pushSettingsUpdate,
	// Expose read-only view of connected agents
	getConnectedApiIds: () => Array.from(apiIdToSocket.keys()),
	isConnected: (apiId) => {
		const ws = apiIdToSocket.get(apiId);
		return !!ws && ws.readyState === WebSocket.OPEN;
	},
	// Get connection info including protocol (ws/wss)
	getConnectionInfo: (apiId) => {
		const metadata = connectionMetadata.get(apiId);
		if (!metadata) {
			return { connected: false, secure: false };
		}
		const connected = metadata.ws.readyState === WebSocket.OPEN;
		return { connected, secure: metadata.secure };
	},
	// Subscribe to connection status changes (for SSE)
	subscribeToConnectionChanges,
};
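
// Agent-side connection sketch (assumes the `ws` npm package; the hub above
// expects X-API-ID / X-API-KEY headers on the upgrade request). The hostname
// and the sendReport/applyInterval helpers are illustrative assumptions.
//
//   const WebSocket = require("ws");
//   const ws = new WebSocket("wss://patchmon.example.com/api/v1/agents/ws", {
//     headers: { "X-API-ID": apiId, "X-API-KEY": apiKey },
//   });
//   ws.on("message", (raw) => {
//     const msg = JSON.parse(raw);
//     if (msg.type === "report_now") sendReport();
//     if (msg.type === "settings_update") applyInterval(msg.update_interval);
//   });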

backend/src/services/automation/githubUpdateCheck.js (new file, 153 lines)
@@ -0,0 +1,153 @@
const { prisma } = require("./shared/prisma");
const { compareVersions, checkPublicRepo } = require("./shared/utils");

/**
 * GitHub Update Check Automation
 * Checks for new releases on GitHub using HTTPS API
 */
class GitHubUpdateCheck {
	constructor(queueManager) {
		this.queueManager = queueManager;
		this.queueName = "github-update-check";
	}

	/**
	 * Process GitHub update check job
	 */
	async process(_job) {
		const startTime = Date.now();
		console.log("🔍 Starting GitHub update check...");

		try {
			// Get settings
			const settings = await prisma.settings.findFirst();
			const DEFAULT_GITHUB_REPO = "https://github.com/patchMon/patchmon";
			const repoUrl = settings?.githubRepoUrl || DEFAULT_GITHUB_REPO;
			let owner, repo;

			// Parse GitHub repository URL (supports both HTTPS and SSH formats)
			if (repoUrl.includes("git@github.com:")) {
				const match = repoUrl.match(/git@github\.com:([^/]+)\/([^/]+)\.git/);
				if (match) {
					[, owner, repo] = match;
				}
			} else if (repoUrl.includes("github.com/")) {
				const match = repoUrl.match(
					/github\.com\/([^/]+)\/([^/]+?)(?:\.git)?$/,
				);
				if (match) {
					[, owner, repo] = match;
				}
			}

			if (!owner || !repo) {
				throw new Error("Could not parse GitHub repository URL");
			}

			// Always use HTTPS GitHub API (simpler and more reliable)
			const latestVersion = await checkPublicRepo(owner, repo);

			if (!latestVersion) {
				throw new Error("Could not determine latest version");
			}

			// Read version from package.json
			let currentVersion = "1.2.7"; // fallback
			try {
				const packageJson = require("../../../package.json");
				if (packageJson?.version) {
					currentVersion = packageJson.version;
				}
			} catch (packageError) {
				console.warn(
					"Could not read version from package.json:",
					packageError.message,
				);
			}

			const isUpdateAvailable =
				compareVersions(latestVersion, currentVersion) > 0;

			// Update settings with check results
			await prisma.settings.update({
				where: { id: settings.id },
				data: {
					last_update_check: new Date(),
					update_available: isUpdateAvailable,
					latest_version: latestVersion,
				},
			});

			const executionTime = Date.now() - startTime;
			console.log(
				`✅ GitHub update check completed in ${executionTime}ms - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`,
			);

			return {
				success: true,
				currentVersion,
				latestVersion,
				isUpdateAvailable,
				executionTime,
			};
		} catch (error) {
			const executionTime = Date.now() - startTime;
			console.error(
				`❌ GitHub update check failed after ${executionTime}ms:`,
				error.message,
			);

			// Update last check time even on error
			try {
				const settings = await prisma.settings.findFirst();
				if (settings) {
					await prisma.settings.update({
						where: { id: settings.id },
						data: {
							last_update_check: new Date(),
							update_available: false,
						},
					});
				}
			} catch (updateError) {
				console.error(
					"❌ Error updating last check time:",
					updateError.message,
				);
			}

			throw error;
		}
	}

	/**
	 * Schedule recurring GitHub update check (daily at midnight)
	 */
	async schedule() {
		const job = await this.queueManager.queues[this.queueName].add(
			"github-update-check",
			{},
			{
				repeat: { cron: "0 0 * * *" }, // Daily at midnight
				jobId: "github-update-check-recurring",
			},
		);
		console.log("✅ GitHub update check scheduled");
		return job;
	}

	/**
	 * Trigger manual GitHub update check
	 */
	async triggerManual() {
		const job = await this.queueManager.queues[this.queueName].add(
			"github-update-check-manual",
			{},
			{ priority: 1 },
		);
		console.log("✅ Manual GitHub update check triggered");
		return job;
	}
}

module.exports = GitHubUpdateCheck;
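
// Typical wiring (a sketch): the queue manager constructs this class and
// registers the daily check, with an on-demand trigger for manual runs.
//
//   const check = new GitHubUpdateCheck(queueManager);
//   await check.schedule();      // repeats daily at midnight via BullMQ
//   await check.triggerManual(); // or run immediately with high priority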
|
||||
517
backend/src/services/automation/index.js
Normal file
517
backend/src/services/automation/index.js
Normal file
@@ -0,0 +1,517 @@
const { Queue, Worker } = require("bullmq");
const { redis, redisConnection } = require("./shared/redis");
const { prisma } = require("./shared/prisma");
const agentWs = require("../agentWs");

// Import automation classes
const GitHubUpdateCheck = require("./githubUpdateCheck");
const SessionCleanup = require("./sessionCleanup");
const OrphanedRepoCleanup = require("./orphanedRepoCleanup");
const OrphanedPackageCleanup = require("./orphanedPackageCleanup");

// Queue names
const QUEUE_NAMES = {
  GITHUB_UPDATE_CHECK: "github-update-check",
  SESSION_CLEANUP: "session-cleanup",
  ORPHANED_REPO_CLEANUP: "orphaned-repo-cleanup",
  ORPHANED_PACKAGE_CLEANUP: "orphaned-package-cleanup",
  AGENT_COMMANDS: "agent-commands",
};

/**
 * Main Queue Manager
 * Manages all BullMQ queues and workers
 */
class QueueManager {
  constructor() {
    this.queues = {};
    this.workers = {};
    this.automations = {};
    this.isInitialized = false;
  }

  /**
   * Initialize all queues, workers, and automations
   */
  async initialize() {
    try {
      console.log("✅ Redis connection successful");

      // Initialize queues
      await this.initializeQueues();

      // Initialize automation classes
      await this.initializeAutomations();

      // Initialize workers
      await this.initializeWorkers();

      // Setup event listeners
      this.setupEventListeners();

      this.isInitialized = true;
      console.log("✅ Queue manager initialized successfully");
    } catch (error) {
      console.error("❌ Failed to initialize queue manager:", error.message);
      throw error;
    }
  }

  /**
   * Initialize all queues
   */
  async initializeQueues() {
    for (const [_key, queueName] of Object.entries(QUEUE_NAMES)) {
      this.queues[queueName] = new Queue(queueName, {
        connection: redisConnection,
        defaultJobOptions: {
          removeOnComplete: 50, // Keep last 50 completed jobs
          removeOnFail: 20, // Keep last 20 failed jobs
          attempts: 3, // Retry failed jobs 3 times
          backoff: {
            type: "exponential",
            delay: 2000,
          },
        },
      });

      console.log(`✅ Queue '${queueName}' initialized`);
    }
  }

  /**
   * Initialize automation classes
   */
  async initializeAutomations() {
    this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new GitHubUpdateCheck(
      this,
    );
    this.automations[QUEUE_NAMES.SESSION_CLEANUP] = new SessionCleanup(this);
    this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] =
      new OrphanedRepoCleanup(this);
    this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] =
      new OrphanedPackageCleanup(this);

    console.log("✅ All automation classes initialized");
  }

  /**
   * Initialize all workers
   */
  async initializeWorkers() {
    // GitHub Update Check Worker
    this.workers[QUEUE_NAMES.GITHUB_UPDATE_CHECK] = new Worker(
      QUEUE_NAMES.GITHUB_UPDATE_CHECK,
      this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].process.bind(
        this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Session Cleanup Worker
    this.workers[QUEUE_NAMES.SESSION_CLEANUP] = new Worker(
      QUEUE_NAMES.SESSION_CLEANUP,
      this.automations[QUEUE_NAMES.SESSION_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.SESSION_CLEANUP],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Orphaned Repo Cleanup Worker
    this.workers[QUEUE_NAMES.ORPHANED_REPO_CLEANUP] = new Worker(
      QUEUE_NAMES.ORPHANED_REPO_CLEANUP,
      this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Orphaned Package Cleanup Worker
    this.workers[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP] = new Worker(
      QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP,
      this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].process.bind(
        this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP],
      ),
      {
        connection: redisConnection,
        concurrency: 1,
      },
    );

    // Agent Commands Worker
    this.workers[QUEUE_NAMES.AGENT_COMMANDS] = new Worker(
      QUEUE_NAMES.AGENT_COMMANDS,
      async (job) => {
        const { api_id, type, update_interval } = job.data || {};
        console.log("[agent-commands] processing job", job.id, api_id, type);

        // Log job attempt to history - use job.id as the unique identifier
        const attemptNumber = job.attemptsMade || 1;
        const historyId = job.id; // Single row per job, updated with each attempt

        try {
          if (!api_id || !type) {
            throw new Error("invalid job data");
          }

          // Find host by api_id
          const host = await prisma.hosts.findUnique({
            where: { api_id },
            select: { id: true },
          });

          // Ensure agent is connected; if not, retry later
          if (!agentWs.isConnected(api_id)) {
            const error = new Error("agent not connected");
            // Log failed attempt
            await prisma.job_history.upsert({
              where: { id: historyId },
              create: {
                id: historyId,
                job_id: job.id,
                queue_name: QUEUE_NAMES.AGENT_COMMANDS,
                job_name: type,
                host_id: host?.id,
                api_id,
                status: "failed",
                attempt_number: attemptNumber,
                error_message: error.message,
                created_at: new Date(),
                updated_at: new Date(),
              },
              update: {
                status: "failed",
                attempt_number: attemptNumber,
                error_message: error.message,
                updated_at: new Date(),
              },
            });
            console.log(
              "[agent-commands] agent not connected, will retry",
              api_id,
            );
            throw error;
          }

          // Process the command
          let result;
          if (type === "settings_update") {
            agentWs.pushSettingsUpdate(api_id, update_interval);
            console.log(
              "[agent-commands] delivered settings_update",
              api_id,
              update_interval,
            );
            result = { delivered: true, update_interval };
          } else if (type === "report_now") {
            agentWs.pushReportNow(api_id);
            console.log("[agent-commands] delivered report_now", api_id);
            result = { delivered: true };
          } else {
            throw new Error("unsupported agent command");
          }

          // Log successful completion
          await prisma.job_history.upsert({
            where: { id: historyId },
            create: {
              id: historyId,
              job_id: job.id,
              queue_name: QUEUE_NAMES.AGENT_COMMANDS,
              job_name: type,
              host_id: host?.id,
              api_id,
              status: "completed",
              attempt_number: attemptNumber,
              output: result,
              created_at: new Date(),
              updated_at: new Date(),
              completed_at: new Date(),
            },
            update: {
              status: "completed",
              attempt_number: attemptNumber,
              output: result,
              error_message: null,
              updated_at: new Date(),
              completed_at: new Date(),
            },
          });

          return result;
        } catch (error) {
          // Log error to history (if not already logged above)
          if (error.message !== "agent not connected") {
            const host = await prisma.hosts
              .findUnique({
                where: { api_id },
                select: { id: true },
              })
              .catch(() => null);

            await prisma.job_history
              .upsert({
                where: { id: historyId },
                create: {
                  id: historyId,
                  job_id: job.id,
                  queue_name: QUEUE_NAMES.AGENT_COMMANDS,
                  job_name: type || "unknown",
                  host_id: host?.id,
                  api_id,
                  status: "failed",
                  attempt_number: attemptNumber,
                  error_message: error.message,
                  created_at: new Date(),
                  updated_at: new Date(),
                },
                update: {
                  status: "failed",
                  attempt_number: attemptNumber,
                  error_message: error.message,
                  updated_at: new Date(),
                },
              })
              .catch((err) =>
                console.error("[agent-commands] failed to log error:", err),
              );
          }
          throw error;
        }
      },
      {
        connection: redisConnection,
        concurrency: 10,
      },
    );

    // Add error handling for all workers
    Object.values(this.workers).forEach((worker) => {
      worker.on("error", (error) => {
        console.error("Worker error:", error);
      });
    });

    console.log("✅ All workers initialized");
  }

  /**
   * Setup event listeners for all queues
   */
  setupEventListeners() {
    for (const queueName of Object.values(QUEUE_NAMES)) {
      const queue = this.queues[queueName];
      queue.on("error", (error) => {
        console.error(`❌ Queue '${queueName}' experienced an error:`, error);
      });
      queue.on("failed", (job, err) => {
        console.error(
          `❌ Job '${job.id}' in queue '${queueName}' failed:`,
          err,
        );
      });
      queue.on("completed", (job) => {
        console.log(`✅ Job '${job.id}' in queue '${queueName}' completed.`);
      });
    }
    console.log("✅ Queue events initialized");
  }

  /**
   * Schedule all recurring jobs
   */
  async scheduleAllJobs() {
    await this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].schedule();
    await this.automations[QUEUE_NAMES.SESSION_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].schedule();
    await this.automations[QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP].schedule();
  }

  /**
   * Manual job triggers
   */
  async triggerGitHubUpdateCheck() {
    return this.automations[QUEUE_NAMES.GITHUB_UPDATE_CHECK].triggerManual();
  }

  async triggerSessionCleanup() {
    return this.automations[QUEUE_NAMES.SESSION_CLEANUP].triggerManual();
  }

  async triggerOrphanedRepoCleanup() {
    return this.automations[QUEUE_NAMES.ORPHANED_REPO_CLEANUP].triggerManual();
  }

  async triggerOrphanedPackageCleanup() {
    return this.automations[
      QUEUE_NAMES.ORPHANED_PACKAGE_CLEANUP
    ].triggerManual();
  }

  /**
   * Get queue statistics
   */
  async getQueueStats(queueName) {
    const queue = this.queues[queueName];
    if (!queue) {
      throw new Error(`Queue ${queueName} not found`);
    }

    const [waiting, active, completed, failed, delayed] = await Promise.all([
      queue.getWaiting(),
      queue.getActive(),
      queue.getCompleted(),
      queue.getFailed(),
      queue.getDelayed(),
    ]);

    return {
      waiting: waiting.length,
      active: active.length,
      completed: completed.length,
      failed: failed.length,
      delayed: delayed.length,
    };
  }

  /**
   * Get all queue statistics
   */
  async getAllQueueStats() {
    const stats = {};
    for (const queueName of Object.values(QUEUE_NAMES)) {
      stats[queueName] = await this.getQueueStats(queueName);
    }
    return stats;
  }

  /**
   * Get recent jobs for a queue
   */
  async getRecentJobs(queueName, limit = 10) {
    const queue = this.queues[queueName];
    if (!queue) {
      throw new Error(`Queue ${queueName} not found`);
    }

    const [completed, failed] = await Promise.all([
      queue.getCompleted(0, limit - 1),
      queue.getFailed(0, limit - 1),
    ]);

    return [...completed, ...failed]
      .sort((a, b) => new Date(b.finishedOn) - new Date(a.finishedOn))
      .slice(0, limit);
  }

  /**
   * Get jobs for a specific host (by API ID)
   */
  async getHostJobs(apiId, limit = 20) {
    const queue = this.queues[QUEUE_NAMES.AGENT_COMMANDS];
    if (!queue) {
      throw new Error(`Queue ${QUEUE_NAMES.AGENT_COMMANDS} not found`);
    }

    console.log(`[getHostJobs] Looking for jobs with api_id: ${apiId}`);

    // Get active queue status (waiting, active, delayed, failed)
    const [waiting, active, delayed, failed] = await Promise.all([
      queue.getWaiting(),
      queue.getActive(),
      queue.getDelayed(),
      queue.getFailed(),
    ]);

    // Filter by API ID
    const filterByApiId = (jobs) =>
      jobs.filter((job) => job.data && job.data.api_id === apiId);

    const waitingCount = filterByApiId(waiting).length;
    const activeCount = filterByApiId(active).length;
    const delayedCount = filterByApiId(delayed).length;
    const failedCount = filterByApiId(failed).length;

    console.log(
      `[getHostJobs] Queue status - Waiting: ${waitingCount}, Active: ${activeCount}, Delayed: ${delayedCount}, Failed: ${failedCount}`,
    );

    // Get job history from database (shows all attempts and status changes)
    const jobHistory = await prisma.job_history.findMany({
      where: {
        api_id: apiId,
      },
      orderBy: {
        created_at: "desc",
      },
      take: limit,
    });

    console.log(
      `[getHostJobs] Found ${jobHistory.length} job history records for api_id: ${apiId}`,
    );

    return {
      waiting: waitingCount,
      active: activeCount,
      delayed: delayedCount,
      failed: failedCount,
      jobHistory: jobHistory.map((job) => ({
        id: job.id,
        job_id: job.job_id,
        job_name: job.job_name,
        status: job.status,
        attempt_number: job.attempt_number,
        error_message: job.error_message,
        output: job.output,
        created_at: job.created_at,
        updated_at: job.updated_at,
        completed_at: job.completed_at,
      })),
    };
  }

  /**
   * Graceful shutdown
   */
  async shutdown() {
    console.log("🛑 Shutting down queue manager...");

    for (const queueName of Object.keys(this.queues)) {
      try {
        await this.queues[queueName].close();
      } catch (e) {
        console.warn(
          `⚠️ Failed to close queue '${queueName}':`,
          e?.message || e,
        );
      }
      if (this.workers?.[queueName]) {
        try {
          await this.workers[queueName].close();
        } catch (e) {
          console.warn(
            `⚠️ Failed to close worker for '${queueName}':`,
            e?.message || e,
          );
        }
      }
    }

    await redis.quit();
    console.log("✅ Queue manager shutdown complete");
  }
}

const queueManager = new QueueManager();

module.exports = { queueManager, QUEUE_NAMES };
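A minimal boot-and-teardown sketch for the manager above (the entrypoint wiring and signal handling here are illustrative, not part of this PR):

```js
const { queueManager, QUEUE_NAMES } = require("./services/automation");

(async () => {
  await queueManager.initialize(); // queues -> automations -> workers -> events
  await queueManager.scheduleAllJobs(); // registers the four recurring cron jobs

  // Dashboard-style snapshot of every queue:
  const stats = await queueManager.getAllQueueStats();
  console.log(stats[QUEUE_NAMES.AGENT_COMMANDS]); // { waiting, active, completed, failed, delayed }

  // Close workers, queues, and the shared Redis connection on exit:
  process.on("SIGTERM", () => queueManager.shutdown());
})();
```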
116 backend/src/services/automation/orphanedPackageCleanup.js Normal file
@@ -0,0 +1,116 @@
const { prisma } = require("./shared/prisma");

/**
 * Orphaned Package Cleanup Automation
 * Removes packages with no associated hosts
 */
class OrphanedPackageCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "orphaned-package-cleanup";
  }

  /**
   * Process orphaned package cleanup job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🧹 Starting orphaned package cleanup...");

    try {
      // Find packages with 0 hosts
      const orphanedPackages = await prisma.packages.findMany({
        where: {
          host_packages: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              host_packages: true,
            },
          },
        },
      });

      let deletedCount = 0;
      const deletedPackages = [];

      // Delete orphaned packages
      for (const pkg of orphanedPackages) {
        try {
          await prisma.packages.delete({
            where: { id: pkg.id },
          });
          deletedCount++;
          deletedPackages.push({
            id: pkg.id,
            name: pkg.name,
            description: pkg.description,
            category: pkg.category,
            latest_version: pkg.latest_version,
          });
          console.log(
            `🗑️ Deleted orphaned package: ${pkg.name} (${pkg.latest_version})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete package ${pkg.id}:`,
            deleteError.message,
          );
        }
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Orphaned package cleanup completed in ${executionTime}ms - Deleted ${deletedCount} packages`,
      );

      return {
        success: true,
        deletedCount,
        deletedPackages,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Orphaned package cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring orphaned package cleanup (daily at 3 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "orphaned-package-cleanup",
      {},
      {
        repeat: { cron: "0 3 * * *" }, // Daily at 3 AM
        jobId: "orphaned-package-cleanup-recurring",
      },
    );
    console.log("✅ Orphaned package cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual orphaned package cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "orphaned-package-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual orphaned package cleanup triggered");
    return job;
  }
}

module.exports = OrphanedPackageCleanup;
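The `host_packages: { none: {} }` relation filter is what defines "orphaned" here: it matches packages with zero related `host_packages` rows. An equivalent single-package check might look like this (sketch; the `package_id` column name is an assumption about the schema):

```js
// Returns true when no host references the package, i.e. the cleanup
// job above would delete it on its next run.
async function isOrphaned(packageId) {
  const links = await prisma.host_packages.count({
    where: { package_id: packageId },
  });
  return links === 0;
}
```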
114 backend/src/services/automation/orphanedRepoCleanup.js Normal file
@@ -0,0 +1,114 @@
const { prisma } = require("./shared/prisma");

/**
 * Orphaned Repository Cleanup Automation
 * Removes repositories with no associated hosts
 */
class OrphanedRepoCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "orphaned-repo-cleanup";
  }

  /**
   * Process orphaned repository cleanup job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🧹 Starting orphaned repository cleanup...");

    try {
      // Find repositories with 0 hosts
      const orphanedRepos = await prisma.repositories.findMany({
        where: {
          host_repositories: {
            none: {},
          },
        },
        include: {
          _count: {
            select: {
              host_repositories: true,
            },
          },
        },
      });

      let deletedCount = 0;
      const deletedRepos = [];

      // Delete orphaned repositories
      for (const repo of orphanedRepos) {
        try {
          await prisma.repositories.delete({
            where: { id: repo.id },
          });
          deletedCount++;
          deletedRepos.push({
            id: repo.id,
            name: repo.name,
            url: repo.url,
          });
          console.log(
            `🗑️ Deleted orphaned repository: ${repo.name} (${repo.url})`,
          );
        } catch (deleteError) {
          console.error(
            `❌ Failed to delete repository ${repo.id}:`,
            deleteError.message,
          );
        }
      }

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Orphaned repository cleanup completed in ${executionTime}ms - Deleted ${deletedCount} repositories`,
      );

      return {
        success: true,
        deletedCount,
        deletedRepos,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Orphaned repository cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring orphaned repository cleanup (daily at 2 AM)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "orphaned-repo-cleanup",
      {},
      {
        repeat: { cron: "0 2 * * *" }, // Daily at 2 AM
        jobId: "orphaned-repo-cleanup-recurring",
      },
    );
    console.log("✅ Orphaned repository cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual orphaned repository cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "orphaned-repo-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual orphaned repository cleanup triggered");
    return job;
  }
}

module.exports = OrphanedRepoCleanup;
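Note the staggered schedules: repositories at 2 AM, packages at 3 AM, so the two cleanups never contend for the same tables. To verify what BullMQ has actually registered, something like this works (sketch; `getRepeatableJobs()` is a standard BullMQ Queue method, though the `cron` vs `pattern` field name varies across BullMQ versions):

```js
const jobs = await queueManager.queues["orphaned-repo-cleanup"].getRepeatableJobs();
for (const j of jobs) {
  console.log(j.id, j.cron ?? j.pattern); // e.g. "orphaned-repo-cleanup-recurring 0 2 * * *"
}
```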
77 backend/src/services/automation/sessionCleanup.js Normal file
@@ -0,0 +1,77 @@
const { prisma } = require("./shared/prisma");

/**
 * Session Cleanup Automation
 * Cleans up expired user sessions
 */
class SessionCleanup {
  constructor(queueManager) {
    this.queueManager = queueManager;
    this.queueName = "session-cleanup";
  }

  /**
   * Process session cleanup job
   */
  async process(_job) {
    const startTime = Date.now();
    console.log("🧹 Starting session cleanup...");

    try {
      const result = await prisma.user_sessions.deleteMany({
        where: {
          OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
        },
      });

      const executionTime = Date.now() - startTime;
      console.log(
        `✅ Session cleanup completed in ${executionTime}ms - Cleaned up ${result.count} expired sessions`,
      );

      return {
        success: true,
        sessionsCleaned: result.count,
        executionTime,
      };
    } catch (error) {
      const executionTime = Date.now() - startTime;
      console.error(
        `❌ Session cleanup failed after ${executionTime}ms:`,
        error.message,
      );
      throw error;
    }
  }

  /**
   * Schedule recurring session cleanup (every hour)
   */
  async schedule() {
    const job = await this.queueManager.queues[this.queueName].add(
      "session-cleanup",
      {},
      {
        repeat: { cron: "0 * * * *" }, // Every hour
        jobId: "session-cleanup-recurring",
      },
    );
    console.log("✅ Session cleanup scheduled");
    return job;
  }

  /**
   * Trigger manual session cleanup
   */
  async triggerManual() {
    const job = await this.queueManager.queues[this.queueName].add(
      "session-cleanup-manual",
      {},
      { priority: 1 },
    );
    console.log("✅ Manual session cleanup triggered");
    return job;
  }
}

module.exports = SessionCleanup;
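Beyond the hourly cron, the cleanup can be forced on demand, for instance from an admin route (sketch; the Express router below is illustrative and not part of this PR):

```js
const express = require("express");
const { queueManager } = require("../services/automation");

const router = express.Router();

// Illustrative admin endpoint: enqueue a high-priority cleanup run.
router.post("/admin/cleanup-sessions", async (_req, res) => {
  const job = await queueManager.triggerSessionCleanup();
  res.json({ queued: job.id });
});

module.exports = router;
```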
5 backend/src/services/automation/shared/prisma.js Normal file
@@ -0,0 +1,5 @@
const { PrismaClient } = require("@prisma/client");

const prisma = new PrismaClient();

module.exports = { prisma };
16 backend/src/services/automation/shared/redis.js Normal file
@@ -0,0 +1,16 @@
const IORedis = require("ioredis");

// Redis connection configuration
const redisConnection = {
  host: process.env.REDIS_HOST || "localhost",
  port: parseInt(process.env.REDIS_PORT, 10) || 6379,
  password: process.env.REDIS_PASSWORD || undefined,
  db: parseInt(process.env.REDIS_DB, 10) || 0,
  retryDelayOnFailover: 100,
  maxRetriesPerRequest: null, // BullMQ requires this to be null
};

// Create Redis connection
const redis = new IORedis(redisConnection);

module.exports = { redis, redisConnection };
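The connection object honours `REDIS_HOST`, `REDIS_PORT`, `REDIS_PASSWORD`, and `REDIS_DB`, defaulting to a local unauthenticated instance; `maxRetriesPerRequest: null` is the setting BullMQ insists on for its blocking connections. A quick connectivity check (sketch; `ping()` is standard ioredis):

```js
const { redis } = require("./shared/redis");

// ioredis answers PING with "PONG" once the connection is live.
redis.ping().then((pong) => console.log("redis:", pong));
```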
82 backend/src/services/automation/shared/utils.js Normal file
@@ -0,0 +1,82 @@
// Common utilities for automation jobs

/**
 * Compare two semantic versions
 * @param {string} version1 - First version
 * @param {string} version2 - Second version
 * @returns {number} - 1 if version1 > version2, -1 if version1 < version2, 0 if equal
 */
function compareVersions(version1, version2) {
  const v1parts = version1.split(".").map(Number);
  const v2parts = version2.split(".").map(Number);

  const maxLength = Math.max(v1parts.length, v2parts.length);

  for (let i = 0; i < maxLength; i++) {
    const v1part = v1parts[i] || 0;
    const v2part = v2parts[i] || 0;

    if (v1part > v2part) return 1;
    if (v1part < v2part) return -1;
  }

  return 0;
}

/**
 * Check public GitHub repository for latest release
 * @param {string} owner - Repository owner
 * @param {string} repo - Repository name
 * @returns {Promise<string|null>} - Latest version or null
 */
async function checkPublicRepo(owner, repo) {
  try {
    const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

    let currentVersion = "1.2.7"; // fallback
    try {
      const packageJson = require("../../../package.json");
      if (packageJson?.version) {
        currentVersion = packageJson.version;
      }
    } catch (packageError) {
      console.warn(
        "Could not read version from package.json for User-Agent, using fallback:",
        packageError.message,
      );
    }

    const response = await fetch(httpsRepoUrl, {
      method: "GET",
      headers: {
        Accept: "application/vnd.github.v3+json",
        "User-Agent": `PatchMon-Server/${currentVersion}`,
      },
    });

    if (!response.ok) {
      const errorText = await response.text();
      if (
        errorText.includes("rate limit") ||
        errorText.includes("API rate limit")
      ) {
        console.log("⚠️ GitHub API rate limit exceeded, skipping update check");
        return null;
      }
      throw new Error(
        `GitHub API error: ${response.status} ${response.statusText}`,
      );
    }

    const releaseData = await response.json();
    return releaseData.tag_name.replace("v", "");
  } catch (error) {
    console.error("GitHub API error:", error.message);
    throw error;
  }
}

module.exports = {
  compareVersions,
  checkPublicRepo,
};
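How the two helpers compose into the actual "is an update available?" decision (sketch; the owner/repo pair is taken from this project's GitHub path, and the current-version literal is illustrative):

```js
const { compareVersions, checkPublicRepo } = require("./shared/utils");

compareVersions("1.3.0", "1.2.7");  //  1 -> newer
compareVersions("1.2.7", "1.10.0"); // -1 -> numeric, not lexicographic, comparison

(async () => {
  const latest = await checkPublicRepo("9technologygroup", "patchmon.net");
  if (latest && compareVersions(latest, "1.2.7") > 0) {
    console.log(`Update available: ${latest}`);
  }
})();
```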
@@ -1,5 +1,5 @@
const { PrismaClient } = require('@prisma/client');
const { v4: uuidv4 } = require('uuid');
const { PrismaClient } = require("@prisma/client");
const { v4: uuidv4 } = require("uuid");

const prisma = new PrismaClient();

@@ -8,15 +8,15 @@ let cachedSettings = null;

// Environment variable to settings field mapping
const ENV_TO_SETTINGS_MAP = {
  'SERVER_PROTOCOL': 'server_protocol',
  'SERVER_HOST': 'server_host',
  'SERVER_PORT': 'server_port',
  SERVER_PROTOCOL: "server_protocol",
  SERVER_HOST: "server_host",
  SERVER_PORT: "server_port",
};

// Helper function to construct server URL without default ports
function constructServerUrl(protocol, host, port) {
  const isHttps = protocol.toLowerCase() === 'https';
  const isHttp = protocol.toLowerCase() === 'http';
  const isHttps = protocol.toLowerCase() === "https";
  const isHttp = protocol.toLowerCase() === "http";

  // Don't append port if it's the default port for the protocol
  if ((isHttps && port === 443) || (isHttp && port === 80)) {
@@ -28,8 +28,8 @@ function constructServerUrl(protocol, host, port) {

// Create settings from environment variables and/or defaults
async function createSettingsFromEnvironment() {
  const protocol = process.env.SERVER_PROTOCOL || 'http';
  const host = process.env.SERVER_HOST || 'localhost';
  const protocol = process.env.SERVER_PROTOCOL || "http";
  const host = process.env.SERVER_HOST || "localhost";
  const port = parseInt(process.env.SERVER_PORT, 10) || 3001;
  const serverUrl = constructServerUrl(protocol, host, port);

@@ -43,11 +43,12 @@ async function createSettingsFromEnvironment() {
      update_interval: 60,
      auto_update: false,
      signup_enabled: false,
      updated_at: new Date()
    }
      ignore_ssl_self_signed: false,
      updated_at: new Date(),
    },
  });

  console.log('Created settings');
  console.log("Created settings");
  return settings;
}

@@ -64,7 +65,7 @@ async function syncEnvironmentToSettings(currentSettings) {

      // Convert environment value to appropriate type
      let convertedValue = envValue;
      if (settingsField === 'server_port') {
      if (settingsField === "server_port") {
        convertedValue = parseInt(envValue, 10);
      }

@@ -72,7 +73,11 @@ async function syncEnvironmentToSettings(currentSettings) {
      if (currentValue !== convertedValue) {
        updates[settingsField] = convertedValue;
        hasChanges = true;
        console.log(`Environment variable ${envVar} (${envValue}) differs from settings ${settingsField} (${currentValue}), updating...`);
        if (process.env.ENABLE_LOGGING === "true") {
          console.log(
            `Environment variable ${envVar} (${envValue}) differs from settings ${settingsField} (${currentValue}), updating...`,
          );
        }
      }
    }
  }
@@ -87,8 +92,10 @@ async function syncEnvironmentToSettings(currentSettings) {
  if (currentSettings.server_url !== constructedServerUrl) {
    updates.server_url = constructedServerUrl;
    hasChanges = true;
    if (process.env.ENABLE_LOGGING === "true") {
      console.log(`Updating server_url to: ${constructedServerUrl}`);
    }
  }

  // Update settings if there are changes
  if (hasChanges) {
@@ -96,10 +103,14 @@ async function syncEnvironmentToSettings(currentSettings) {
      where: { id: currentSettings.id },
      data: {
        ...updates,
        updated_at: new Date()
      }
        updated_at: new Date(),
      },
    });
    console.log(`Synced ${Object.keys(updates).length} environment variables to settings`);
    if (process.env.ENABLE_LOGGING === "true") {
      console.log(
        `Synced ${Object.keys(updates).length} environment variables to settings`,
      );
    }
    return updatedSettings;
  }

@@ -114,7 +125,7 @@ async function initSettings() {

  try {
    let settings = await prisma.settings.findFirst({
      orderBy: { updated_at: 'desc' }
      orderBy: { updated_at: "desc" },
    });

    if (!settings) {
@@ -129,14 +140,14 @@ async function initSettings() {
    cachedSettings = settings;
    return settings;
  } catch (error) {
    console.error('Failed to initialise settings:', error);
    console.error("Failed to initialise settings:", error);
    throw error;
  }
}

// Get current settings (returns cached if available)
async function getSettings() {
  return cachedSettings || await initSettings();
  return cachedSettings || (await initSettings());
}

// Update settings and refresh cache
@@ -146,17 +157,21 @@ async function updateSettings(id, updateData) {
      where: { id },
      data: {
        ...updateData,
        updated_at: new Date()
      }
        updated_at: new Date(),
      },
    });

    // Reconstruct server_url from components
    const serverUrl = constructServerUrl(updatedSettings.server_protocol, updatedSettings.server_host, updatedSettings.server_port);
    const serverUrl = constructServerUrl(
      updatedSettings.server_protocol,
      updatedSettings.server_host,
      updatedSettings.server_port,
    );
    if (updatedSettings.server_url !== serverUrl) {
      updatedSettings.server_url = serverUrl;
      await prisma.settings.update({
        where: { id },
        data: { server_url: serverUrl }
        data: { server_url: serverUrl },
      });
    }

@@ -164,7 +179,7 @@ async function updateSettings(id, updateData) {
    cachedSettings = updatedSettings;
    return updatedSettings;
  } catch (error) {
    console.error('Failed to update settings:', error);
    console.error("Failed to update settings:", error);
    throw error;
  }
}
@@ -179,5 +194,5 @@ module.exports = {
  getSettings,
  updateSettings,
  invalidateCache,
  syncEnvironmentToSettings // Export for startup use
  syncEnvironmentToSettings, // Export for startup use
};
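The port-elision rule above means default ports never leak into `server_url`. Expected behaviour (sketch; the return values are inferred from the branch shown, since the non-default path falls outside this hunk):

```js
constructServerUrl("https", "patchmon.example.com", 443); // "https://patchmon.example.com"
constructServerUrl("http", "patchmon.example.com", 80);   // "http://patchmon.example.com"
constructServerUrl("http", "localhost", 3001);            // "http://localhost:3001"
```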
@@ -1,267 +0,0 @@
const { PrismaClient } = require('@prisma/client');
const { exec } = require('child_process');
const { promisify } = require('util');

const prisma = new PrismaClient();
const execAsync = promisify(exec);

class UpdateScheduler {
  constructor() {
    this.isRunning = false;
    this.intervalId = null;
    this.checkInterval = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
  }

  // Start the scheduler
  start() {
    if (this.isRunning) {
      console.log('Update scheduler is already running');
      return;
    }

    console.log('🔄 Starting update scheduler...');
    this.isRunning = true;

    // Run initial check
    this.checkForUpdates();

    // Schedule regular checks
    this.intervalId = setInterval(() => {
      this.checkForUpdates();
    }, this.checkInterval);

    console.log(`✅ Update scheduler started - checking every ${this.checkInterval / (60 * 60 * 1000)} hours`);
  }

  // Stop the scheduler
  stop() {
    if (!this.isRunning) {
      console.log('Update scheduler is not running');
      return;
    }

    console.log('🛑 Stopping update scheduler...');
    this.isRunning = false;

    if (this.intervalId) {
      clearInterval(this.intervalId);
      this.intervalId = null;
    }

    console.log('✅ Update scheduler stopped');
  }

  // Check for updates
  async checkForUpdates() {
    try {
      console.log('🔍 Checking for updates...');

      // Get settings
      const settings = await prisma.settings.findFirst();
      if (!settings || !settings.githubRepoUrl) {
        console.log('⚠️ No GitHub repository configured, skipping update check');
        return;
      }

      // Extract owner and repo from GitHub URL
      const repoUrl = settings.githubRepoUrl;
      let owner, repo;

      if (repoUrl.includes('git@github.com:')) {
        const match = repoUrl.match(/git@github\.com:([^\/]+)\/([^\/]+)\.git/);
        if (match) {
          [, owner, repo] = match;
        }
      } else if (repoUrl.includes('github.com/')) {
        const match = repoUrl.match(/github\.com\/([^\/]+)\/([^\/]+?)(?:\.git)?$/);
        if (match) {
          [, owner, repo] = match;
        }
      }

      if (!owner || !repo) {
        console.log('⚠️ Could not parse GitHub repository URL, skipping update check');
        return;
      }

      let latestVersion;
      const isPrivate = settings.repositoryType === 'private';

      if (isPrivate) {
        // Use SSH for private repositories
        latestVersion = await this.checkPrivateRepo(settings, owner, repo);
      } else {
        // Use GitHub API for public repositories
        latestVersion = await this.checkPublicRepo(owner, repo);
      }

      if (!latestVersion) {
        console.log('⚠️ Could not determine latest version, skipping update check');
        return;
      }

      // Read version from package.json dynamically
      let currentVersion = '1.2.6'; // fallback
      try {
        const packageJson = require('../../package.json');
        if (packageJson && packageJson.version) {
          currentVersion = packageJson.version;
        }
      } catch (packageError) {
        console.warn('Could not read version from package.json, using fallback:', packageError.message);
      }
      const isUpdateAvailable = this.compareVersions(latestVersion, currentVersion) > 0;

      // Update settings with check results
      await prisma.settings.update({
        where: { id: settings.id },
        data: {
          lastUpdateCheck: new Date(),
          updateAvailable: isUpdateAvailable,
          latestVersion: latestVersion
        }
      });

      console.log(`✅ Update check completed - Current: ${currentVersion}, Latest: ${latestVersion}, Update Available: ${isUpdateAvailable}`);

    } catch (error) {
      console.error('❌ Error checking for updates:', error.message);

      // Update last check time even on error
      try {
        const settings = await prisma.settings.findFirst();
        if (settings) {
          await prisma.settings.update({
            where: { id: settings.id },
            data: {
              lastUpdateCheck: new Date(),
              updateAvailable: false
            }
          });
        }
      } catch (updateError) {
        console.error('❌ Error updating last check time:', updateError.message);
      }
    }
  }

  // Check private repository using SSH
  async checkPrivateRepo(settings, owner, repo) {
    try {
      let sshKeyPath = settings.sshKeyPath;

      // Try to find SSH key if not configured
      if (!sshKeyPath) {
        const possibleKeyPaths = [
          '/root/.ssh/id_ed25519',
          '/root/.ssh/id_rsa',
          '/home/patchmon/.ssh/id_ed25519',
          '/home/patchmon/.ssh/id_rsa',
          '/var/www/.ssh/id_ed25519',
          '/var/www/.ssh/id_rsa'
        ];

        for (const path of possibleKeyPaths) {
          try {
            require('fs').accessSync(path);
            sshKeyPath = path;
            break;
          } catch (e) {
            // Key not found at this path, try next
          }
        }
      }

      if (!sshKeyPath) {
        throw new Error('No SSH deploy key found');
      }

      const sshRepoUrl = `git@github.com:${owner}/${repo}.git`;
      const env = {
        ...process.env,
        GIT_SSH_COMMAND: `ssh -i ${sshKeyPath} -o StrictHostKeyChecking=no -o IdentitiesOnly=yes`
      };

      const { stdout: sshLatestTag } = await execAsync(
        `git ls-remote --tags --sort=-version:refname ${sshRepoUrl} | head -n 1 | sed 's/.*refs\\/tags\\///' | sed 's/\\^{}//'`,
        {
          timeout: 10000,
          env: env
        }
      );

      return sshLatestTag.trim().replace('v', '');
    } catch (error) {
      console.error('SSH Git error:', error.message);
      throw error;
    }
  }

  // Check public repository using GitHub API
  async checkPublicRepo(owner, repo) {
    try {
      const httpsRepoUrl = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;

      // Get current version for User-Agent
      let currentVersion = '1.2.6'; // fallback
      try {
        const packageJson = require('../../package.json');
        if (packageJson && packageJson.version) {
          currentVersion = packageJson.version;
        }
      } catch (packageError) {
        console.warn('Could not read version from package.json for User-Agent, using fallback:', packageError.message);
      }

      const response = await fetch(httpsRepoUrl, {
        method: 'GET',
        headers: {
          'Accept': 'application/vnd.github.v3+json',
          'User-Agent': `PatchMon-Server/${currentVersion}`
        }
      });

      if (!response.ok) {
        throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
      }

      const releaseData = await response.json();
      return releaseData.tag_name.replace('v', '');
    } catch (error) {
      console.error('GitHub API error:', error.message);
      throw error;
    }
  }

  // Compare version strings (semantic versioning)
  compareVersions(version1, version2) {
    const v1parts = version1.split('.').map(Number);
    const v2parts = version2.split('.').map(Number);

    const maxLength = Math.max(v1parts.length, v2parts.length);

    for (let i = 0; i < maxLength; i++) {
      const v1part = v1parts[i] || 0;
      const v2part = v2parts[i] || 0;

      if (v1part > v2part) return 1;
      if (v1part < v2part) return -1;
    }

    return 0;
  }

  // Get scheduler status
  getStatus() {
    return {
      isRunning: this.isRunning,
      checkInterval: this.checkInterval,
      nextCheck: this.isRunning ? new Date(Date.now() + this.checkInterval) : null
    };
  }
}

// Create singleton instance
const updateScheduler = new UpdateScheduler();

module.exports = updateScheduler;
499 backend/src/utils/session_manager.js Normal file
@@ -0,0 +1,499 @@
const jwt = require("jsonwebtoken");
const crypto = require("node:crypto");
const { PrismaClient } = require("@prisma/client");

const prisma = new PrismaClient();

/**
 * Session Manager - Handles secure session management with inactivity timeout
 */

// Configuration
if (!process.env.JWT_SECRET) {
  throw new Error("JWT_SECRET environment variable is required");
}
const JWT_SECRET = process.env.JWT_SECRET;
const JWT_EXPIRES_IN = process.env.JWT_EXPIRES_IN || "1h";
const JWT_REFRESH_EXPIRES_IN = process.env.JWT_REFRESH_EXPIRES_IN || "7d";
const TFA_REMEMBER_ME_EXPIRES_IN =
  process.env.TFA_REMEMBER_ME_EXPIRES_IN || "30d";
const TFA_MAX_REMEMBER_SESSIONS = parseInt(
  process.env.TFA_MAX_REMEMBER_SESSIONS || "5",
  10,
);
const TFA_SUSPICIOUS_ACTIVITY_THRESHOLD = parseInt(
  process.env.TFA_SUSPICIOUS_ACTIVITY_THRESHOLD || "3",
  10,
);
const INACTIVITY_TIMEOUT_MINUTES = parseInt(
  process.env.SESSION_INACTIVITY_TIMEOUT_MINUTES || "30",
  10,
);

/**
 * Generate access token (short-lived)
 */
function generate_access_token(user_id, session_id) {
  return jwt.sign({ userId: user_id, sessionId: session_id }, JWT_SECRET, {
    expiresIn: JWT_EXPIRES_IN,
  });
}

/**
 * Generate refresh token (long-lived)
 */
function generate_refresh_token() {
  return crypto.randomBytes(64).toString("hex");
}

/**
 * Hash token for storage
 */
function hash_token(token) {
  return crypto.createHash("sha256").update(token).digest("hex");
}

/**
 * Parse expiration string to Date
 */
function parse_expiration(expiration_string) {
  const match = expiration_string.match(/^(\d+)([smhd])$/);
  if (!match) {
    throw new Error("Invalid expiration format");
  }

  const value = parseInt(match[1], 10);
  const unit = match[2];

  const now = new Date();
  switch (unit) {
    case "s":
      return new Date(now.getTime() + value * 1000);
    case "m":
      return new Date(now.getTime() + value * 60 * 1000);
    case "h":
      return new Date(now.getTime() + value * 60 * 60 * 1000);
    case "d":
      return new Date(now.getTime() + value * 24 * 60 * 60 * 1000);
    default:
      throw new Error("Invalid time unit");
  }
}

/**
 * Generate device fingerprint from request data
 */
function generate_device_fingerprint(req) {
  const components = [
    req.get("user-agent") || "",
    req.get("accept-language") || "",
    req.get("accept-encoding") || "",
    req.ip || "",
  ];

  // Create a simple hash of device characteristics
  const fingerprint = crypto
    .createHash("sha256")
    .update(components.join("|"))
    .digest("hex")
    .substring(0, 32); // Use first 32 chars for storage efficiency

  return fingerprint;
}

/**
 * Check for suspicious activity patterns
 */
async function check_suspicious_activity(
  user_id,
  _ip_address,
  _device_fingerprint,
) {
  try {
    // Check for multiple sessions from different IPs in short time
    const recent_sessions = await prisma.user_sessions.findMany({
      where: {
        user_id: user_id,
        created_at: {
          gte: new Date(Date.now() - 24 * 60 * 60 * 1000), // Last 24 hours
        },
        is_revoked: false,
      },
      select: {
        ip_address: true,
        device_fingerprint: true,
        created_at: true,
      },
    });

    // Count unique IPs and devices
    const unique_ips = new Set(recent_sessions.map((s) => s.ip_address));
    const unique_devices = new Set(
      recent_sessions.map((s) => s.device_fingerprint),
    );

    // Flag as suspicious if more than threshold different IPs or devices in 24h
    if (
      unique_ips.size > TFA_SUSPICIOUS_ACTIVITY_THRESHOLD ||
      unique_devices.size > TFA_SUSPICIOUS_ACTIVITY_THRESHOLD
    ) {
      console.warn(
        `Suspicious activity detected for user ${user_id}: ${unique_ips.size} IPs, ${unique_devices.size} devices`,
      );
      return true;
    }

    return false;
  } catch (error) {
    console.error("Error checking suspicious activity:", error);
    return false;
  }
}

/**
 * Create a new session for user
 */
async function create_session(
  user_id,
  ip_address,
  user_agent,
  remember_me = false,
  req = null,
) {
  try {
    const session_id = crypto.randomUUID();
    const refresh_token = generate_refresh_token();
    const access_token = generate_access_token(user_id, session_id);

    // Generate device fingerprint if request is available
    const device_fingerprint = req ? generate_device_fingerprint(req) : null;

    // Check for suspicious activity
    if (device_fingerprint) {
      const is_suspicious = await check_suspicious_activity(
        user_id,
        ip_address,
        device_fingerprint,
      );
      if (is_suspicious) {
        console.warn(
          `Suspicious activity detected for user ${user_id}, session creation may be restricted`,
        );
      }
    }

    // Check session limits for remember me
    if (remember_me) {
      const existing_remember_sessions = await prisma.user_sessions.count({
        where: {
          user_id: user_id,
          tfa_remember_me: true,
          is_revoked: false,
          expires_at: { gt: new Date() },
        },
      });

      // Limit remember me sessions per user
      if (existing_remember_sessions >= TFA_MAX_REMEMBER_SESSIONS) {
        throw new Error(
          "Maximum number of remembered devices reached. Please revoke an existing session first.",
        );
      }
    }

    // Use longer expiration for remember me sessions
    const expires_at = remember_me
      ? parse_expiration(TFA_REMEMBER_ME_EXPIRES_IN)
      : parse_expiration(JWT_REFRESH_EXPIRES_IN);

    // Calculate TFA bypass until date for remember me sessions
    const tfa_bypass_until = remember_me
      ? parse_expiration(TFA_REMEMBER_ME_EXPIRES_IN)
      : null;

    // Store session in database
    await prisma.user_sessions.create({
      data: {
        id: session_id,
        user_id: user_id,
        refresh_token: hash_token(refresh_token),
        access_token_hash: hash_token(access_token),
        ip_address: ip_address || null,
        user_agent: user_agent || null,
        device_fingerprint: device_fingerprint,
        last_login_ip: ip_address || null,
        last_activity: new Date(),
        expires_at: expires_at,
        tfa_remember_me: remember_me,
        tfa_bypass_until: tfa_bypass_until,
        login_count: 1,
      },
    });

    return {
      session_id,
      access_token,
      refresh_token,
      expires_at,
      tfa_bypass_until,
    };
  } catch (error) {
    console.error("Error creating session:", error);
    throw error;
  }
}

/**
 * Validate session and check for inactivity timeout
 */
async function validate_session(session_id, access_token) {
  try {
    const session = await prisma.user_sessions.findUnique({
      where: { id: session_id },
      include: { users: true },
    });

    if (!session) {
      return { valid: false, reason: "Session not found" };
    }

    // Check if session is revoked
    if (session.is_revoked) {
      return { valid: false, reason: "Session revoked" };
    }

    // Check if session has expired
    if (new Date() > session.expires_at) {
      await revoke_session(session_id);
      return { valid: false, reason: "Session expired" };
    }

    // Check for inactivity timeout
    const inactivity_threshold = new Date(
      Date.now() - INACTIVITY_TIMEOUT_MINUTES * 60 * 1000,
    );
    if (session.last_activity < inactivity_threshold) {
      await revoke_session(session_id);
      return {
        valid: false,
        reason: "Session inactive",
        message: `Session timed out after ${INACTIVITY_TIMEOUT_MINUTES} minutes of inactivity`,
      };
    }

    // Validate access token hash (optional security check)
    if (session.access_token_hash) {
      const provided_hash = hash_token(access_token);
      if (session.access_token_hash !== provided_hash) {
        return { valid: false, reason: "Token mismatch" };
      }
    }

    // Check if user is still active
    if (!session.users.is_active) {
      await revoke_session(session_id);
      return { valid: false, reason: "User inactive" };
    }

    return {
      valid: true,
      session,
      user: session.users,
    };
  } catch (error) {
    console.error("Error validating session:", error);
    return { valid: false, reason: "Validation error" };
  }
}

/**
 * Update session activity timestamp
 */
async function update_session_activity(session_id) {
  try {
    await prisma.user_sessions.update({
      where: { id: session_id },
      data: { last_activity: new Date() },
    });
    return true;
  } catch (error) {
    console.error("Error updating session activity:", error);
    return false;
  }
}

/**
 * Refresh access token using refresh token
 */
async function refresh_access_token(refresh_token) {
  try {
    const hashed_token = hash_token(refresh_token);

    const session = await prisma.user_sessions.findUnique({
      where: { refresh_token: hashed_token },
      include: { users: true },
    });

    if (!session) {
      return { success: false, error: "Invalid refresh token" };
    }

    // Validate session
    const validation = await validate_session(session.id, "");
    if (!validation.valid) {
      return { success: false, error: validation.reason };
    }

    // Generate new access token
    const new_access_token = generate_access_token(session.user_id, session.id);

    // Update access token hash
    await prisma.user_sessions.update({
      where: { id: session.id },
      data: {
        access_token_hash: hash_token(new_access_token),
        last_activity: new Date(),
      },
    });

    return {
      success: true,
      access_token: new_access_token,
      user: session.users,
    };
  } catch (error) {
    console.error("Error refreshing access token:", error);
    return { success: false, error: "Token refresh failed" };
  }
}

/**
 * Revoke a session
 */
async function revoke_session(session_id) {
  try {
    await prisma.user_sessions.update({
      where: { id: session_id },
      data: { is_revoked: true },
    });
    return true;
  } catch (error) {
    console.error("Error revoking session:", error);
    return false;
  }
}

/**
 * Revoke all sessions for a user
 */
async function revoke_all_user_sessions(user_id) {
  try {
    await prisma.user_sessions.updateMany({
      where: { user_id: user_id },
      data: { is_revoked: true },
    });
    return true;
  } catch (error) {
    console.error("Error revoking user sessions:", error);
    return false;
  }
}

/**
 * Clean up expired sessions (should be run periodically)
 */
async function cleanup_expired_sessions() {
  try {
    const result = await prisma.user_sessions.deleteMany({
      where: {
        OR: [{ expires_at: { lt: new Date() } }, { is_revoked: true }],
      },
    });
    console.log(`Cleaned up ${result.count} expired sessions`);
    return result.count;
  } catch (error) {
    console.error("Error cleaning up sessions:", error);
    return 0;
  }
}

/**
 * Get active sessions for a user
 */
async function get_user_sessions(user_id) {
  try {
    return await prisma.user_sessions.findMany({
      where: {
        user_id: user_id,
        is_revoked: false,
        expires_at: { gt: new Date() },
      },
      select: {
        id: true,
        ip_address: true,
        user_agent: true,
        last_activity: true,
        created_at: true,
        expires_at: true,
        tfa_remember_me: true,
        tfa_bypass_until: true,
      },
      orderBy: { last_activity: "desc" },
    });
  } catch (error) {
    console.error("Error getting user sessions:", error);
    return [];
  }
}

/**
 * Check if TFA is bypassed for a session
 */
async function is_tfa_bypassed(session_id) {
  try {
    const session = await prisma.user_sessions.findUnique({
      where: { id: session_id },
      select: {
        tfa_remember_me: true,
        tfa_bypass_until: true,
        is_revoked: true,
        expires_at: true,
      },
    });

    if (!session) {
      return false;
    }

    // Check if session is still valid
    if (session.is_revoked || new Date() > session.expires_at) {
      return false;
    }

    // Check if TFA is bypassed and still within bypass period
    if (session.tfa_remember_me && session.tfa_bypass_until) {
      return new Date() < session.tfa_bypass_until;
    }

    return false;
  } catch (error) {
    console.error("Error checking TFA bypass:", error);
    return false;
  }
}

module.exports = {
  create_session,
  validate_session,
  update_session_activity,
  refresh_access_token,
  revoke_session,
  revoke_all_user_sessions,
  cleanup_expired_sessions,
  get_user_sessions,
  is_tfa_bypassed,
  generate_device_fingerprint,
  check_suspicious_activity,
  generate_access_token,
  INACTIVITY_TIMEOUT_MINUTES,
};
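A typical login flow against this module (sketch; the Express handler shape and the `req.body` field name are assumptions, not part of this PR):

```js
const session_manager = require("./utils/session_manager");

// Inside an Express login handler, after password (and TFA) checks pass:
async function issue_session(req, res, user) {
  const { access_token, refresh_token, expires_at } =
    await session_manager.create_session(
      user.id,
      req.ip,
      req.get("user-agent"),
      req.body.remember_me === true, // opts into the 30d TFA-bypass window
      req, // enables device fingerprinting + suspicious-activity checks
    );
  res.json({ access_token, refresh_token, expires_at });
}
```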
17 biome.json Normal file
@@ -0,0 +1,17 @@
{
  "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json",
  "vcs": {
    "enabled": true,
    "clientKind": "git",
    "useIgnoreFile": true
  },
  "formatter": {
    "enabled": true
  },
  "linter": {
    "enabled": true
  },
  "assist": {
    "enabled": true
  }
}
BIN dashboard.jpeg Normal file
Binary file not shown.
After Width: | Height: | Size: 104 KiB
304 docker/README.md
@@ -2,33 +2,108 @@
|
||||
|
||||
## Overview
|
||||
|
||||
PatchMon is a containerised application that monitors system patches and updates. The application consists of three main services:
|
||||
PatchMon is a containerised application that monitors system patches and updates. The application consists of four main services:
|
||||
|
||||
- **Database**: PostgreSQL 17
|
||||
- **Redis**: Redis 7 for BullMQ job queues and caching
|
||||
- **Backend**: Node.js API server
|
||||
- **Frontend**: React application served via Nginx
|
||||
- **Frontend**: React application served via NGINX
|
||||
|
||||
## Images
|
||||
|
||||
- **Backend**: `ghcr.io/9technologygroup/patchmon-backend:latest`
|
||||
- **Frontend**: `ghcr.io/9technologygroup/patchmon-frontend:latest`
|
||||
- **Backend**: [ghcr.io/patchmon/patchmon-backend](https://github.com/patchmon/patchmon.net/pkgs/container/patchmon-backend)
|
||||
- **Frontend**: [ghcr.io/patchmon/patchmon-frontend](https://github.com/patchmon/patchmon.net/pkgs/container/patchmon-frontend)
|
||||
|
||||
Version tags are also available (e.g. `1.2.3`) for both of these images.
|
||||
### Tags
|
||||
|
||||
- `latest`: The latest stable release of PatchMon
|
||||
- `x.y.z`: Full version tags (e.g. `1.2.3`) - Use this for exact version pinning.
|
||||
- `x.y`: Minor version tags (e.g. `1.2`) - Use this to get the latest patch release in a minor version series.
|
||||
- `x`: Major version tags (e.g. `1`) - Use this to get the latest minor and patch release in a major version series.
|
||||
- `edge`: The latest development build with the most recent features and fixes. This tag can be unstable and is intended only for testing and development.
|
||||
|
||||
These tags are available for both backend and frontend images as they are versioned together.
|
||||
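For example, to follow the `1.2` series (picking up patch releases automatically while never jumping to `1.3`), the compose file's image lines would read:

```yaml
services:
  backend:
    image: ghcr.io/patchmon/patchmon-backend:1.2
  frontend:
    image: ghcr.io/patchmon/patchmon-frontend:1.2
```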
|
||||
## Quick Start
|
||||
|
||||
### Production Deployment
|
||||
|
||||
1. Download the [Docker Compose file](docker-compose.yml)
|
||||
|
||||
2. Configure environment variables (see [Configuration](#configuration) section)
|
||||
|
||||
3. Start the application:
|
||||
2. Set a database password in the file where it says:
|
||||
```yaml
|
||||
environment:
|
||||
POSTGRES_PASSWORD: # CREATE A STRONG PASSWORD AND PUT IT HERE
|
||||
```
|
||||
3. Update the corresponding `DATABASE_URL` with your password in the backend service where it says:
|
||||
```yaml
|
||||
environment:
|
||||
DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
|
||||
```
|
||||
4. Set a Redis password in the Redis service where it says:
|
||||
```yaml
|
||||
environment:
|
||||
REDIS_PASSWORD: # CREATE A STRONG REDIS PASSWORD AND PUT IT HERE
|
||||
```
|
||||
5. Update the corresponding `REDIS_PASSWORD` in the backend service where it says:
|
||||
```yaml
|
||||
environment:
|
||||
REDIS_PASSWORD: REPLACE_YOUR_REDIS_PASSWORD_HERE
|
||||
```
|
||||
6. Generate a strong JWT secret. You can do this like so:
|
||||
```bash
|
||||
openssl rand -hex 64
|
||||
```
|
||||
7. Set a JWT secret in the backend service where it says:
|
||||
```yaml
|
||||
environment:
|
||||
JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE
|
||||
```
|
||||
8. Configure environment variables (see [Configuration](#configuration) section)
|
||||
9. Start the application:
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
10. Access the application at `http://localhost:3000`
|
||||
|
||||
4. Access the application at `http://localhost:3000`
|
||||
## Updating
|
||||
|
||||
By default, the compose file uses the `latest` tag for both backend and frontend images.
|
||||
|
||||
This means you can update PatchMon to the latest version as easily as:
|
||||
|
||||
```bash
|
||||
docker compose up -d --pull always
|
||||
```
|
||||
|
||||
This command will:
|
||||
- Pull the latest images from the registry
|
||||
- Recreate containers with updated images
|
||||
- Maintain your data and configuration
|
||||
|
||||
### Version-Specific Updates
|
||||
|
||||
If you'd like to pin your Docker deployment of PatchMon to a specific version, you can do this in the compose file.
|
||||
|
||||
When you do this, updating to a new version means editing the image tags in the compose file yourself:
|
||||
|
||||
1. Update the image tags in `docker-compose.yml`. For example:
|
||||
```yaml
|
||||
services:
|
||||
backend:
|
||||
image: ghcr.io/patchmon/patchmon-backend:1.2.3 # Update version here
|
||||
...
|
||||
frontend:
|
||||
image: ghcr.io/patchmon/patchmon-frontend:1.2.3 # Update version here
|
||||
...
|
||||
```
|
||||
|
||||
2. Then run the update command:
|
||||
```bash
|
||||
docker compose up -d --pull always
|
||||
```
|
||||
|
||||
> [!TIP]
|
||||
> Check the [releases page](https://github.com/PatchMon/PatchMon/releases) for version-specific changes and migration notes.
|
||||
|
||||
## Configuration
|
||||
|
||||
@@ -36,47 +111,96 @@ Version tags are also available (e.g. `1.2.3`) for both of these images.
|
||||
|
||||
#### Database Service
|
||||
|
||||
- `POSTGRES_DB`: Database name (default: `patchmon_db`)
|
||||
- `POSTGRES_USER`: Database user (default: `patchmon_user`)
|
||||
- `POSTGRES_PASSWORD`: Database password - **MUST BE CHANGED!**
|
||||
| Variable | Description | Default |
|
||||
| ------------------- | ----------------- | ---------------- |
|
||||
| `POSTGRES_DB` | Database name | `patchmon_db` |
|
||||
| `POSTGRES_USER` | Database user | `patchmon_user` |
|
||||
| `POSTGRES_PASSWORD` | Database password | **MUST BE SET!** |
|
||||
|
||||
#### Redis Service
|
||||
|
||||
| Variable | Description | Default |
|
||||
| -------------- | ------------------ | ---------------- |
|
||||
| `REDIS_PASSWORD` | Redis password | **MUST BE SET!** |
|
||||
|
||||
#### Backend Service
|
||||
|
||||
- `LOG_LEVEL`: Logging level (`debug`, `info`, `warn`, `error`)
|
||||
- `DATABASE_URL`: PostgreSQL connection string
|
||||
- `PM_DB_CONN_MAX_ATTEMPTS`: Maximum database connection attempts (default: 30)
|
||||
- `PM_DB_CONN_WAIT_INTERVAL`: Wait interval between connection attempts in seconds (default: 2)
|
||||
- `SERVER_PROTOCOL`: Frontend server protocol (`http` or `https`)
|
||||
- `SERVER_HOST`: Frontend server host (default: `localhost`)
|
||||
- `SERVER_PORT`: Frontend server port (default: 3000)
|
||||
- `PORT`: Backend API port (default: 3001)
|
||||
- `API_VERSION`: API version (default: `v1`)
|
||||
- `CORS_ORIGIN`: CORS origin URL
|
||||
- `RATE_LIMIT_WINDOW_MS`: Rate limiting window in milliseconds (default: 900000)
|
||||
- `RATE_LIMIT_MAX`: Maximum requests per window (default: 100)
|
||||
- `ENABLE_HSTS`: Enable HTTP Strict Transport Security (default: true)
|
||||
- `TRUST_PROXY`: Trust proxy headers (default: true) - See [Express.js docs](https://expressjs.com/en/guide/behind-proxies.html) for usage.
|
||||
##### Database Configuration
|
||||
|
||||
| Variable | Description | Default |
|
||||
| -------------------------- | ---------------------------------------------------- | ------------------------------------------------ |
|
||||
| `DATABASE_URL` | PostgreSQL connection string | **MUST BE UPDATED WITH YOUR POSTGRES_PASSWORD!** |
|
||||
| `PM_DB_CONN_MAX_ATTEMPTS` | Maximum database connection attempts | `30` |
|
||||
| `PM_DB_CONN_WAIT_INTERVAL` | Wait interval between connection attempts in seconds | `2` |
|
||||
|
||||
##### Redis Configuration
|
||||
|
||||
| Variable | Description | Default |
|
||||
| --------------- | ------------------------------ | ------- |
|
||||
| `REDIS_HOST` | Redis server hostname | `redis` |
|
||||
| `REDIS_PORT` | Redis server port | `6379` |
|
||||
| `REDIS_PASSWORD` | Redis authentication password | **MUST BE UPDATED WITH YOUR REDIS_PASSWORD!** |
|
||||
| `REDIS_DB` | Redis database number | `0` |
|
||||
|
||||
##### Authentication & Security
|
||||
|
||||
| Variable | Description | Default |
|
||||
| ------------------------------------ | --------------------------------------------------------- | ---------------- |
|
||||
| `JWT_SECRET` | JWT signing secret - Generate with `openssl rand -hex 64` | **MUST BE SET!** |
|
||||
| `JWT_EXPIRES_IN` | JWT token expiration time | `1h` |
|
||||
| `JWT_REFRESH_EXPIRES_IN` | JWT refresh token expiration time | `7d` |
|
||||
| `SESSION_INACTIVITY_TIMEOUT_MINUTES` | Session inactivity timeout in minutes | `30` |
|
||||
| `DEFAULT_USER_ROLE` | Default role for new users | `user` |
|
||||
|
||||
##### Server & Network Configuration
|
||||
|
||||
| Variable | Description | Default |
|
||||
| ----------------- | ----------------------------------------------------------------------------------------------- | ----------------------- |
|
||||
| `PORT` | Backend API port | `3001` |
|
||||
| `SERVER_PROTOCOL` | Frontend server protocol (`http` or `https`) | `http` |
|
||||
| `SERVER_HOST` | Frontend server host | `localhost` |
|
||||
| `SERVER_PORT` | Frontend server port | `3000` |
|
||||
| `CORS_ORIGIN` | CORS origin URL | `http://localhost:3000` |
|
||||
| `ENABLE_HSTS` | Enable HTTP Strict Transport Security | `true` |
|
||||
| `TRUST_PROXY` | Trust proxy headers - See [Express.js docs](https://expressjs.com/en/guide/behind-proxies.html) | `true` |
|
||||
|
||||
##### Rate Limiting
|
||||
|
||||
| Variable | Description | Default |
|
||||
| ---------------------------- | --------------------------------------------------- | -------- |
|
||||
| `RATE_LIMIT_WINDOW_MS` | Rate limiting window in milliseconds | `900000` |
|
||||
| `RATE_LIMIT_MAX` | Maximum requests per window | `5000` |
|
||||
| `AUTH_RATE_LIMIT_WINDOW_MS` | Authentication rate limiting window in milliseconds | `600000` |
|
||||
| `AUTH_RATE_LIMIT_MAX` | Maximum authentication requests per window | `500` |
|
||||
| `AGENT_RATE_LIMIT_WINDOW_MS` | Agent API rate limiting window in milliseconds | `60000` |
|
||||
| `AGENT_RATE_LIMIT_MAX` | Maximum agent requests per window | `1000` |
|
||||
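As an illustration, these limits are tuned via the backend service's `environment` block in the compose file; the numbers below are made up for the example, not recommendations:

```yaml
environment:
  AUTH_RATE_LIMIT_WINDOW_MS: 300000 # 5-minute window
  AUTH_RATE_LIMIT_MAX: 100 # at most 100 auth attempts per window
```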
|
||||
##### Logging
|
||||
|
||||
| Variable | Description | Default |
|
||||
| ---------------- | ------------------------------------------------ | ------- |
|
||||
| `LOG_LEVEL` | Logging level (`debug`, `info`, `warn`, `error`) | `info` |
|
||||
| `ENABLE_LOGGING` | Enable application logging | `true` |
|
||||
|
||||
#### Frontend Service
|
||||
|
||||
- `BACKEND_HOST`: Backend service hostname (default: `backend`)
|
||||
- `BACKEND_PORT`: Backend service port (default: 3001)
|
||||
| Variable | Description | Default |
|
||||
| -------------- | ------------------------ | --------- |
|
||||
| `BACKEND_HOST` | Backend service hostname | `backend` |
|
||||
| `BACKEND_PORT` | Backend service port | `3001` |
|
||||
|
||||
### Security Configuration
|
||||
### Volumes
|
||||
|
||||
**⚠️ IMPORTANT**: Before deploying to production, you MUST:
|
||||
The compose file creates three Docker volumes:
|
||||
|
||||
1. Change the default database password in `docker-compose.yml`:
|
||||
```yaml
|
||||
environment:
|
||||
POSTGRES_PASSWORD: YOUR_SECURE_PASSWORD_HERE
|
||||
```
|
||||
- `postgres_data`: PostgreSQL's data directory.
|
||||
- `redis_data`: Redis's data directory.
|
||||
- `agent_files`: PatchMon's agent files.
|
||||
|
||||
2. Update the corresponding `DATABASE_URL` in the backend service:
|
||||
```yaml
|
||||
environment:
|
||||
DATABASE_URL: postgresql://patchmon_user:YOUR_SECURE_PASSWORD_HERE@database:5432/patchmon_db
|
||||
```
|
||||
If you wish to bind any of their respective container paths to a host path rather than a Docker volume, you can do so in the Docker Compose file.
|
||||
|
||||
> [!TIP]
|
||||
> The backend container runs as user & group ID 1000. If you bind the agent files directory to a host path, ensure that the same user and/or group ID has permission to write to that path.
|
||||
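For example, binding the agent files to a host directory instead of the `agent_files` volume might look like this (the host path is an assumption - adjust it to your layout):

```yaml
services:
  backend:
    volumes:
      - /srv/patchmon/agents:/app/agents # must be writable by UID/GID 1000
```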
|
||||
---
|
||||
|
||||
@@ -90,47 +214,62 @@ For development with live reload and source code mounting:
|
||||
|
||||
1. Clone the repository:
|
||||
```bash
|
||||
git clone https://github.com/9technologygroup/patchmon.net.git
|
||||
git clone https://github.com/PatchMon/PatchMon.git
|
||||
cd PatchMon
|
||||
```
|
||||
|
||||
2. Start development environment:
|
||||
```bash
|
||||
# Attached, live log output, services stopped on Ctrl+C
|
||||
docker compose -f docker/docker-compose.dev.yml up
|
||||
|
||||
# Detached
|
||||
docker compose -f docker/docker-compose.dev.yml up -d
|
||||
```
|
||||
_See [Development Commands](#development-commands) for more options._
|
||||
|
||||
3. Access the application:
|
||||
- Frontend: `http://localhost:3000`
|
||||
- Backend API: `http://localhost:3001`
|
||||
- Database: `localhost:5432`
|
||||
- Redis: `localhost:6379`
|
||||
|
||||
## Development Docker Compose
|
||||
|
||||
The development compose file (`docker/docker-compose.dev.yml`):
|
||||
- Builds images locally from source
|
||||
- Enables development workflow
|
||||
- Supports live reload and debugging
|
||||
- Builds images locally from source using development targets
|
||||
- Enables hot reload with Docker Compose watch functionality
|
||||
- Exposes database and backend ports for testing and development
|
||||
- Mounts source code directly into containers for live development
|
||||
- Supports debugging with enhanced logging
|
||||
|
||||
## Building Images Locally
|
||||
|
||||
```
|
||||
docker build -t patchmon-backend:dev -f docker/backend.Dockerfile .
|
||||
docker build -t patchmon-frontend:dev -f docker/frontend.Dockerfile .
|
||||
```
|
||||
|
||||
## Running in Docker Compose
|
||||
|
||||
For development or custom builds:
|
||||
Both Dockerfiles use multi-stage builds with separate development and production targets:
|
||||
|
||||
```bash
|
||||
# Build backend image
|
||||
docker build -f docker/backend.Dockerfile -t patchmon-backend:dev .
|
||||
# Build development images
|
||||
docker build -f docker/backend.Dockerfile --target development -t patchmon-backend:dev .
|
||||
docker build -f docker/frontend.Dockerfile --target development -t patchmon-frontend:dev .
|
||||
|
||||
# Build frontend image
|
||||
docker build -f docker/frontend.Dockerfile -t patchmon-frontend:dev .
|
||||
# Build production images (default target)
|
||||
docker build -f docker/backend.Dockerfile -t patchmon-backend:latest .
|
||||
docker build -f docker/frontend.Dockerfile -t patchmon-frontend:latest .
|
||||
```
|
||||
|
||||
## Development Commands
|
||||
|
||||
### Hot Reload Development
|
||||
```bash
|
||||
# Attached, live log output, services stopped on Ctrl+C
|
||||
docker compose -f docker/docker-compose.dev.yml up
|
||||
|
||||
# Attached with Docker Compose watch for hot reload
|
||||
docker compose -f docker/docker-compose.dev.yml up --watch
|
||||
|
||||
# Detached
|
||||
docker compose -f docker/docker-compose.dev.yml up -d
|
||||
|
||||
# Quiet, no log output, with Docker Compose watch for hot reload
|
||||
docker compose -f docker/docker-compose.dev.yml watch
|
||||
```
|
||||
|
||||
### Rebuild Services
|
||||
```bash
|
||||
# Rebuild specific service
|
||||
@@ -140,9 +279,44 @@ docker compose -f docker/docker-compose.dev.yml up -d --build backend
|
||||
docker compose -f docker/docker-compose.dev.yml up -d --build
|
||||
```
|
||||
|
||||
### Development Ports
|
||||
The development setup exposes additional ports for debugging:
|
||||
- **Database**: `5432` - Direct PostgreSQL access
|
||||
- **Redis**: `6379` - Direct Redis access
|
||||
- **Backend**: `3001` - API server with development features
|
||||
- **Frontend**: `3000` - React development server with hot reload
|
||||
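For example, with the development stack up, each port can be exercised straight from the host using `psql`, `redis-cli`, and `curl` installed locally (credentials are the development defaults from `docker-compose.dev.yml`; the health path comes from the backend Dockerfile's `HEALTHCHECK`):

```bash
# Direct PostgreSQL access
psql "postgresql://patchmon_user:1NS3CU6E_DEV_D8_PASSW0RD@localhost:5432/patchmon_db"

# Direct Redis access
redis-cli -a 1NS3CU6E_DEV_R3DIS_PASSW0RD ping

# Backend health endpoint
curl http://localhost:3001/health
```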
|
||||
## Development Workflow
|
||||
|
||||
1. **Code Changes**: Edit source files
|
||||
2. **Rebuild**: `docker compose -f docker/docker-compose.dev.yml up -d --build`
|
||||
3. **Test**: Access application and verify changes
|
||||
4. **Debug**: Check logs with `docker compose logs -f`
|
||||
1. **Initial Setup**: Clone repository and start development environment
|
||||
```bash
|
||||
git clone https://github.com/PatchMon/PatchMon.git
|
||||
cd PatchMon
|
||||
docker compose -f docker/docker-compose.dev.yml up -d --build
|
||||
```
|
||||
|
||||
2. **Hot Reload Development**: Use Docker Compose watch for automatic reload
|
||||
```bash
|
||||
docker compose -f docker/docker-compose.dev.yml up --watch --build
|
||||
```
|
||||
|
||||
3. **Code Changes**:
|
||||
- **Frontend/Backend Source**: Files are synced automatically with watch mode
|
||||
- **Package.json Changes**: Triggers automatic service rebuild
|
||||
- **Prisma Schema Changes**: Backend service restarts automatically
|
||||
|
||||
4. **Database Access**: Connect database client directly to `localhost:5432`
|
||||
5. **Redis Access**: Connect Redis client directly to `localhost:6379`
|
||||
6. **Debug**: If started with `docker compose [...] up -d` or `docker compose [...] watch`, check logs manually:
|
||||
```bash
|
||||
docker compose -f docker/docker-compose.dev.yml logs -f
|
||||
```
|
||||
Otherwise logs are shown automatically in attached modes (`up`, `up --watch`).
|
||||
|
||||
### Features in Development Mode
|
||||
|
||||
- **Hot Reload**: Automatic code synchronization and service restarts
|
||||
- **Enhanced Logging**: Detailed logs for debugging
|
||||
- **Direct Access**: Exposed ports for database, Redis, and API debugging
|
||||
- **Health Checks**: Built-in health monitoring for services
|
||||
- **Volume Persistence**: Development data persists between restarts
|
||||
|
||||
@@ -1,22 +1,8 @@
|
||||
FROM node:lts-alpine AS builder
|
||||
# Development target
|
||||
FROM node:lts-alpine AS development
|
||||
|
||||
RUN apk add --no-cache openssl
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --chown=node:node package*.json /app/
|
||||
COPY --chown=node:node backend/ /app/backend/
|
||||
|
||||
WORKDIR /app/backend
|
||||
|
||||
RUN npm ci &&\
|
||||
npx prisma generate &&\
|
||||
npm prune --omit=dev &&\
|
||||
npm cache clean --force
|
||||
|
||||
FROM node:lts-alpine
|
||||
|
||||
ENV NODE_ENV=production \
|
||||
ENV NODE_ENV=development \
|
||||
NPM_CONFIG_UPDATE_NOTIFIER=false \
|
||||
ENABLE_LOGGING=true \
|
||||
LOG_LEVEL=info \
|
||||
PM_LOG_TO_CONSOLE=true \
|
||||
@@ -28,8 +14,64 @@ USER node
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/backend /app/backend
|
||||
COPY --from=builder /app/node_modules /app/node_modules
|
||||
COPY --chown=node:node package*.json ./
|
||||
COPY --chown=node:node backend/ ./backend/
|
||||
COPY --chown=node:node agents ./agents_backup
|
||||
COPY --chown=node:node agents ./agents
|
||||
COPY --chmod=755 docker/backend.docker-entrypoint.sh ./entrypoint.sh
|
||||
|
||||
WORKDIR /app/backend
|
||||
|
||||
RUN npm ci --ignore-scripts && npx prisma generate
|
||||
|
||||
EXPOSE 3001
|
||||
|
||||
VOLUME [ "/app/agents" ]
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=5s --start-period=30s --retries=5 \
|
||||
CMD curl -f http://localhost:3001/health || exit 1
|
||||
|
||||
ENTRYPOINT ["/sbin/tini", "--"]
|
||||
CMD ["/app/entrypoint.sh"]
|
||||
|
||||
# Builder stage for production
|
||||
FROM node:lts-alpine AS builder
|
||||
|
||||
RUN apk add --no-cache openssl
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --chown=node:node package*.json ./
|
||||
COPY --chown=node:node backend/ ./backend/
|
||||
|
||||
WORKDIR /app/backend
|
||||
|
||||
RUN npm ci --ignore-scripts &&\
|
||||
npx prisma generate &&\
|
||||
npm prune --omit=dev &&\
|
||||
npm cache clean --force
|
||||
|
||||
# Production stage
|
||||
FROM node:lts-alpine
|
||||
|
||||
ENV NODE_ENV=production \
|
||||
NPM_CONFIG_UPDATE_NOTIFIER=false \
|
||||
ENABLE_LOGGING=true \
|
||||
LOG_LEVEL=info \
|
||||
PM_LOG_TO_CONSOLE=true \
|
||||
PORT=3001 \
|
||||
JWT_EXPIRES_IN=1h \
|
||||
JWT_REFRESH_EXPIRES_IN=7d \
|
||||
SESSION_INACTIVITY_TIMEOUT_MINUTES=30
|
||||
|
||||
RUN apk add --no-cache openssl tini curl
|
||||
|
||||
USER node
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=builder /app/backend ./backend
|
||||
COPY --from=builder /app/node_modules ./node_modules
|
||||
COPY --chown=node:node agents ./agents_backup
|
||||
COPY --chown=node:node agents ./agents
|
||||
COPY --chmod=755 docker/backend.docker-entrypoint.sh ./entrypoint.sh
|
||||
|
||||
@@ -8,22 +8,101 @@ log() {
|
||||
echo "[$(date +'%Y-%m-%d %H:%M:%S')] $*" >&2
|
||||
}
|
||||
|
||||
# Copy files from agents_backup to agents if agents directory is empty
|
||||
if [ -d "/app/agents" ] && [ -z "$(ls -A /app/agents 2>/dev/null)" ]; then
|
||||
if [ -d "/app/agents_backup" ]; then
|
||||
log "Agents directory is empty, copying from backup..."
|
||||
cp -r /app/agents_backup/* /app/agents/
|
||||
# Function to extract version from agent script
|
||||
get_agent_version() {
|
||||
local file="$1"
|
||||
if [ -f "$file" ]; then
|
||||
grep -m 1 '^AGENT_VERSION=' "$file" | cut -d'"' -f2 2>/dev/null || echo "0.0.0"
|
||||
else
|
||||
log "Warning: agents_backup directory not found"
|
||||
echo "0.0.0"
|
||||
fi
|
||||
else
|
||||
log "Agents directory already contains files, skipping copy"
|
||||
}
|
||||
|
||||
# Function to compare versions (returns 0 if $1 > $2)
|
||||
version_greater() {
|
||||
# Use sort -V for version comparison
|
||||
test "$(printf '%s\n' "$1" "$2" | sort -V | tail -n1)" = "$1" && test "$1" != "$2"
|
||||
}
|
||||
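# Sanity check (illustrative, not part of the original script; assumes the
# container's `sort` implementation supports -V):
#   version_greater "1.3.0" "1.2.9" && echo "newer"  # prints "newer"
#   version_greater "1.2.9" "1.2.9" || echo "same"   # prints "same" (not strictly greater)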
|
||||
# Check and update agent files if necessary
|
||||
update_agents() {
|
||||
local backup_agent="/app/agents_backup/patchmon-agent.sh"
|
||||
local current_agent="/app/agents/patchmon-agent.sh"
|
||||
|
||||
# Check if agents directory exists
|
||||
if [ ! -d "/app/agents" ]; then
|
||||
log "ERROR: /app/agents directory not found"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log "Starting PatchMon Backend..."
|
||||
# Check if backup exists
|
||||
if [ ! -d "/app/agents_backup" ]; then
|
||||
log "WARNING: agents_backup directory not found, skipping agent update"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Get versions
|
||||
local backup_version=$(get_agent_version "$backup_agent")
|
||||
local current_version=$(get_agent_version "$current_agent")
|
||||
|
||||
log "Agent version check:"
|
||||
log " Image version: ${backup_version}"
|
||||
log " Volume version: ${current_version}"
|
||||
|
||||
# Determine if update is needed
|
||||
local needs_update=0
|
||||
|
||||
# Case 1: No agents in volume (first time setup)
|
||||
if [ -z "$(find /app/agents -maxdepth 1 -type f -name '*.sh' 2>/dev/null | head -n 1)" ]; then
|
||||
log "Agents directory is empty - performing initial copy"
|
||||
needs_update=1
|
||||
# Case 2: Backup version is newer
|
||||
elif version_greater "$backup_version" "$current_version"; then
|
||||
log "Newer agent version available (${backup_version} > ${current_version})"
|
||||
needs_update=1
|
||||
else
|
||||
log "Agents are up to date"
|
||||
needs_update=0
|
||||
fi
|
||||
|
||||
# Perform update if needed
|
||||
if [ $needs_update -eq 1 ]; then
|
||||
log "Updating agents to version ${backup_version}..."
|
||||
|
||||
# Create backup of existing agents if they exist
|
||||
if [ -f "$current_agent" ]; then
|
||||
local backup_timestamp=$(date +%Y%m%d_%H%M%S)
|
||||
local backup_name="/app/agents/patchmon-agent.sh.backup.${backup_timestamp}"
|
||||
cp "$current_agent" "$backup_name" 2>/dev/null || true
|
||||
log "Previous agent backed up to: $(basename $backup_name)"
|
||||
fi
|
||||
|
||||
# Copy new agents
|
||||
cp -r /app/agents_backup/* /app/agents/
|
||||
|
||||
# Verify update
|
||||
local new_version=$(get_agent_version "$current_agent")
|
||||
if [ "$new_version" = "$backup_version" ]; then
|
||||
log "✅ Agents successfully updated to version ${new_version}"
|
||||
else
|
||||
log "⚠️ Warning: Agent update may have failed (expected: ${backup_version}, got: ${new_version})"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# Main execution
|
||||
log "PatchMon Backend Container Starting..."
|
||||
log "Environment: ${NODE_ENV:-production}"
|
||||
|
||||
# Update agents (version-aware)
|
||||
update_agents
|
||||
|
||||
log "Running database migrations..."
|
||||
npx prisma migrate deploy
|
||||
|
||||
log "Starting application..."
|
||||
if [ "${NODE_ENV}" = "development" ]; then
|
||||
exec npm run dev
|
||||
else
|
||||
exec npm start
|
||||
fi
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
name: patchmon-dev
|
||||
|
||||
services:
|
||||
database:
|
||||
image: postgres:17-alpine
|
||||
@@ -5,41 +7,82 @@ services:
|
||||
environment:
|
||||
POSTGRES_DB: patchmon_db
|
||||
POSTGRES_USER: patchmon_user
|
||||
POSTGRES_PASSWORD: INSECURE_REPLACE_ME_PLEASE_INSECURE
|
||||
POSTGRES_PASSWORD: 1NS3CU6E_DEV_D8_PASSW0RD
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
- ./compose_dev_data/db:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U patchmon_user -d patchmon_db"]
|
||||
interval: 10s
|
||||
interval: 3s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
retries: 7
|
||||
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
command: redis-server --requirepass 1NS3CU6E_DEV_R3DIS_PASSW0RD
|
||||
environment:
|
||||
REDIS_PASSWORD: 1NS3CU6E_DEV_R3DIS_PASSW0RD
|
||||
ports:
|
||||
- "6379:6379"
|
||||
volumes:
|
||||
- ./compose_dev_data/redis:/data
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "1NS3CU6E_DEV_R3DIS_PASSW0RD", "ping"]
|
||||
interval: 3s
|
||||
timeout: 5s
|
||||
retries: 7
|
||||
|
||||
backend:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: docker/backend.Dockerfile
|
||||
target: development
|
||||
tags: [patchmon-backend:dev]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
NODE_ENV: development
|
||||
LOG_LEVEL: info
|
||||
DATABASE_URL: postgresql://patchmon_user:INSECURE_REPLACE_ME_PLEASE_INSECURE@database:5432/patchmon_db
|
||||
PM_DB_CONN_MAX_ATTEMPTS: 30
|
||||
PM_DB_CONN_WAIT_INTERVAL: 2
|
||||
DATABASE_URL: postgresql://patchmon_user:1NS3CU6E_DEV_D8_PASSW0RD@database:5432/patchmon_db
|
||||
JWT_SECRET: INS3CURE_DEV_7WT_5ECR3T
|
||||
SERVER_PROTOCOL: http
|
||||
SERVER_HOST: localhost
|
||||
SERVER_PORT: 3000
|
||||
CORS_ORIGIN: http://localhost:3000
|
||||
RATE_LIMIT_WINDOW_MS: 900000
|
||||
RATE_LIMIT_MAX: 100
|
||||
# Redis Configuration
|
||||
REDIS_HOST: redis
|
||||
REDIS_PORT: 6379
|
||||
REDIS_PASSWORD: 1NS3CU6E_DEV_R3DIS_PASSW0RD
|
||||
REDIS_DB: 0
|
||||
ports:
|
||||
- "3001:3001"
|
||||
volumes:
|
||||
- ./agents:/app/agents
|
||||
- ./compose_dev_data/agents:/app/agents
|
||||
depends_on:
|
||||
database:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
develop:
|
||||
watch:
|
||||
- action: sync
|
||||
path: ../backend/src
|
||||
target: /app/backend/src
|
||||
ignore:
|
||||
- node_modules/
|
||||
- action: sync
|
||||
path: ../backend/prisma
|
||||
target: /app/backend/prisma
|
||||
- action: rebuild
|
||||
path: ../backend/package.json
|
||||
|
||||
frontend:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: docker/frontend.Dockerfile
|
||||
target: development
|
||||
tags: [patchmon-frontend:dev]
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
BACKEND_HOST: backend
|
||||
@@ -49,6 +92,12 @@ services:
|
||||
depends_on:
|
||||
backend:
|
||||
condition: service_healthy
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
develop:
|
||||
watch:
|
||||
- action: sync
|
||||
path: ../frontend/src
|
||||
target: /app/frontend/src
|
||||
ignore:
|
||||
- node_modules/
|
||||
- action: rebuild
|
||||
path: ../frontend/package.json
|
||||
|
||||
@@ -1,41 +1,63 @@
|
||||
name: patchmon
|
||||
|
||||
services:
|
||||
database:
|
||||
image: postgres:17-alpine3.22
|
||||
image: postgres:17-alpine
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_DB: patchmon_db
|
||||
POSTGRES_USER: patchmon_user
|
||||
POSTGRES_PASSWORD: INSECURE_REPLACE_ME_PLEASE_INSECURE
|
||||
POSTGRES_PASSWORD: # CREATE A STRONG PASSWORD AND PUT IT HERE
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U patchmon_user -d patchmon_db"]
|
||||
interval: 10s
|
||||
interval: 3s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
retries: 7
|
||||
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
command: redis-server /usr/local/etc/redis/redis.conf
|
||||
environment:
|
||||
REDIS_PASSWORD: # CREATE A STRONG REDIS PASSWORD AND PUT IT HERE
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
- ./docker/redis.conf:/usr/local/etc/redis/redis.conf:ro
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "--no-auth-warning", "-a", "${REDIS_PASSWORD}", "ping"]
|
||||
interval: 3s
|
||||
timeout: 5s
|
||||
retries: 7
|
||||
|
||||
backend:
|
||||
image: ghcr.io/9technologygroup/patchmon-backend:latest
|
||||
image: ghcr.io/patchmon/patchmon-backend:latest
|
||||
restart: unless-stopped
|
||||
# See PatchMon Docker README for additional environment variables and configuration instructions
|
||||
environment:
|
||||
LOG_LEVEL: info
|
||||
DATABASE_URL: postgresql://patchmon_user:INSECURE_REPLACE_ME_PLEASE_INSECURE@database:5432/patchmon_db
|
||||
PM_DB_CONN_MAX_ATTEMPTS: 30
|
||||
PM_DB_CONN_WAIT_INTERVAL: 2
|
||||
DATABASE_URL: postgresql://patchmon_user:REPLACE_YOUR_POSTGRES_PASSWORD_HERE@database:5432/patchmon_db
|
||||
JWT_SECRET: # CREATE A STRONG SECRET AND PUT IT HERE - Generate with 'openssl rand -hex 64'
|
||||
SERVER_PROTOCOL: http
|
||||
SERVER_HOST: localhost
|
||||
SERVER_PORT: 3000
|
||||
CORS_ORIGIN: http://localhost:3000
|
||||
RATE_LIMIT_WINDOW_MS: 900000
|
||||
RATE_LIMIT_MAX: 100
|
||||
# Redis Configuration
|
||||
REDIS_HOST: redis
|
||||
REDIS_PORT: 6379
|
||||
REDIS_PASSWORD: REPLACE_YOUR_REDIS_PASSWORD_HERE
|
||||
REDIS_DB: 0
|
||||
volumes:
|
||||
- ./agents:/app/agents
|
||||
- agent_files:/app/agents
|
||||
depends_on:
|
||||
database:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
|
||||
frontend:
|
||||
image: ghcr.io/9technologygroup/patchmon-frontend:latest
|
||||
image: ghcr.io/patchmon/patchmon-frontend:latest
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "3000:3000"
|
||||
@@ -45,3 +67,5 @@ services:
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
redis_data:
|
||||
agent_files:
|
||||
|
||||
@@ -1,3 +1,20 @@
|
||||
# Development target
|
||||
FROM node:lts-alpine AS development
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package*.json ./
|
||||
COPY frontend/ ./frontend/
|
||||
|
||||
RUN npm ci --ignore-scripts
|
||||
|
||||
WORKDIR /app/frontend
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "3000"]
|
||||
|
||||
# Builder stage for production
|
||||
FROM node:lts-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
@@ -5,12 +22,13 @@ WORKDIR /app
|
||||
COPY package*.json ./
|
||||
COPY frontend/package*.json ./frontend/
|
||||
|
||||
RUN npm ci
|
||||
RUN npm ci --ignore-scripts
|
||||
|
||||
COPY frontend/ ./frontend/
|
||||
|
||||
RUN npm run build:frontend
|
||||
|
||||
# Production stage
|
||||
FROM nginxinc/nginx-unprivileged:alpine
|
||||
|
||||
ENV BACKEND_HOST=backend \
|
||||
|
||||
@@ -52,6 +52,64 @@ server {
|
||||
}
|
||||
}
|
||||
|
||||
# SSE (Server-Sent Events) specific configuration
|
||||
location /api/v1/ws/status/ {
|
||||
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
|
||||
# Critical SSE settings
|
||||
proxy_buffering off;
|
||||
proxy_cache off;
|
||||
proxy_set_header Connection '';
|
||||
proxy_http_version 1.1;
|
||||
chunked_transfer_encoding off;
|
||||
|
||||
# Timeout settings for long-lived connections
|
||||
proxy_read_timeout 24h;
|
||||
proxy_send_timeout 24h;
|
||||
proxy_connect_timeout 60s;
|
||||
|
||||
# Disable nginx buffering for real-time streaming
|
||||
proxy_request_buffering off;
|
||||
proxy_max_temp_file_size 0;
|
||||
|
||||
# CORS headers for SSE
|
||||
add_header Access-Control-Allow-Origin * always;
|
||||
add_header Access-Control-Allow-Methods "GET, OPTIONS" always;
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization" always;
|
||||
|
||||
# Handle preflight requests
|
||||
if ($request_method = 'OPTIONS') {
|
||||
return 204;
|
||||
}
|
||||
}
|
||||
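# To spot-check the SSE proxying by hand (illustrative; host/port assume the
# default compose mapping of 3000): curl -N http://localhost:3000/api/v1/ws/status/
# curl's -N flag disables output buffering so events print as they stream.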
|
||||
# WebSocket upgrade handling
|
||||
location /api/v1/agents/ws {
|
||||
proxy_pass http://${BACKEND_HOST}:${BACKEND_PORT};
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
|
||||
# WebSocket timeout settings
|
||||
proxy_read_timeout 24h;
|
||||
proxy_send_timeout 24h;
|
||||
proxy_connect_timeout 60s;
|
||||
|
||||
# Disable buffering for WebSocket
|
||||
proxy_buffering off;
|
||||
proxy_cache off;
|
||||
}
|
||||
|
||||
# Static assets caching
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
||||
expires 1y;
|
||||
|
||||
35
docker/redis.conf
Normal file
@@ -0,0 +1,35 @@
|
||||
# Redis Configuration for PatchMon Production
|
||||
# Security settings
|
||||
requirepass ${REDIS_PASSWORD}
|
||||
rename-command FLUSHDB ""
|
||||
rename-command FLUSHALL ""
|
||||
rename-command DEBUG ""
|
||||
rename-command CONFIG "CONFIG_${REDIS_PASSWORD}"
|
||||
|
||||
# Memory management
|
||||
maxmemory 256mb
|
||||
maxmemory-policy allkeys-lru
|
||||
|
||||
# Persistence settings
|
||||
save 900 1
|
||||
save 300 10
|
||||
save 60 10000
|
||||
|
||||
# Logging
|
||||
loglevel notice
|
||||
logfile ""
|
||||
|
||||
# Network security
|
||||
bind 127.0.0.1
|
||||
protected-mode yes
|
||||
|
||||
# Performance tuning
|
||||
tcp-keepalive 300
|
||||
timeout 0
|
||||
|
||||
# Disable dangerous commands
|
||||
rename-command SHUTDOWN "SHUTDOWN_${REDIS_PASSWORD}"
|
||||
rename-command KEYS ""
|
||||
rename-command MONITOR ""
|
||||
rename-command SLAVEOF ""
|
||||
rename-command REPLICAOF ""
|
||||
@@ -2,7 +2,7 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<link rel="icon" type="image/svg+xml" href="/assets/favicon.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>PatchMon - Linux Patch Monitoring Dashboard</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
{
|
||||
"name": "patchmon-frontend",
|
||||
"private": true,
|
||||
"version": "1.2.6",
|
||||
"version": "1.2.9",
|
||||
"license": "AGPL-3.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"lint": "eslint . --ext js,jsx --report-unused-disable-directives --max-warnings 0",
|
||||
"lint": "biome check .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -14,31 +15,27 @@
|
||||
"@dnd-kit/sortable": "^10.0.0",
|
||||
"@dnd-kit/utilities": "^3.2.2",
|
||||
"@tanstack/react-query": "^5.87.4",
|
||||
"axios": "^1.6.2",
|
||||
"chart.js": "^4.4.0",
|
||||
"clsx": "^2.0.0",
|
||||
"axios": "^1.7.9",
|
||||
"chart.js": "^4.4.7",
|
||||
"clsx": "^2.1.1",
|
||||
"cors": "^2.8.5",
|
||||
"date-fns": "^2.30.0",
|
||||
"express": "^4.18.2",
|
||||
"http-proxy-middleware": "^2.0.6",
|
||||
"lucide-react": "^0.294.0",
|
||||
"react": "^18.2.0",
|
||||
"date-fns": "^4.1.0",
|
||||
"express": "^5.0.0",
|
||||
"http-proxy-middleware": "^3.0.3",
|
||||
"lucide-react": "^0.468.0",
|
||||
"react": "^18.3.1",
|
||||
"react-chartjs-2": "^5.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-icons": "^5.5.0",
|
||||
"react-router-dom": "^6.20.1"
|
||||
"react-router-dom": "^6.30.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^18.2.37",
|
||||
"@types/react-dom": "^18.2.15",
|
||||
"@vitejs/plugin-react": "^4.3.3",
|
||||
"autoprefixer": "^10.4.16",
|
||||
"eslint": "^8.53.0",
|
||||
"eslint-plugin-react": "^7.33.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.4",
|
||||
"postcss": "^8.4.32",
|
||||
"tailwindcss": "^3.3.6",
|
||||
"@types/react": "^18.3.14",
|
||||
"@types/react-dom": "^18.3.1",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"autoprefixer": "^10.4.20",
|
||||
"postcss": "^8.5.6",
|
||||
"tailwindcss": "^3.4.17",
|
||||
"vite": "^7.1.5"
|
||||
},
|
||||
"overrides": {
|
||||
|
||||
@@ -3,4 +3,4 @@ export default {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
23
frontend/public/assets/bull-board-logo.svg
Normal file
@@ -0,0 +1,23 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36">
|
||||
<circle fill="#DD2E44" cx="18" cy="18" r="18" />
|
||||
<circle fill="#FFF" cx="18" cy="18" r="13.5" />
|
||||
<circle fill="#DD2E44" cx="18" cy="18" r="10" />
|
||||
<circle fill="#FFF" cx="18" cy="18" r="6" />
|
||||
<circle fill="#DD2E44" cx="18" cy="18" r="3" />
|
||||
<path
|
||||
opacity=".2"
|
||||
d="M18.24 18.282l13.144 11.754s-2.647 3.376-7.89 5.109L17.579 18.42l.661-.138z"
|
||||
/>
|
||||
<path
|
||||
fill="#FFAC33"
|
||||
d="M18.294 19a.994.994 0 01-.704-1.699l.563-.563a.995.995 0 011.408 1.407l-.564.563a.987.987 0 01-.703.292z"
|
||||
/>
|
||||
<path
|
||||
fill="#55ACEE"
|
||||
d="M24.016 6.981c-.403 2.079 0 4.691 0 4.691l7.054-7.388c.291-1.454-.528-3.932-1.718-4.238-1.19-.306-4.079.803-5.336 6.935zm5.003 5.003c-2.079.403-4.691 0-4.691 0l7.388-7.054c1.454-.291 3.932.528 4.238 1.718.306 1.19-.803 4.079-6.935 5.336z"
|
||||
/>
|
||||
<path
|
||||
fill="#3A87C2"
|
||||
d="M32.798 4.485L21.176 17.587c-.362.362-1.673.882-2.51.046-.836-.836-.419-2.08-.057-2.443L31.815 3.501s.676-.635 1.159-.152-.176 1.136-.176 1.136z"
|
||||
/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
1
frontend/public/assets/favicon.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="500" zoomAndPan="magnify" viewBox="0 0 375 374.999991" height="500" preserveAspectRatio="xMidYMid meet" version="1.0"><defs><g/><clipPath id="d62632d413"><path d="M 29 28 L 304 28 L 304 350 L 29 350 Z M 29 28 " clip-rule="nonzero"/></clipPath><clipPath id="ecc8b4d8ed"><path d="M 187.496094 -39.996094 L 416.601562 189.105469 L 187.496094 418.207031 L -41.605469 189.105469 Z M 187.496094 -39.996094 " clip-rule="nonzero"/></clipPath><clipPath id="3016db942f"><path d="M 187.496094 -39.996094 L 416.601562 189.105469 L 187.496094 418.207031 L -41.605469 189.105469 Z M 187.496094 -39.996094 " clip-rule="nonzero"/></clipPath><clipPath id="029f8ae6a8"><path d="M 29 28 L 304 28 L 304 350 L 29 350 Z M 29 28 " clip-rule="nonzero"/></clipPath><clipPath id="2d374b5e76"><path d="M 187.496094 -39.996094 L 416.601562 189.105469 L 187.496094 418.207031 L -41.605469 189.105469 Z M 187.496094 -39.996094 " clip-rule="nonzero"/></clipPath><clipPath id="544d823606"><path d="M 187.496094 -39.996094 L 416.601562 189.105469 L 187.496094 418.207031 L -41.605469 189.105469 Z M 187.496094 -39.996094 " clip-rule="nonzero"/></clipPath><clipPath id="b88a276116"><path d="M 187.496094 -39.996094 L 416.601562 189.105469 L 187.496094 418.207031 L -41.605469 189.105469 Z M 187.496094 -39.996094 " clip-rule="nonzero"/></clipPath><clipPath id="98c26e11a4"><rect x="0" width="103" y="0" height="208"/></clipPath></defs><g clip-path="url(#d62632d413)"><g clip-path="url(#ecc8b4d8ed)"><g clip-path="url(#3016db942f)"><path fill="#ff751f" d="M 303.214844 302.761719 C 280.765625 325.214844 252.160156 340.503906 221.015625 346.699219 C 189.875 352.890625 157.59375 349.714844 128.261719 337.5625 C 98.925781 325.410156 73.851562 304.835938 56.210938 278.433594 C 38.570312 252.03125 29.15625 220.992188 29.15625 189.242188 C 29.15625 157.488281 38.570312 126.449219 56.210938 100.050781 C 73.851562 73.648438 98.925781 53.070312 128.261719 40.921875 C 157.59375 28.769531 189.875 25.589844 221.015625 31.785156 C 252.160156 37.980469 280.765625 53.269531 303.214844 75.722656 L 189.695312 189.242188 Z M 303.214844 302.761719 " fill-opacity="1" fill-rule="nonzero"/></g></g></g><g clip-path="url(#029f8ae6a8)"><g clip-path="url(#2d374b5e76)"><g clip-path="url(#544d823606)"><g clip-path="url(#b88a276116)"><path fill="#61b33a" d="M 303.144531 302.550781 C 280.707031 324.988281 252.117188 340.269531 220.996094 346.460938 C 189.875 352.652344 157.613281 349.472656 128.296875 337.332031 C 98.980469 325.1875 73.921875 304.621094 56.292969 278.238281 C 38.664062 251.851562 29.253906 220.832031 29.253906 189.101562 C 29.253906 157.367188 38.664062 126.347656 56.292969 99.964844 C 73.921875 73.578125 98.980469 53.015625 128.296875 40.871094 C 157.613281 28.726562 189.875 25.550781 220.996094 31.742188 C 252.117188 37.929688 280.707031 53.210938 303.144531 75.652344 L 189.695312 189.101562 Z M 303.144531 302.550781 " fill-opacity="1" fill-rule="nonzero"/></g></g></g></g><g transform="matrix(1, 0, 0, 1, 136, 0)"><g clip-path="url(#98c26e11a4)"><g fill="#ff751f" fill-opacity="1"><g transform="translate(0.457164, 116.403543)"><g><path d="M 19.734375 -18.71875 C 19.734375 -21.664062 20.015625 -24.441406 20.578125 -27.046875 C 21.148438 -29.660156 22.0625 -32.210938 23.3125 -34.703125 C 24.5625 -37.203125 26.207031 -39.359375 28.25 -41.171875 C 33.6875 -47.066406 41.285156 -50.015625 51.046875 -50.015625 C 59.210938 -50.015625 66.46875 -46.953125 72.8125 -40.828125 C 
79.164062 -34.703125 82.34375 -27.332031 82.34375 -18.71875 C 82.34375 -9.414062 79.28125 -1.925781 73.15625 3.75 C 67.257812 9.644531 59.890625 12.59375 51.046875 12.59375 C 42.648438 12.59375 35.332031 9.472656 29.09375 3.234375 C 22.851562 -3.003906 19.734375 -10.320312 19.734375 -18.71875 Z M 19.734375 -18.71875 "/></g></g></g></g></g></svg>
|
||||
|
After Width: | Height: | Size: 3.8 KiB |
BIN
frontend/public/assets/logo_dark.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 18 KiB |
BIN
frontend/public/assets/logo_light.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 24 KiB |
@@ -1,45 +1,50 @@
|
||||
import express from 'express';
|
||||
import path from 'path';
|
||||
import cors from 'cors';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { createProxyMiddleware } from 'http-proxy-middleware';
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import cors from "cors";
|
||||
import express from "express";
|
||||
import { createProxyMiddleware } from "http-proxy-middleware";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
const app = express();
|
||||
const PORT = process.env.PORT || 3000;
|
||||
const BACKEND_URL = process.env.BACKEND_URL || 'http://backend:3001';
|
||||
const BACKEND_URL = process.env.BACKEND_URL || "http://backend:3001";
|
||||
|
||||
// Enable CORS for API calls
|
||||
app.use(cors({
|
||||
origin: process.env.CORS_ORIGIN || '*',
|
||||
credentials: true
|
||||
}));
|
||||
app.use(
|
||||
cors({
|
||||
origin: process.env.CORS_ORIGIN || "*",
|
||||
credentials: true,
|
||||
}),
|
||||
);
|
||||
|
||||
// Proxy API requests to backend
|
||||
app.use('/api', createProxyMiddleware({
|
||||
app.use(
|
||||
"/api",
|
||||
createProxyMiddleware({
|
||||
target: BACKEND_URL,
|
||||
changeOrigin: true,
|
||||
logLevel: 'info',
|
||||
onError: (err, req, res) => {
|
||||
console.error('Proxy error:', err.message);
|
||||
res.status(500).json({ error: 'Backend service unavailable' });
|
||||
logLevel: "info",
|
||||
onError: (err, _req, res) => {
|
||||
console.error("Proxy error:", err.message);
|
||||
res.status(500).json({ error: "Backend service unavailable" });
|
||||
},
|
||||
onProxyReq: (proxyReq, req, res) => {
|
||||
onProxyReq: (_proxyReq, req, _res) => {
|
||||
console.log(`Proxying ${req.method} ${req.path} to ${BACKEND_URL}`);
|
||||
}
|
||||
}));
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
// Serve static files from dist directory
|
||||
app.use(express.static(path.join(__dirname, 'dist')));
|
||||
app.use(express.static(path.join(__dirname, "dist")));
|
||||
|
||||
// Handle SPA routing - serve index.html for all routes
|
||||
app.get('*', (req, res) => {
|
||||
res.sendFile(path.join(__dirname, 'dist', 'index.html'));
|
||||
app.get("*", (_req, res) => {
|
||||
res.sendFile(path.join(__dirname, "dist", "index.html"));
|
||||
});
|
||||
|
||||
app.listen(PORT, () => {
|
||||
console.log(`Frontend server running on port ${PORT}`);
|
||||
console.log(`Serving from: ${path.join(__dirname, 'dist')}`);
|
||||
console.log(`Serving from: ${path.join(__dirname, "dist")}`);
|
||||
});
|
||||
|
||||
@@ -1,135 +1,416 @@
|
||||
import React from 'react'
|
||||
import { Routes, Route } from 'react-router-dom'
|
||||
import { AuthProvider, useAuth } from './contexts/AuthContext'
|
||||
import { ThemeProvider } from './contexts/ThemeContext'
|
||||
import { UpdateNotificationProvider } from './contexts/UpdateNotificationContext'
|
||||
import ProtectedRoute from './components/ProtectedRoute'
|
||||
import Layout from './components/Layout'
|
||||
import Login from './pages/Login'
|
||||
import Dashboard from './pages/Dashboard'
|
||||
import Hosts from './pages/Hosts'
|
||||
import Packages from './pages/Packages'
|
||||
import Repositories from './pages/Repositories'
|
||||
import RepositoryDetail from './pages/RepositoryDetail'
|
||||
import Users from './pages/Users'
|
||||
import Permissions from './pages/Permissions'
|
||||
import Settings from './pages/Settings'
|
||||
import Options from './pages/Options'
|
||||
import Profile from './pages/Profile'
|
||||
import HostDetail from './pages/HostDetail'
|
||||
import PackageDetail from './pages/PackageDetail'
|
||||
import FirstTimeAdminSetup from './components/FirstTimeAdminSetup'
|
||||
import { lazy, Suspense } from "react";
|
||||
import { Route, Routes } from "react-router-dom";
|
||||
import FirstTimeAdminSetup from "./components/FirstTimeAdminSetup";
|
||||
import Layout from "./components/Layout";
|
||||
import LogoProvider from "./components/LogoProvider";
|
||||
import ProtectedRoute from "./components/ProtectedRoute";
|
||||
import SettingsLayout from "./components/SettingsLayout";
|
||||
import { isAuthPhase } from "./constants/authPhases";
|
||||
import { AuthProvider, useAuth } from "./contexts/AuthContext";
|
||||
import { ThemeProvider } from "./contexts/ThemeContext";
|
||||
import { UpdateNotificationProvider } from "./contexts/UpdateNotificationContext";
|
||||
|
||||
// Lazy load pages
|
||||
const Dashboard = lazy(() => import("./pages/Dashboard"));
|
||||
const HostDetail = lazy(() => import("./pages/HostDetail"));
|
||||
const Hosts = lazy(() => import("./pages/Hosts"));
|
||||
const Login = lazy(() => import("./pages/Login"));
|
||||
const PackageDetail = lazy(() => import("./pages/PackageDetail"));
|
||||
const Packages = lazy(() => import("./pages/Packages"));
|
||||
const Profile = lazy(() => import("./pages/Profile"));
|
||||
const Automation = lazy(() => import("./pages/Automation"));
|
||||
const Repositories = lazy(() => import("./pages/Repositories"));
|
||||
const RepositoryDetail = lazy(() => import("./pages/RepositoryDetail"));
|
||||
const Docker = lazy(() => import("./pages/Docker"));
|
||||
const DockerContainerDetail = lazy(
|
||||
() => import("./pages/docker/ContainerDetail"),
|
||||
);
|
||||
const DockerImageDetail = lazy(() => import("./pages/docker/ImageDetail"));
|
||||
const DockerHostDetail = lazy(() => import("./pages/docker/HostDetail"));
|
||||
const AlertChannels = lazy(() => import("./pages/settings/AlertChannels"));
|
||||
const Integrations = lazy(() => import("./pages/settings/Integrations"));
|
||||
const Notifications = lazy(() => import("./pages/settings/Notifications"));
|
||||
const PatchManagement = lazy(() => import("./pages/settings/PatchManagement"));
|
||||
const SettingsAgentConfig = lazy(
|
||||
() => import("./pages/settings/SettingsAgentConfig"),
|
||||
);
|
||||
const SettingsHostGroups = lazy(
|
||||
() => import("./pages/settings/SettingsHostGroups"),
|
||||
);
|
||||
const SettingsServerConfig = lazy(
|
||||
() => import("./pages/settings/SettingsServerConfig"),
|
||||
);
|
||||
const SettingsUsers = lazy(() => import("./pages/settings/SettingsUsers"));
|
||||
|
||||
// Loading fallback component
|
||||
const LoadingFallback = () => (
|
||||
<div className="min-h-screen bg-gradient-to-br from-primary-50 to-secondary-50 dark:from-secondary-900 dark:to-secondary-800 flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary-600 mx-auto mb-4"></div>
|
||||
<p className="text-secondary-600 dark:text-secondary-300">Loading...</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
function AppRoutes() {
|
||||
const { needsFirstTimeSetup, checkingSetup, isAuthenticated } = useAuth()
|
||||
const isAuth = isAuthenticated() // Call the function to get boolean value
|
||||
const { needsFirstTimeSetup, authPhase, isAuthenticated } = useAuth();
|
||||
const isAuth = isAuthenticated(); // Call the function to get boolean value
|
||||
|
||||
// Show loading while checking if setup is needed
|
||||
if (checkingSetup) {
|
||||
// Show loading while checking setup or initialising
|
||||
if (
|
||||
isAuthPhase.initialising(authPhase) ||
|
||||
isAuthPhase.checkingSetup(authPhase)
|
||||
) {
|
||||
return (
|
||||
<div className="min-h-screen bg-gradient-to-br from-primary-50 to-secondary-50 dark:from-secondary-900 dark:to-secondary-800 flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-primary-600 mx-auto mb-4"></div>
|
||||
<p className="text-secondary-600 dark:text-secondary-300">Checking system status...</p>
|
||||
<p className="text-secondary-600 dark:text-secondary-300">
|
||||
Checking system status...
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Show first-time setup if no admin users exist
|
||||
if (needsFirstTimeSetup && !isAuth) {
|
||||
return <FirstTimeAdminSetup />
|
||||
return <FirstTimeAdminSetup />;
|
||||
}
|
||||
|
||||
return (
|
||||
<Suspense fallback={<LoadingFallback />}>
|
||||
<Routes>
|
||||
<Route path="/login" element={<Login />} />
|
||||
<Route path="/" element={
|
||||
<Route
|
||||
path="/"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_dashboard">
|
||||
<Layout>
|
||||
<Dashboard />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/hosts" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/hosts"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_hosts">
|
||||
<Layout>
|
||||
<Hosts />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/hosts/:hostId" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/hosts/:hostId"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_hosts">
|
||||
<Layout>
|
||||
<HostDetail />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/packages" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/packages"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_packages">
|
||||
<Layout>
|
||||
<Packages />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/repositories" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/repositories"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_hosts">
|
||||
<Layout>
|
||||
<Repositories />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/repositories/:repositoryId" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/repositories/:repositoryId"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_hosts">
|
||||
<Layout>
|
||||
<RepositoryDetail />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/users" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/automation"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_hosts">
|
||||
<Layout>
|
||||
<Automation />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/docker"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_reports">
|
||||
<Layout>
|
||||
<Docker />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/docker/containers/:id"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_reports">
|
||||
<Layout>
|
||||
<DockerContainerDetail />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/docker/images/:id"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_reports">
|
||||
<Layout>
|
||||
<DockerImageDetail />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/docker/hosts/:id"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_reports">
|
||||
<Layout>
|
||||
<DockerHostDetail />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/users"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_view_users">
|
||||
<Layout>
|
||||
<Users />
|
||||
<SettingsUsers />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
||||
} />
|
||||
<Route path="/permissions" element={
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/permissions"
|
||||
element={
|
||||
<ProtectedRoute requirePermission="can_manage_settings">
|
||||
<Layout>
|
||||
<Permissions />
|
||||
<SettingsUsers />
|
||||
</Layout>
|
||||
</ProtectedRoute>
|
} />
<Route path="/settings" element={
}
/>
<Route
path="/settings"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<Settings />
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
} />
<Route path="/options" element={
<ProtectedRoute requirePermission="can_manage_hosts">
}
/>
<Route
path="/settings/users"
element={
<ProtectedRoute requirePermission="can_view_users">
<Layout>
<Options />
<SettingsUsers />
</Layout>
</ProtectedRoute>
} />
<Route path="/profile" element={
}
/>
<Route
path="/settings/roles"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsUsers />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/profile"
element={
<ProtectedRoute>
<Layout>
<SettingsLayout>
<Profile />
</SettingsLayout>
</Layout>
</ProtectedRoute>
} />
<Route path="/packages/:packageId" element={
}
/>
<Route
path="/settings/host-groups"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/notifications"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<Notifications />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-config/management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-config/version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/alert-channels"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsLayout>
<AlertChannels />
</SettingsLayout>
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/integrations"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<Integrations />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/patch-management"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<PatchManagement />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-url"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/server-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/branding"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsServerConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/settings/agent-version"
element={
<ProtectedRoute requirePermission="can_manage_settings">
<Layout>
<SettingsAgentConfig />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/options"
element={
<ProtectedRoute requirePermission="can_manage_hosts">
<Layout>
<SettingsHostGroups />
</Layout>
</ProtectedRoute>
}
/>
<Route
path="/packages/:packageId"
element={
<ProtectedRoute requirePermission="can_view_packages">
<Layout>
<PackageDetail />
</Layout>
</ProtectedRoute>
} />
}
/>
</Routes>
)
</Suspense>
);
}

function App() {
@@ -137,11 +418,13 @@ function App() {
<ThemeProvider>
<AuthProvider>
<UpdateNotificationProvider>
<LogoProvider>
<AppRoutes />
</LogoProvider>
</UpdateNotificationProvider>
</AuthProvider>
</ThemeProvider>
)
);
}

export default App
export default App;

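Note on the route blocks above: every page is wrapped in a ProtectedRoute that takes an optional requirePermission prop, so the permission check lives in one place rather than in each page component. A minimal sketch of such a wrapper, assuming an AuthContext that exposes the current user with a flat permissions map (the repo's actual ProtectedRoute may differ; only useAuth and the requirePermission prop name are taken from the diff above):

import { Navigate } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";

// Redirect unauthenticated users to /login, and authenticated users
// without the named permission back to the dashboard.
const ProtectedRouteSketch = ({ requirePermission, children }) => {
  const { user, isAuthenticated } = useAuth();
  if (!isAuthenticated) return <Navigate to="/login" replace />;
  if (requirePermission && !user?.permissions?.[requirePermission]) {
    return <Navigate to="/" replace />;
  }
  return children;
};
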
@@ -1,38 +1,34 @@
import React, { useState, useEffect } from 'react';
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import {
DndContext,
closestCenter,
DndContext,
KeyboardSensor,
PointerSensor,
useSensor,
useSensors,
} from '@dnd-kit/core';
} from "@dnd-kit/core";
import {
arrayMove,
SortableContext,
sortableKeyboardCoordinates,
verticalListSortingStrategy,
} from '@dnd-kit/sortable';
import {
useSortable,
} from '@dnd-kit/sortable';
import { CSS } from '@dnd-kit/utilities';
verticalListSortingStrategy,
} from "@dnd-kit/sortable";
import { CSS } from "@dnd-kit/utilities";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import {
X,
GripVertical,
Eye,
EyeOff,
Save,
GripVertical,
RotateCcw,
Settings as SettingsIcon
} from 'lucide-react';
import { dashboardPreferencesAPI } from '../utils/api';
import { useTheme } from '../contexts/ThemeContext';
Save,
Settings as SettingsIcon,
X,
} from "lucide-react";
import { useEffect, useState } from "react";
import { dashboardPreferencesAPI } from "../utils/api";

// Sortable Card Item Component
const SortableCardItem = ({ card, onToggle }) => {
const { isDark } = useTheme();
const {
attributes,
listeners,
@@ -40,7 +36,9 @@ const SortableCardItem = ({ card, onToggle }) => {
transform,
transition,
isDragging,
} = useSortable({ id: card.cardId });
} = useSortable({
id: card.cardId,
});

const style = {
transform: CSS.Transform.toString(transform),
@@ -53,7 +51,7 @@ const SortableCardItem = ({ card, onToggle }) => {
ref={setNodeRef}
style={style}
className={`flex items-center justify-between p-3 bg-white dark:bg-secondary-800 border border-secondary-200 dark:border-secondary-600 rounded-lg ${
isDragging ? 'shadow-lg' : 'shadow-sm'
isDragging ? "shadow-lg" : "shadow-sm"
}`}
>
<div className="flex items-center gap-3">
@@ -68,18 +66,21 @@ const SortableCardItem = ({ card, onToggle }) => {
<div className="text-sm font-medium text-secondary-900 dark:text-white">
{card.title}
{card.typeLabel ? (
<span className="ml-2 text-xs font-normal text-secondary-500 dark:text-secondary-400">({card.typeLabel})</span>
<span className="ml-2 text-xs font-normal text-secondary-500 dark:text-secondary-400">
({card.typeLabel})
</span>
) : null}
</div>
</div>
</div>

<button
type="button"
onClick={() => onToggle(card.cardId)}
className={`flex items-center gap-1 px-2 py-1 rounded text-xs font-medium transition-colors ${
card.enabled
? 'bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200 hover:bg-green-200 dark:hover:bg-green-800'
: 'bg-secondary-100 dark:bg-secondary-700 text-secondary-600 dark:text-secondary-300 hover:bg-secondary-200 dark:hover:bg-secondary-600'
? "bg-green-100 dark:bg-green-900 text-green-800 dark:text-green-200 hover:bg-green-200 dark:hover:bg-green-800"
: "bg-secondary-100 dark:bg-secondary-700 text-secondary-600 dark:text-secondary-300 hover:bg-secondary-200 dark:hover:bg-secondary-600"
}`}
>
{card.enabled ? (
@@ -102,27 +103,27 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
const [cards, setCards] = useState([]);
const [hasChanges, setHasChanges] = useState(false);
const queryClient = useQueryClient();
const { isDark } = useTheme();

const sensors = useSensors(
useSensor(PointerSensor),
useSensor(KeyboardSensor, {
coordinateGetter: sortableKeyboardCoordinates,
})
}),
);

// Fetch user's dashboard preferences
const { data: preferences, isLoading } = useQuery({
queryKey: ['dashboardPreferences'],
queryFn: () => dashboardPreferencesAPI.get().then(res => res.data),
enabled: isOpen
queryKey: ["dashboardPreferences"],
queryFn: () => dashboardPreferencesAPI.get().then((res) => res.data),
enabled: isOpen,
});

// Fetch default card configuration
const { data: defaultCards } = useQuery({
queryKey: ['dashboardDefaultCards'],
queryFn: () => dashboardPreferencesAPI.getDefaults().then(res => res.data),
enabled: isOpen
queryKey: ["dashboardDefaultCards"],
queryFn: () =>
dashboardPreferencesAPI.getDefaults().then((res) => res.data),
enabled: isOpen,
});

// Update preferences mutation
@@ -130,15 +131,18 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
mutationFn: (preferences) => dashboardPreferencesAPI.update(preferences),
onSuccess: (response) => {
// Optimistically update the query cache with the correct data structure
queryClient.setQueryData(['dashboardPreferences'], response.data.preferences);
queryClient.setQueryData(
["dashboardPreferences"],
response.data.preferences,
);
// Also invalidate to ensure fresh data
queryClient.invalidateQueries(['dashboardPreferences']);
queryClient.invalidateQueries(["dashboardPreferences"]);
setHasChanges(false);
onClose();
},
onError: (error) => {
console.error('Failed to update dashboard preferences:', error);
}
console.error("Failed to update dashboard preferences:", error);
},
});

// Initialize cards when preferences or defaults are loaded
@@ -152,14 +156,27 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
}));

const typeLabelFor = (cardId) => {
if (['totalHosts','hostsNeedingUpdates','totalOutdatedPackages','securityUpdates','upToDateHosts','totalHostGroups','totalUsers','totalRepos'].includes(cardId)) return 'Top card';
if (cardId === 'osDistribution') return 'Pie chart';
if (cardId === 'osDistributionBar') return 'Bar chart';
if (cardId === 'updateStatus') return 'Pie chart';
if (cardId === 'packagePriority') return 'Pie chart';
if (cardId === 'recentUsers') return 'Table';
if (cardId === 'recentCollection') return 'Table';
if (cardId === 'quickStats') return 'Wide card';
if (
[
"totalHosts",
"hostsNeedingUpdates",
"totalOutdatedPackages",
"securityUpdates",
"upToDateHosts",
"totalHostGroups",
"totalUsers",
"totalRepos",
].includes(cardId)
)
return "Top card";
if (cardId === "osDistribution") return "Pie chart";
if (cardId === "osDistributionBar") return "Bar chart";
if (cardId === "osDistributionDoughnut") return "Doughnut chart";
if (cardId === "updateStatus") return "Pie chart";
if (cardId === "packagePriority") return "Pie chart";
if (cardId === "recentUsers") return "Table";
if (cardId === "recentCollection") return "Table";
if (cardId === "quickStats") return "Wide card";
return undefined;
};

@@ -167,11 +184,13 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
const mergedCards = defaultCards
.map((defaultCard) => {
const userPreference = normalizedPreferences.find(
(p) => p.cardId === defaultCard.cardId
(p) => p.cardId === defaultCard.cardId,
);
return {
...defaultCard,
enabled: userPreference ? userPreference.enabled : defaultCard.enabled,
enabled: userPreference
? userPreference.enabled
: defaultCard.enabled,
order: userPreference ? userPreference.order : defaultCard.order,
typeLabel: typeLabelFor(defaultCard.cardId),
};
@@ -187,15 +206,15 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {

if (active.id !== over.id) {
setCards((items) => {
const oldIndex = items.findIndex(item => item.cardId === active.id);
const newIndex = items.findIndex(item => item.cardId === over.id);
const oldIndex = items.findIndex((item) => item.cardId === active.id);
const newIndex = items.findIndex((item) => item.cardId === over.id);

const newItems = arrayMove(items, oldIndex, newIndex);

// Update order values
return newItems.map((item, index) => ({
...item,
order: index
order: index,
}));
});
setHasChanges(true);
@@ -203,21 +222,19 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
};

const handleToggle = (cardId) => {
setCards(prevCards =>
prevCards.map(card =>
card.cardId === cardId
? { ...card, enabled: !card.enabled }
: card
)
setCards((prevCards) =>
prevCards.map((card) =>
card.cardId === cardId ? { ...card, enabled: !card.enabled } : card,
),
);
setHasChanges(true);
};

const handleSave = () => {
const preferences = cards.map(card => ({
const preferences = cards.map((card) => ({
cardId: card.cardId,
enabled: card.enabled,
order: card.order
order: card.order,
}));

updatePreferencesMutation.mutate(preferences);
@@ -225,10 +242,10 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {

const handleReset = () => {
if (defaultCards) {
const resetCards = defaultCards.map(card => ({
const resetCards = defaultCards.map((card) => ({
...card,
enabled: true,
order: card.order
order: card.order,
}));
setCards(resetCards);
setHasChanges(true);
@@ -240,7 +257,12 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
return (
<div className="fixed inset-0 z-50 overflow-y-auto">
<div className="flex items-center justify-center min-h-screen pt-4 px-4 pb-20 text-center sm:block sm:p-0">
<div className="fixed inset-0 bg-secondary-500 bg-opacity-75 transition-opacity" onClick={onClose} />
<button
type="button"
className="fixed inset-0 bg-secondary-500 bg-opacity-75 transition-opacity cursor-default"
onClick={onClose}
aria-label="Close modal"
/>

<div className="inline-block align-bottom bg-white dark:bg-secondary-800 rounded-lg text-left overflow-hidden shadow-xl transform transition-all sm:my-8 sm:align-middle sm:max-w-lg sm:w-full">
<div className="bg-white dark:bg-secondary-800 px-4 pt-5 pb-4 sm:p-6 sm:pb-4">
@@ -252,6 +274,7 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
</h3>
</div>
<button
type="button"
onClick={onClose}
className="text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300"
>
@@ -260,8 +283,9 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
</div>

<p className="text-sm text-secondary-600 dark:text-secondary-400 mb-6">
Customize your dashboard by reordering cards and toggling their visibility.
Drag cards to reorder them, and click the visibility toggle to show/hide cards.
Customize your dashboard by reordering cards and toggling their
visibility. Drag cards to reorder them, and click the visibility
toggle to show/hide cards.
</p>

{isLoading ? (
@@ -274,7 +298,10 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
collisionDetection={closestCenter}
onDragEnd={handleDragEnd}
>
<SortableContext items={cards.map(card => card.cardId)} strategy={verticalListSortingStrategy}>
<SortableContext
items={cards.map((card) => card.cardId)}
strategy={verticalListSortingStrategy}
>
<div className="space-y-2 max-h-96 overflow-y-auto">
{cards.map((card) => (
<SortableCardItem
@@ -291,12 +318,13 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {

<div className="bg-secondary-50 dark:bg-secondary-700 px-4 py-3 sm:px-6 sm:flex sm:flex-row-reverse">
<button
type="button"
onClick={handleSave}
disabled={!hasChanges || updatePreferencesMutation.isPending}
className={`w-full inline-flex justify-center rounded-md border border-transparent shadow-sm px-4 py-2 text-base font-medium text-white sm:ml-3 sm:w-auto sm:text-sm ${
!hasChanges || updatePreferencesMutation.isPending
? 'bg-secondary-400 cursor-not-allowed'
: 'bg-primary-600 hover:bg-primary-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary-500'
? "bg-secondary-400 cursor-not-allowed"
: "bg-primary-600 hover:bg-primary-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary-500"
}`}
>
{updatePreferencesMutation.isPending ? (
@@ -313,6 +341,7 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
</button>

<button
type="button"
onClick={handleReset}
className="mt-3 w-full inline-flex justify-center rounded-md border border-secondary-300 dark:border-secondary-600 shadow-sm px-4 py-2 bg-white dark:bg-secondary-800 text-base font-medium text-secondary-700 dark:text-secondary-200 hover:bg-secondary-50 dark:hover:bg-secondary-700 sm:mt-0 sm:ml-3 sm:w-auto sm:text-sm"
>
@@ -321,6 +350,7 @@ const DashboardSettingsModal = ({ isOpen, onClose }) => {
</button>

<button
type="button"
onClick={onClose}
className="mt-3 w-full inline-flex justify-center rounded-md border border-secondary-300 dark:border-secondary-600 shadow-sm px-4 py-2 bg-white dark:bg-secondary-800 text-base font-medium text-secondary-700 dark:text-secondary-200 hover:bg-secondary-50 dark:hover:bg-secondary-700 sm:mt-0 sm:ml-3 sm:w-auto sm:text-sm"
>

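The drag handler above is the standard dnd-kit pattern: find the old and new indexes, move the item, then re-number the persisted order field. A self-contained sketch of that step, assuming items shaped like the card preferences ({ cardId, enabled, order }):

import { arrayMove } from "@dnd-kit/sortable";

// Move the dragged card and renumber `order` so it can be saved as-is.
function reorderCards(items, activeId, overId) {
  const oldIndex = items.findIndex((item) => item.cardId === activeId);
  const newIndex = items.findIndex((item) => item.cardId === overId);
  return arrayMove(items, oldIndex, newIndex).map((item, index) => ({
    ...item,
    order: index,
  }));
}

On save, the component writes the server response into the TanStack Query cache with setQueryData and then invalidates the same key; the optimistic write gives an instant UI update while the invalidation guarantees a fresh refetch.
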
16 frontend/src/components/DiscordIcon.jsx Normal file
@@ -0,0 +1,16 @@
const DiscordIcon = ({ className = "h-5 w-5" }) => {
return (
<svg
viewBox="0 0 24 24"
fill="currentColor"
className={className}
xmlns="http://www.w3.org/2000/svg"
aria-label="Discord"
>
<title>Discord</title>
<path d="M20.317 4.3698a19.7913 19.7913 0 00-4.8851-1.5152.0741.0741 0 00-.0785.0371c-.211.3753-.4447.8648-.6083 1.2495-1.8447-.2762-3.68-.2762-5.4868 0-.1636-.3933-.4058-.8742-.6177-1.2495a.077.077 0 00-.0785-.037 19.7363 19.7363 0 00-4.8852 1.515.0699.0699 0 00-.0321.0277C.5334 9.0458-.319 13.5799.0992 18.0578a.0824.0824 0 00.0312.0561c2.0528 1.5076 4.0413 2.4228 5.9929 3.0294a.0777.0777 0 00.0842-.0276c.4616-.6304.8731-1.2952 1.226-1.9942a.076.076 0 00-.0416-.1057c-.6528-.2476-1.2743-.5495-1.8722-.8923a.077.077 0 01-.0076-.1277c.1258-.0943.2517-.1923.3718-.2914a.0743.0743 0 01.0776-.0105c3.9278 1.7933 8.18 1.7933 12.0614 0a.0739.0739 0 01.0785.0095c.1202.099.246.1981.3728.2924a.077.077 0 01-.0066.1276 12.2986 12.2986 0 01-1.873.8914.0766.0766 0 00-.0407.1067c.3604.698.7719 1.3628 1.225 1.9932a.076.076 0 00.0842.0286c1.961-.6067 3.9495-1.5219 6.0023-3.0294a.077.077 0 00.0313-.0552c.5004-5.177-.8382-9.6739-3.5485-13.6604a.061.061 0 00-.0312-.0286zM8.02 15.3312c-1.1825 0-2.1569-1.0857-2.1569-2.419 0-1.3332.9555-2.4189 2.157-2.4189 1.2108 0 2.1757 1.0952 2.1568 2.419 0 1.3332-.9555 2.4189-2.1569 2.4189zm7.9748 0c-1.1825 0-2.1569-1.0857-2.1569-2.419 0-1.3332.9554-2.4189 2.1569-2.4189 1.2108 0 2.1757 1.0952 2.1568 2.419 0 1.3332-.946 2.4189-2.1568 2.4189z" />
</svg>
);
};

export default DiscordIcon;
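Usage is a one-liner; the component only parameterizes the Tailwind size/color classes (the example below is illustrative, not from the repo):

import DiscordIcon from "./DiscordIcon";

// Renders the Discord glyph at 16px in the muted secondary color.
const FooterLink = () => <DiscordIcon className="h-4 w-4 text-secondary-400" />;
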
@@ -1,108 +1,135 @@
import React, { useState } from 'react'
import { useAuth } from '../contexts/AuthContext'
import { UserPlus, Shield, CheckCircle, AlertCircle } from 'lucide-react'
import { AlertCircle, CheckCircle, Shield, UserPlus } from "lucide-react";
import { useId, useState } from "react";
import { useNavigate } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";

const FirstTimeAdminSetup = () => {
const { login } = useAuth()
const { login, setAuthState } = useAuth();
const navigate = useNavigate();
const firstNameId = useId();
const lastNameId = useId();
const usernameId = useId();
const emailId = useId();
const passwordId = useId();
const confirmPasswordId = useId();
const [formData, setFormData] = useState({
username: '',
email: '',
password: '',
confirmPassword: '',
firstName: '',
lastName: ''
})
const [isLoading, setIsLoading] = useState(false)
const [error, setError] = useState('')
const [success, setSuccess] = useState(false)
username: "",
email: "",
password: "",
confirmPassword: "",
firstName: "",
lastName: "",
});
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState("");
const [success, setSuccess] = useState(false);

const handleInputChange = (e) => {
const { name, value } = e.target
setFormData(prev => ({
const { name, value } = e.target;
setFormData((prev) => ({
...prev,
[name]: value
}))
[name]: value,
}));
// Clear error when user starts typing
if (error) setError('')
}
if (error) setError("");
};

const validateForm = () => {
if (!formData.firstName.trim()) {
setError('First name is required')
return false
setError("First name is required");
return false;
}
if (!formData.lastName.trim()) {
setError('Last name is required')
return false
setError("Last name is required");
return false;
}
if (!formData.username.trim()) {
setError('Username is required')
return false
setError("Username is required");
return false;
}
if (!formData.email.trim()) {
setError('Email address is required')
return false
setError("Email address is required");
return false;
}

// Enhanced email validation
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
if (!emailRegex.test(formData.email.trim())) {
setError('Please enter a valid email address (e.g., user@example.com)')
return false
setError("Please enter a valid email address (e.g., user@example.com)");
return false;
}

if (formData.password.length < 8) {
setError('Password must be at least 8 characters for security')
return false
setError("Password must be at least 8 characters for security");
return false;
}
if (formData.password !== formData.confirmPassword) {
setError('Passwords do not match')
return false
}
return true
setError("Passwords do not match");
return false;
}
return true;
};

const handleSubmit = async (e) => {
e.preventDefault()
e.preventDefault();

if (!validateForm()) return
if (!validateForm()) return;

setIsLoading(true)
setError('')
setIsLoading(true);
setError("");

try {
const response = await fetch('/api/v1/auth/setup-admin', {
method: 'POST',
const response = await fetch("/api/v1/auth/setup-admin", {
method: "POST",
headers: {
'Content-Type': 'application/json'
"Content-Type": "application/json",
},
body: JSON.stringify({
username: formData.username.trim(),
email: formData.email.trim(),
password: formData.password,
firstName: formData.firstName.trim(),
lastName: formData.lastName.trim()
})
})
lastName: formData.lastName.trim(),
}),
});

const data = await response.json()
const data = await response.json();

if (response.ok) {
setSuccess(true)
// Auto-login the user after successful setup
setSuccess(true);

// If the response includes a token, use it to automatically log in
if (data.token && data.user) {
// Set the authentication state immediately
setAuthState(data.token, data.user);
// Navigate to dashboard after successful setup
setTimeout(() => {
login(formData.username.trim(), formData.password)
}, 2000)
navigate("/", { replace: true });
}, 100); // Small delay to ensure auth state is set
} else {
setError(data.error || 'Failed to create admin user')
// Fallback to manual login if no token provided
setTimeout(async () => {
try {
await login(formData.username.trim(), formData.password);
} catch (error) {
console.error("Auto-login failed:", error);
setError(
"Account created but auto-login failed. Please login manually.",
);
setSuccess(false);
}
}, 2000);
}
} else {
setError(data.error || "Failed to create admin user");
}
} catch (error) {
console.error('Setup error:', error)
setError('Network error. Please try again.')
console.error("Setup error:", error);
setError("Network error. Please try again.");
} finally {
setIsLoading(false)
}
setIsLoading(false);
}
};

if (success) {
return (
@@ -118,7 +145,8 @@ const FirstTimeAdminSetup = () => {
Admin Account Created!
</h1>
<p className="text-secondary-600 dark:text-secondary-300 mb-6">
Your admin account has been successfully created. You will be automatically logged in shortly.
Your admin account has been successfully created and you are now
logged in. Redirecting to the dashboard...
</p>
<div className="flex justify-center">
<div className="animate-spin rounded-full h-6 w-6 border-b-2 border-primary-600"></div>
@@ -126,7 +154,7 @@ const FirstTimeAdminSetup = () => {
</div>
</div>
</div>
)
);
}

return (
@@ -151,7 +179,9 @@ const FirstTimeAdminSetup = () => {
<div className="mb-6 p-4 bg-danger-50 dark:bg-danger-900 border border-danger-200 dark:border-danger-700 rounded-lg">
<div className="flex items-center">
<AlertCircle className="h-5 w-5 text-danger-600 dark:text-danger-400 mr-2" />
<span className="text-danger-700 dark:text-danger-300 text-sm">{error}</span>
<span className="text-danger-700 dark:text-danger-300 text-sm">
{error}
</span>
</div>
</div>
)}
@@ -159,12 +189,15 @@ const FirstTimeAdminSetup = () => {
<form onSubmit={handleSubmit} className="space-y-6">
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
<div>
<label htmlFor="firstName" className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
<label
htmlFor={firstNameId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2"
>
First Name
</label>
<input
type="text"
id="firstName"
id={firstNameId}
name="firstName"
value={formData.firstName}
onChange={handleInputChange}
@@ -175,12 +208,15 @@ const FirstTimeAdminSetup = () => {
/>
</div>
<div>
<label htmlFor="lastName" className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
<label
htmlFor={lastNameId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2"
>
Last Name
</label>
<input
type="text"
id="lastName"
id={lastNameId}
name="lastName"
value={formData.lastName}
onChange={handleInputChange}
@@ -193,12 +229,15 @@ const FirstTimeAdminSetup = () => {
</div>

<div>
<label htmlFor="username" className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
<label
htmlFor={usernameId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2"
>
Username
</label>
<input
type="text"
id="username"
id={usernameId}
name="username"
value={formData.username}
onChange={handleInputChange}
@@ -210,12 +249,15 @@ const FirstTimeAdminSetup = () => {
</div>

<div>
<label htmlFor="email" className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
<label
htmlFor={emailId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2"
>
Email Address
</label>
<input
type="email"
id="email"
id={emailId}
name="email"
value={formData.email}
onChange={handleInputChange}
@@ -227,12 +269,15 @@ const FirstTimeAdminSetup = () => {
</div>

<div>
<label htmlFor="password" className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
<label
htmlFor={passwordId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2"
>
Password
</label>
<input
type="password"
id="password"
id={passwordId}
name="password"
value={formData.password}
onChange={handleInputChange}
@@ -244,12 +289,15 @@ const FirstTimeAdminSetup = () => {
</div>

<div>
<label htmlFor="confirmPassword" className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2">
<label
htmlFor={confirmPasswordId}
className="block text-sm font-medium text-secondary-700 dark:text-secondary-300 mb-2"
>
Confirm Password
</label>
<input
type="password"
id="confirmPassword"
id={confirmPasswordId}
name="confirmPassword"
value={formData.confirmPassword}
onChange={handleInputChange}
@@ -284,14 +332,17 @@ const FirstTimeAdminSetup = () => {
<Shield className="h-5 w-5 text-blue-600 dark:text-blue-400 mr-2 mt-0.5 flex-shrink-0" />
<div className="text-sm text-blue-700 dark:text-blue-300">
<p className="font-medium mb-1">Admin Privileges</p>
<p>This account will have full administrative access to manage users, hosts, packages, and system settings.</p>
<p>
This account will have full administrative access to manage
users, hosts, packages, and system settings.
</p>
</div>
</div>
</div>
</div>
</div>
</div>
)
}
);
};

export default FirstTimeAdminSetup
export default FirstTimeAdminSetup;

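The submit flow above has two paths: if /api/v1/auth/setup-admin returns { token, user }, the client seeds the auth context directly and redirects; otherwise it falls back to a normal credential login. A condensed sketch of that control flow (the helper name and argument shape are illustrative, not repo code):

// Create the first admin, then establish a session by whichever
// mechanism the server response supports.
async function setupAdmin(payload, { setAuthState, login, navigate }) {
  const res = await fetch("/api/v1/auth/setup-admin", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });
  const data = await res.json();
  if (!res.ok) throw new Error(data.error || "Failed to create admin user");
  if (data.token && data.user) {
    setAuthState(data.token, data.user); // session from the setup response
    navigate("/", { replace: true });
  } else {
    await login(payload.username, payload.password); // fallback path
  }
}
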
428 frontend/src/components/GlobalSearch.jsx Normal file
@@ -0,0 +1,428 @@
import { GitBranch, Package, Search, Server, User, X } from "lucide-react";
import { useCallback, useEffect, useRef, useState } from "react";
import { useNavigate } from "react-router-dom";
import { searchAPI } from "../utils/api";

const GlobalSearch = () => {
const [query, setQuery] = useState("");
const [results, setResults] = useState(null);
const [isOpen, setIsOpen] = useState(false);
const [isLoading, setIsLoading] = useState(false);
const [selectedIndex, setSelectedIndex] = useState(-1);
const searchRef = useRef(null);
const inputRef = useRef(null);
const navigate = useNavigate();

// Debounce search
const debounceTimerRef = useRef(null);

const performSearch = useCallback(async (searchQuery) => {
if (!searchQuery || searchQuery.trim().length === 0) {
setResults(null);
setIsOpen(false);
return;
}

setIsLoading(true);
try {
const response = await searchAPI.global(searchQuery);
setResults(response.data);
setIsOpen(true);
setSelectedIndex(-1);
} catch (error) {
console.error("Search error:", error);
setResults(null);
} finally {
setIsLoading(false);
}
}, []);

const handleInputChange = (e) => {
const value = e.target.value;
setQuery(value);

// Clear previous timer
if (debounceTimerRef.current) {
clearTimeout(debounceTimerRef.current);
}

// Set new timer
debounceTimerRef.current = setTimeout(() => {
performSearch(value);
}, 300);
};

const handleClear = () => {
// Clear debounce timer to prevent any pending searches
if (debounceTimerRef.current) {
clearTimeout(debounceTimerRef.current);
}
setQuery("");
setResults(null);
setIsOpen(false);
setSelectedIndex(-1);
inputRef.current?.focus();
};

const handleResultClick = (result) => {
// Navigate based on result type
switch (result.type) {
case "host":
navigate(`/hosts/${result.id}`);
break;
case "package":
navigate(`/packages/${result.id}`);
break;
case "repository":
navigate(`/repositories/${result.id}`);
break;
case "user":
// Users don't have detail pages, so navigate to settings
navigate("/settings/users");
break;
default:
break;
}

// Close dropdown and clear
handleClear();
};

// Close dropdown when clicking outside
useEffect(() => {
const handleClickOutside = (event) => {
if (searchRef.current && !searchRef.current.contains(event.target)) {
setIsOpen(false);
}
};

document.addEventListener("mousedown", handleClickOutside);
return () => {
document.removeEventListener("mousedown", handleClickOutside);
};
}, []);

// Keyboard navigation
const flattenedResults = [];
if (results) {
if (results.hosts?.length > 0) {
flattenedResults.push({ type: "header", label: "Hosts" });
flattenedResults.push(...results.hosts);
}
if (results.packages?.length > 0) {
flattenedResults.push({ type: "header", label: "Packages" });
flattenedResults.push(...results.packages);
}
if (results.repositories?.length > 0) {
flattenedResults.push({ type: "header", label: "Repositories" });
flattenedResults.push(...results.repositories);
}
if (results.users?.length > 0) {
flattenedResults.push({ type: "header", label: "Users" });
flattenedResults.push(...results.users);
}
}

const navigableResults = flattenedResults.filter((r) => r.type !== "header");

const handleKeyDown = (e) => {
if (!isOpen || !results) return;

switch (e.key) {
case "ArrowDown":
e.preventDefault();
setSelectedIndex((prev) =>
prev < navigableResults.length - 1 ? prev + 1 : prev,
);
break;
case "ArrowUp":
e.preventDefault();
setSelectedIndex((prev) => (prev > 0 ? prev - 1 : -1));
break;
case "Enter":
e.preventDefault();
if (selectedIndex >= 0 && navigableResults[selectedIndex]) {
handleResultClick(navigableResults[selectedIndex]);
}
break;
case "Escape":
e.preventDefault();
setIsOpen(false);
setSelectedIndex(-1);
break;
default:
break;
}
};

// Get icon for result type
const getResultIcon = (type) => {
switch (type) {
case "host":
return <Server className="h-4 w-4 text-blue-500" />;
case "package":
return <Package className="h-4 w-4 text-green-500" />;
case "repository":
return <GitBranch className="h-4 w-4 text-purple-500" />;
case "user":
return <User className="h-4 w-4 text-orange-500" />;
default:
return null;
}
};

// Get display text for result
const getResultDisplay = (result) => {
switch (result.type) {
case "host":
return {
primary: result.friendly_name || result.hostname,
secondary: result.ip || result.hostname,
};
case "package":
return {
primary: result.name,
secondary: result.description || result.category,
};
case "repository":
return {
primary: result.name,
secondary: result.distribution,
};
case "user":
return {
primary: result.username,
secondary: result.email,
};
default:
return { primary: "", secondary: "" };
}
};

const hasResults =
results &&
(results.hosts?.length > 0 ||
results.packages?.length > 0 ||
results.repositories?.length > 0 ||
results.users?.length > 0);

return (
<div ref={searchRef} className="relative w-full max-w-sm">
<div className="relative">
<div className="pointer-events-none absolute inset-y-0 left-0 flex items-center pl-3">
<Search className="h-5 w-5 text-secondary-400" />
</div>
<input
ref={inputRef}
type="text"
className="block w-full rounded-lg border border-secondary-200 bg-white py-2 pl-10 pr-10 text-sm text-secondary-900 placeholder-secondary-500 focus:border-primary-500 focus:outline-none focus:ring-2 focus:ring-primary-500 dark:border-secondary-600 dark:bg-secondary-700 dark:text-white dark:placeholder-secondary-400"
placeholder="Search hosts, packages, repos, users..."
value={query}
onChange={handleInputChange}
onKeyDown={handleKeyDown}
onFocus={() => {
if (query && results) setIsOpen(true);
}}
/>
{query && (
<button
type="button"
onClick={handleClear}
className="absolute inset-y-0 right-0 flex items-center pr-3 text-secondary-400 hover:text-secondary-600"
>
<X className="h-4 w-4" />
</button>
)}
</div>

{/* Dropdown Results */}
{isOpen && (
<div className="absolute z-50 mt-2 w-full rounded-lg border border-secondary-200 bg-white shadow-lg dark:border-secondary-600 dark:bg-secondary-800">
{isLoading ? (
<div className="px-4 py-2 text-center text-sm text-secondary-500">
Searching...
</div>
) : hasResults ? (
<div className="max-h-96 overflow-y-auto">
{/* Hosts */}
{results.hosts?.length > 0 && (
<div>
<div className="sticky top-0 z-10 bg-secondary-50 px-3 py-1.5 text-xs font-semibold uppercase tracking-wider text-secondary-500 dark:bg-secondary-700 dark:text-secondary-400">
Hosts
</div>
{results.hosts.map((host, _idx) => {
const display = getResultDisplay(host);
const globalIdx = navigableResults.findIndex(
(r) => r.id === host.id && r.type === "host",
);
return (
<button
type="button"
key={host.id}
onClick={() => handleResultClick(host)}
className={`flex w-full items-center gap-2 px-3 py-1.5 text-left transition-colors ${
globalIdx === selectedIndex
? "bg-primary-50 dark:bg-primary-900/20"
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
}`}
>
{getResultIcon("host")}
<div className="flex-1 min-w-0 flex items-center gap-2">
<span className="text-sm font-medium text-secondary-900 dark:text-white truncate">
{display.primary}
</span>
<span className="text-xs text-secondary-400">•</span>
<span className="text-xs text-secondary-500 dark:text-secondary-400 truncate">
{display.secondary}
</span>
</div>
<div className="flex-shrink-0 text-xs text-secondary-400">
{host.os_type}
</div>
</button>
);
})}
</div>
)}

{/* Packages */}
{results.packages?.length > 0 && (
<div>
<div className="sticky top-0 z-10 bg-secondary-50 px-3 py-1.5 text-xs font-semibold uppercase tracking-wider text-secondary-500 dark:bg-secondary-700 dark:text-secondary-400">
Packages
</div>
{results.packages.map((pkg, _idx) => {
const display = getResultDisplay(pkg);
const globalIdx = navigableResults.findIndex(
(r) => r.id === pkg.id && r.type === "package",
);
return (
<button
type="button"
key={pkg.id}
onClick={() => handleResultClick(pkg)}
className={`flex w-full items-center gap-2 px-3 py-1.5 text-left transition-colors ${
globalIdx === selectedIndex
? "bg-primary-50 dark:bg-primary-900/20"
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
}`}
>
{getResultIcon("package")}
<div className="flex-1 min-w-0 flex items-center gap-2">
<span className="text-sm font-medium text-secondary-900 dark:text-white truncate">
{display.primary}
</span>
{display.secondary && (
<>
<span className="text-xs text-secondary-400">
•
</span>
<span className="text-xs text-secondary-500 dark:text-secondary-400 truncate">
{display.secondary}
</span>
</>
)}
</div>
<div className="flex-shrink-0 text-xs text-secondary-400">
{pkg.host_count} hosts
</div>
</button>
);
})}
</div>
)}

{/* Repositories */}
{results.repositories?.length > 0 && (
<div>
<div className="sticky top-0 z-10 bg-secondary-50 px-3 py-1.5 text-xs font-semibold uppercase tracking-wider text-secondary-500 dark:bg-secondary-700 dark:text-secondary-400">
Repositories
</div>
{results.repositories.map((repo, _idx) => {
const display = getResultDisplay(repo);
const globalIdx = navigableResults.findIndex(
(r) => r.id === repo.id && r.type === "repository",
);
return (
<button
type="button"
key={repo.id}
onClick={() => handleResultClick(repo)}
className={`flex w-full items-center gap-2 px-3 py-1.5 text-left transition-colors ${
globalIdx === selectedIndex
? "bg-primary-50 dark:bg-primary-900/20"
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
}`}
>
{getResultIcon("repository")}
<div className="flex-1 min-w-0 flex items-center gap-2">
<span className="text-sm font-medium text-secondary-900 dark:text-white truncate">
{display.primary}
</span>
<span className="text-xs text-secondary-400">•</span>
<span className="text-xs text-secondary-500 dark:text-secondary-400 truncate">
{display.secondary}
</span>
</div>
<div className="flex-shrink-0 text-xs text-secondary-400">
{repo.host_count} hosts
</div>
</button>
);
})}
</div>
)}

{/* Users */}
{results.users?.length > 0 && (
<div>
<div className="sticky top-0 z-10 bg-secondary-50 px-3 py-1.5 text-xs font-semibold uppercase tracking-wider text-secondary-500 dark:bg-secondary-700 dark:text-secondary-400">
Users
</div>
{results.users.map((user, _idx) => {
const display = getResultDisplay(user);
const globalIdx = navigableResults.findIndex(
(r) => r.id === user.id && r.type === "user",
);
return (
<button
type="button"
key={user.id}
onClick={() => handleResultClick(user)}
className={`flex w-full items-center gap-2 px-3 py-1.5 text-left transition-colors ${
globalIdx === selectedIndex
? "bg-primary-50 dark:bg-primary-900/20"
: "hover:bg-secondary-50 dark:hover:bg-secondary-700"
}`}
>
{getResultIcon("user")}
<div className="flex-1 min-w-0 flex items-center gap-2">
<span className="text-sm font-medium text-secondary-900 dark:text-white truncate">
{display.primary}
</span>
<span className="text-xs text-secondary-400">•</span>
<span className="text-xs text-secondary-500 dark:text-secondary-400 truncate">
{display.secondary}
</span>
</div>
<div className="flex-shrink-0 text-xs text-secondary-400">
{user.role}
</div>
</button>
);
})}
</div>
)}
</div>
) : query.trim() ? (
<div className="px-4 py-2 text-center text-sm text-secondary-500">
No results found for "{query}"
</div>
) : null}
</div>
)}
</div>
);
};

export default GlobalSearch;
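The search box debounces keystrokes by 300 ms through a ref-held timer so only the final value in a burst hits the API. The same idea as a reusable hook (a sketch, not code from the repo):

import { useEffect, useRef } from "react";

// Returns a wrapped callback that only fires after `delayMs` of silence.
function useDebouncedCallback(callback, delayMs) {
  const timerRef = useRef(null);
  // Cancel any pending call on unmount.
  useEffect(() => () => clearTimeout(timerRef.current), []);
  return (...args) => {
    clearTimeout(timerRef.current);
    timerRef.current = setTimeout(() => callback(...args), delayMs);
  };
}
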
@@ -1,6 +1,6 @@
import React, { useState, useRef, useEffect } from 'react';
import { Edit2, Check, X } from 'lucide-react';
import { Link } from 'react-router-dom';
import { Check, Edit2, X } from "lucide-react";
import { useEffect, useRef, useState } from "react";
import { Link } from "react-router-dom";

const InlineEdit = ({
value,
@@ -11,12 +11,12 @@ const InlineEdit = ({
className = "",
disabled = false,
validate = null,
linkTo = null
linkTo = null,
}) => {
const [isEditing, setIsEditing] = useState(false);
const [editValue, setEditValue] = useState(value);
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState('');
const [error, setError] = useState("");
const inputRef = useRef(null);

useEffect(() => {
@@ -34,13 +34,13 @@ const InlineEdit = ({
if (disabled) return;
setIsEditing(true);
setEditValue(value);
setError('');
setError("");
};

const handleCancel = () => {
setIsEditing(false);
setEditValue(value);
setError('');
setError("");
if (onCancel) onCancel();
};

@@ -63,23 +63,23 @@ const InlineEdit = ({
}

setIsLoading(true);
setError('');
setError("");

try {
await onSave(editValue.trim());
setIsEditing(false);
} catch (err) {
setError(err.message || 'Failed to save');
setError(err.message || "Failed to save");
} finally {
setIsLoading(false);
}
};

const handleKeyDown = (e) => {
if (e.key === 'Enter') {
if (e.key === "Enter") {
e.preventDefault();
handleSave();
} else if (e.key === 'Escape') {
} else if (e.key === "Escape") {
e.preventDefault();
handleCancel();
}
@@ -98,18 +98,20 @@ const InlineEdit = ({
maxLength={maxLength}
disabled={isLoading}
className={`flex-1 px-2 py-1 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent ${
error ? 'border-red-500' : ''
} ${isLoading ? 'opacity-50' : ''}`}
error ? "border-red-500" : ""
} ${isLoading ? "opacity-50" : ""}`}
/>
<button
type="button"
onClick={handleSave}
disabled={isLoading || editValue.trim() === ''}
disabled={isLoading || editValue.trim() === ""}
className="p-1 text-green-600 hover:text-green-700 hover:bg-green-50 dark:hover:bg-green-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
title="Save"
>
<Check className="h-4 w-4" />
</button>
<button
type="button"
onClick={handleCancel}
disabled={isLoading}
className="p-1 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
@@ -118,7 +120,9 @@ const InlineEdit = ({
<X className="h-4 w-4" />
</button>
{error && (
<span className="text-xs text-red-600 dark:text-red-400">{error}</span>
<span className="text-xs text-red-600 dark:text-red-400">
{error}
</span>
)}
</div>
);
@@ -143,6 +147,7 @@ const InlineEdit = ({
{displayValue}
{!disabled && (
<button
type="button"
onClick={handleEdit}
className="p-1 text-secondary-400 hover:text-secondary-600 dark:hover:text-secondary-300 hover:bg-secondary-100 dark:hover:bg-secondary-700 rounded transition-colors opacity-0 group-hover:opacity-100"
title="Edit"

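Callers drive InlineEdit entirely through props: onSave may be async, and a thrown error becomes the inline message next to the field. A hypothetical usage (hostsAPI.update and the exact validate contract are assumptions, not repo code):

<InlineEdit
  value={host.friendly_name}
  maxLength={100}
  validate={(v) => (v.trim().length < 2 ? "Name is too short" : null)}
  onSave={async (next) => {
    // Rejecting here keeps the field in edit mode and shows err.message.
    await hostsAPI.update(host.id, { friendly_name: next });
  }}
/>
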
@@ -1,5 +1,5 @@
import React, { useState, useRef, useEffect, useMemo } from 'react';
import { Edit2, Check, X, ChevronDown } from 'lucide-react';
import { Check, ChevronDown, Edit2, X } from "lucide-react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";

const InlineGroupEdit = ({
value,
@@ -8,14 +8,17 @@ const InlineGroupEdit = ({
options = [],
className = "",
disabled = false,
placeholder = "Select group..."
}) => {
const [isEditing, setIsEditing] = useState(false);
const [selectedValue, setSelectedValue] = useState(value);
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState('');
const [error, setError] = useState("");
const [isOpen, setIsOpen] = useState(false);
const [dropdownPosition, setDropdownPosition] = useState({ top: 0, left: 0, width: 0 });
const [dropdownPosition, setDropdownPosition] = useState({
top: 0,
left: 0,
width: 0,
});
const dropdownRef = useRef(null);
const buttonRef = useRef(null);

@@ -34,16 +37,16 @@ const InlineGroupEdit = ({
}, [value, isEditing]);

// Calculate dropdown position
const calculateDropdownPosition = () => {
const calculateDropdownPosition = useCallback(() => {
if (buttonRef.current) {
const rect = buttonRef.current.getBoundingClientRect();
setDropdownPosition({
top: rect.bottom + window.scrollY + 4,
left: rect.left + window.scrollX,
width: rect.width
width: rect.width,
});
}
};
}, []);

// Close dropdown when clicking outside
useEffect(() => {
@@ -55,22 +58,22 @@ const InlineGroupEdit = ({

if (isOpen) {
calculateDropdownPosition();
document.addEventListener('mousedown', handleClickOutside);
window.addEventListener('resize', calculateDropdownPosition);
window.addEventListener('scroll', calculateDropdownPosition);
document.addEventListener("mousedown", handleClickOutside);
window.addEventListener("resize", calculateDropdownPosition);
window.addEventListener("scroll", calculateDropdownPosition);
return () => {
document.removeEventListener('mousedown', handleClickOutside);
window.removeEventListener('resize', calculateDropdownPosition);
window.removeEventListener('scroll', calculateDropdownPosition);
document.removeEventListener("mousedown", handleClickOutside);
window.removeEventListener("resize", calculateDropdownPosition);
window.removeEventListener("scroll", calculateDropdownPosition);
};
}
}, [isOpen]);
}, [isOpen, calculateDropdownPosition]);

const handleEdit = () => {
if (disabled) return;
setIsEditing(true);
setSelectedValue(value);
setError('');
setError("");
// Automatically open dropdown when editing starts
setTimeout(() => {
setIsOpen(true);
@@ -80,7 +83,7 @@ const InlineGroupEdit = ({
const handleCancel = () => {
setIsEditing(false);
setSelectedValue(value);
setError('');
setError("");
setIsOpen(false);
if (onCancel) onCancel();
};
@@ -96,7 +99,7 @@ const InlineGroupEdit = ({
}

setIsLoading(true);
setError('');
setError("");

try {
await onSave(selectedValue);
@@ -105,17 +108,17 @@ const InlineGroupEdit = ({
setIsEditing(false);
setIsOpen(false);
} catch (err) {
setError(err.message || 'Failed to save');
setError(err.message || "Failed to save");
} finally {
setIsLoading(false);
}
};

const handleKeyDown = (e) => {
if (e.key === 'Enter') {
if (e.key === "Enter") {
e.preventDefault();
handleSave();
} else if (e.key === 'Escape') {
} else if (e.key === "Escape") {
e.preventDefault();
handleCancel();
}
@@ -123,20 +126,20 @@ const InlineGroupEdit = ({

const displayValue = useMemo(() => {
if (!value) {
return 'Ungrouped';
return "Ungrouped";
}
const option = options.find(opt => opt.id === value);
return option ? option.name : 'Unknown Group';
const option = options.find((opt) => opt.id === value);
return option ? option.name : "Unknown Group";
}, [value, options]);

const displayColor = useMemo(() => {
if (!value) return 'bg-secondary-100 text-secondary-800';
const option = options.find(opt => opt.id === value);
return option ? `text-white` : 'bg-secondary-100 text-secondary-800';
if (!value) return "bg-secondary-100 text-secondary-800";
const option = options.find((opt) => opt.id === value);
return option ? `text-white` : "bg-secondary-100 text-secondary-800";
}, [value, options]);

const selectedOption = useMemo(() => {
return options.find(opt => opt.id === value);
return options.find((opt) => opt.id === value);
}, [value, options]);

if (isEditing) {
@@ -151,11 +154,14 @@ const InlineGroupEdit = ({
onKeyDown={handleKeyDown}
disabled={isLoading}
className={`w-full px-3 py-1 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent flex items-center justify-between ${
error ? 'border-red-500' : ''
} ${isLoading ? 'opacity-50' : ''}`}
error ? "border-red-500" : ""
} ${isLoading ? "opacity-50" : ""}`}
>
<span className="truncate">
{selectedValue ? options.find(opt => opt.id === selectedValue)?.name || 'Unknown Group' : 'Ungrouped'}
{selectedValue
? options.find((opt) => opt.id === selectedValue)?.name ||
"Unknown Group"
: "Ungrouped"}
</span>
<ChevronDown className="h-4 w-4 flex-shrink-0" />
</button>
@@ -167,7 +173,7 @@ const InlineGroupEdit = ({
top: `${dropdownPosition.top}px`,
left: `${dropdownPosition.left}px`,
width: `${dropdownPosition.width}px`,
minWidth: '200px'
minWidth: "200px",
}}
>
<div className="py-1">
@@ -178,7 +184,9 @@ const InlineGroupEdit = ({
setIsOpen(false);
}}
className={`w-full px-3 py-2 text-left text-sm hover:bg-secondary-100 dark:hover:bg-secondary-700 flex items-center ${
selectedValue === null ? 'bg-primary-50 dark:bg-primary-900/20' : ''
selectedValue === null
? "bg-primary-50 dark:bg-primary-900/20"
: ""
}`}
>
<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800">
@@ -194,7 +202,9 @@ const InlineGroupEdit = ({
setIsOpen(false);
}}
className={`w-full px-3 py-2 text-left text-sm hover:bg-secondary-100 dark:hover:bg-secondary-700 flex items-center ${
selectedValue === option.id ? 'bg-primary-50 dark:bg-primary-900/20' : ''
selectedValue === option.id
? "bg-primary-50 dark:bg-primary-900/20"
: ""
}`}
>
<span
@@ -210,6 +220,7 @@ const InlineGroupEdit = ({
)}
</div>
<button
type="button"
onClick={handleSave}
disabled={isLoading}
className="p-1 text-green-600 hover:text-green-700 hover:bg-green-50 dark:hover:bg-green-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
@@ -218,6 +229,7 @@ const InlineGroupEdit = ({
<Check className="h-4 w-4" />
</button>
<button
type="button"
onClick={handleCancel}
disabled={isLoading}
className="p-1 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
@@ -227,7 +239,9 @@ const InlineGroupEdit = ({
</button>
</div>
{error && (
<span className="text-xs text-red-600 dark:text-red-400 mt-1 block">{error}</span>
<span className="text-xs text-red-600 dark:text-red-400 mt-1 block">
{error}
</span>
)}
</div>
);
@@ -243,6 +257,7 @@ const InlineGroupEdit = ({
</span>
{!disabled && (
<button
type="button"
onClick={handleEdit}
className="p-1 text-secondary-400 hover:text-secondary-600 dark:hover:text-secondary-300 hover:bg-secondary-100 dark:hover:bg-secondary-700 rounded transition-colors opacity-0 group-hover:opacity-100"
title="Edit group"

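Wrapping calculateDropdownPosition in useCallback is what makes the effect above sound: the function appears in the effect's dependency list, so a stable identity keeps the resize/scroll listeners from being torn down and re-attached on every render. The wiring pattern, reduced to its essentials (a sketch of the code above, not a new API):

// Stable measuring function: reads the trigger's rect and converts it
// to page coordinates (the dropdown is positioned absolutely on the page,
// so the scroll offsets must be added in).
const calc = useCallback(() => {
  const rect = buttonRef.current.getBoundingClientRect();
  setDropdownPosition({
    top: rect.bottom + window.scrollY + 4, // 4px gap below the trigger
    left: rect.left + window.scrollX,
    width: rect.width,
  });
}, []);

useEffect(() => {
  if (!isOpen) return;
  calc();
  window.addEventListener("resize", calc);
  window.addEventListener("scroll", calc);
  return () => {
    window.removeEventListener("resize", calc);
    window.removeEventListener("scroll", calc);
  };
}, [isOpen, calc]);
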
283  frontend/src/components/InlineMultiGroupEdit.jsx  Normal file
@@ -0,0 +1,283 @@
import { Check, ChevronDown, Edit2, X } from "lucide-react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";

const InlineMultiGroupEdit = ({
	value = [], // Array of group IDs
	onSave,
	onCancel,
	options = [],
	className = "",
	disabled = false,
}) => {
	const [isEditing, setIsEditing] = useState(false);
	const [selectedValues, setSelectedValues] = useState(value);
	const [isLoading, setIsLoading] = useState(false);
	const [error, setError] = useState("");
	const [isOpen, setIsOpen] = useState(false);
	const [dropdownPosition, setDropdownPosition] = useState({
		top: 0,
		left: 0,
		width: 0,
	});
	const dropdownRef = useRef(null);
	const buttonRef = useRef(null);

	useEffect(() => {
		if (isEditing && dropdownRef.current) {
			dropdownRef.current.focus();
		}
	}, [isEditing]);

	useEffect(() => {
		setSelectedValues(value);
		// Force re-render when value changes
		if (!isEditing) {
			setIsOpen(false);
		}
	}, [value, isEditing]);

	// Calculate dropdown position
	const calculateDropdownPosition = useCallback(() => {
		if (buttonRef.current) {
			const rect = buttonRef.current.getBoundingClientRect();
			setDropdownPosition({
				top: rect.bottom + window.scrollY + 4,
				left: rect.left + window.scrollX,
				width: rect.width,
			});
		}
	}, []);

	// Close dropdown when clicking outside
	useEffect(() => {
		const handleClickOutside = (event) => {
			if (dropdownRef.current && !dropdownRef.current.contains(event.target)) {
				setIsOpen(false);
			}
		};

		if (isOpen) {
			calculateDropdownPosition();
			document.addEventListener("mousedown", handleClickOutside);
			window.addEventListener("resize", calculateDropdownPosition);
			window.addEventListener("scroll", calculateDropdownPosition);
			return () => {
				document.removeEventListener("mousedown", handleClickOutside);
				window.removeEventListener("resize", calculateDropdownPosition);
				window.removeEventListener("scroll", calculateDropdownPosition);
			};
		}
	}, [isOpen, calculateDropdownPosition]);

	const handleEdit = () => {
		if (disabled) return;
		setIsEditing(true);
		setSelectedValues(value);
		setError("");
		// Automatically open dropdown when editing starts
		setTimeout(() => {
			setIsOpen(true);
		}, 0);
	};

	const handleCancel = () => {
		setIsEditing(false);
		setSelectedValues(value);
		setError("");
		setIsOpen(false);
		if (onCancel) onCancel();
	};

	const handleSave = async () => {
		if (disabled || isLoading) return;

		// Check if values actually changed
		const sortedCurrent = [...value].sort();
		const sortedSelected = [...selectedValues].sort();
		if (JSON.stringify(sortedCurrent) === JSON.stringify(sortedSelected)) {
			setIsEditing(false);
			setIsOpen(false);
			return;
		}

		setIsLoading(true);
		setError("");

		try {
			await onSave(selectedValues);
			setIsEditing(false);
			setIsOpen(false);
		} catch (err) {
			setError(err.message || "Failed to save");
		} finally {
			setIsLoading(false);
		}
	};

	const handleKeyDown = (e) => {
		if (e.key === "Enter") {
			e.preventDefault();
			handleSave();
		} else if (e.key === "Escape") {
			e.preventDefault();
			handleCancel();
		}
	};

	const toggleGroup = (groupId) => {
		setSelectedValues((prev) => {
			if (prev.includes(groupId)) {
				return prev.filter((id) => id !== groupId);
			} else {
				return [...prev, groupId];
			}
		});
	};

	const _displayValue = useMemo(() => {
		if (!value || value.length === 0) {
			return "Ungrouped";
		}
		if (value.length === 1) {
			const option = options.find((opt) => opt.id === value[0]);
			return option ? option.name : "Unknown Group";
		}
		return `${value.length} groups`;
	}, [value, options]);

	const displayGroups = useMemo(() => {
		if (!value || value.length === 0) {
			return [];
		}
		return value
			.map((groupId) => options.find((opt) => opt.id === groupId))
			.filter(Boolean);
	}, [value, options]);

	if (isEditing) {
		return (
			<div className={`relative ${className}`} ref={dropdownRef}>
				<div className="flex items-center gap-2">
					<div className="relative flex-1">
						<button
							ref={buttonRef}
							type="button"
							onClick={() => setIsOpen(!isOpen)}
							onKeyDown={handleKeyDown}
							disabled={isLoading}
							className={`w-full px-3 py-1 text-sm border border-secondary-300 dark:border-secondary-600 rounded-md bg-white dark:bg-secondary-800 text-secondary-900 dark:text-white focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent flex items-center justify-between ${
								error ? "border-red-500" : ""
							} ${isLoading ? "opacity-50" : ""}`}
						>
							<span className="truncate">
								{selectedValues.length === 0
									? "Ungrouped"
									: selectedValues.length === 1
										? options.find((opt) => opt.id === selectedValues[0])
												?.name || "Unknown Group"
										: `${selectedValues.length} groups selected`}
							</span>
							<ChevronDown className="h-4 w-4 flex-shrink-0" />
						</button>

						{isOpen && (
							<div
								className="fixed z-50 bg-white dark:bg-secondary-800 border border-secondary-300 dark:border-secondary-600 rounded-md shadow-lg max-h-60 overflow-auto"
								style={{
									top: `${dropdownPosition.top}px`,
									left: `${dropdownPosition.left}px`,
									width: `${dropdownPosition.width}px`,
									minWidth: "200px",
								}}
							>
								<div className="py-1">
									{options.map((option) => (
										<label
											key={option.id}
											className="w-full px-3 py-2 text-left text-sm hover:bg-secondary-100 dark:hover:bg-secondary-700 flex items-center cursor-pointer"
										>
											<input
												type="checkbox"
												checked={selectedValues.includes(option.id)}
												onChange={() => toggleGroup(option.id)}
												className="mr-2 h-4 w-4 text-primary-600 focus:ring-primary-500 border-secondary-300 rounded"
											/>
											<span
												className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium text-white"
												style={{ backgroundColor: option.color }}
											>
												{option.name}
											</span>
										</label>
									))}
									{options.length === 0 && (
										<div className="px-3 py-2 text-sm text-secondary-500 dark:text-secondary-400">
											No groups available
										</div>
									)}
								</div>
							</div>
						)}
					</div>
					<button
						type="button"
						onClick={handleSave}
						disabled={isLoading}
						className="p-1 text-green-600 hover:text-green-700 hover:bg-green-50 dark:hover:bg-green-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
						title="Save"
					>
						<Check className="h-4 w-4" />
					</button>
					<button
						type="button"
						onClick={handleCancel}
						disabled={isLoading}
						className="p-1 text-red-600 hover:text-red-700 hover:bg-red-50 dark:hover:bg-red-900/20 rounded transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
						title="Cancel"
					>
						<X className="h-4 w-4" />
					</button>
				</div>
				{error && (
					<span className="text-xs text-red-600 dark:text-red-400 mt-1 block">
						{error}
					</span>
				)}
			</div>
		);
	}

	return (
		<div className={`flex items-center gap-1 group ${className}`}>
			{displayGroups.length === 0 ? (
				<span className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium bg-secondary-100 text-secondary-800">
					Ungrouped
				</span>
			) : (
				<div className="flex items-center gap-1 flex-wrap">
					{displayGroups.map((group) => (
						<span
							key={group.id}
							className="inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium text-white"
							style={{ backgroundColor: group.color }}
						>
							{group.name}
						</span>
					))}
				</div>
			)}
			{!disabled && (
				<button
					type="button"
					onClick={handleEdit}
					className="p-1 text-secondary-400 hover:text-secondary-600 dark:hover:text-secondary-300 hover:bg-secondary-100 dark:hover:bg-secondary-700 rounded transition-colors opacity-0 group-hover:opacity-100"
					title="Edit groups"
				>
					<Edit2 className="h-3 w-3" />
				</button>
			)}
		</div>
	);
};

export default InlineMultiGroupEdit;
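Since this new file defines the full prop contract (value as an array of group IDs, options as objects with id/name/color, an awaited onSave whose thrown error surfaces as inline text), a minimal usage sketch may help. The names host, groupOptions, and updateHostGroups are hypothetical, not part of this diff.

import InlineMultiGroupEdit from "./InlineMultiGroupEdit";

// Hypothetical parent cell; `host` and `updateHostGroups` are illustrative only.
const HostGroupsCell = ({ host, groupOptions, updateHostGroups }) => (
	<InlineMultiGroupEdit
		value={host.groupIds} // array of group IDs, per the component's prop comment
		options={groupOptions} // [{ id, name, color }]
		onSave={(groupIds) => updateHostGroups(host.id, groupIds)} // a thrown error shows inline
	/>
);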
80  frontend/src/components/InlineToggle.jsx  Normal file
@@ -0,0 +1,80 @@
import { useState } from "react";

const InlineToggle = ({
	value,
	onSave,
	className = "",
	disabled = false,
	trueLabel = "Yes",
	falseLabel = "No",
}) => {
	const [isLoading, setIsLoading] = useState(false);
	const [error, setError] = useState("");

	const handleSave = async (newValue) => {
		if (disabled || isLoading) return;

		// Check if value actually changed
		if (newValue === value) {
			return;
		}

		setIsLoading(true);
		setError("");

		try {
			await onSave(newValue);
		} catch (err) {
			setError(err.message || "Failed to save");
		} finally {
			setIsLoading(false);
		}
	};

	const handleToggle = () => {
		if (disabled || isLoading) return;
		handleSave(!value);
	};

	const displayValue = (
		<span
			className={`text-sm font-medium ${
				value
					? "text-green-600 dark:text-green-400"
					: "text-red-600 dark:text-red-400"
			}`}
		>
			{value ? trueLabel : falseLabel}
		</span>
	);

	return (
		<div className={`flex items-center gap-2 group ${className}`}>
			{displayValue}
			{!disabled && (
				<button
					type="button"
					onClick={handleToggle}
					disabled={isLoading}
					className={`relative inline-flex h-5 w-9 items-center rounded-full transition-colors focus:outline-none focus:ring-2 focus:ring-primary-500 focus:ring-offset-2 disabled:opacity-50 disabled:cursor-not-allowed ${
						value
							? "bg-primary-600 dark:bg-primary-500"
							: "bg-secondary-200 dark:bg-secondary-600"
					}`}
					title={`Toggle ${value ? "off" : "on"}`}
				>
					<span
						className={`inline-block h-3 w-3 transform rounded-full bg-white transition-transform ${
							value ? "translate-x-5" : "translate-x-1"
						}`}
					/>
				</button>
			)}
			{error && (
				<span className="text-xs text-red-600 dark:text-red-400">{error}</span>
			)}
		</div>
	);
};

export default InlineToggle;
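A minimal call-site sketch for InlineToggle, consistent with its props (boolean value, labels, onSave that may reject); host and setAutoUpdate are hypothetical names, not part of this diff.

import InlineToggle from "./InlineToggle";

// Hypothetical cell; `host` and `setAutoUpdate` are illustrative only.
const AutoUpdateCell = ({ host, setAutoUpdate }) => (
	<InlineToggle
		value={host.auto_update}
		trueLabel="Enabled"
		falseLabel="Disabled"
		onSave={(enabled) => setAutoUpdate(host.id, enabled)} // rejection surfaces as inline error text
	/>
);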
File diff suppressed because one or more lines are too long
44  frontend/src/components/Logo.jsx  Normal file
@@ -0,0 +1,44 @@
import { useQuery } from "@tanstack/react-query";
import { useTheme } from "../contexts/ThemeContext";
import { settingsAPI } from "../utils/api";

const Logo = ({
	className = "h-8 w-auto",
	alt = "PatchMon Logo",
	...props
}) => {
	const { isDark } = useTheme();

	const { data: settings } = useQuery({
		queryKey: ["settings"],
		queryFn: () => settingsAPI.get().then((res) => res.data),
	});

	// Determine which logo to use based on theme
	const logoSrc = isDark
		? settings?.logo_dark || "/assets/logo_dark.png"
		: settings?.logo_light || "/assets/logo_light.png";

	// Add cache-busting parameter using updated_at timestamp
	const cacheBuster = settings?.updated_at
		? new Date(settings.updated_at).getTime()
		: Date.now();
	const logoSrcWithCache = `${logoSrc}?v=${cacheBuster}`;

	return (
		<img
			src={logoSrcWithCache}
			alt={alt}
			className={className}
			onError={(e) => {
				// Fallback to default logo if custom logo fails to load
				e.target.src = isDark
					? "/assets/logo_dark.png"
					: "/assets/logo_light.png";
			}}
			{...props}
		/>
	);
};

export default Logo;
42  frontend/src/components/LogoProvider.jsx  Normal file
@@ -0,0 +1,42 @@
import { useQuery } from "@tanstack/react-query";
import { useEffect } from "react";
import { isAuthReady } from "../constants/authPhases";
import { useAuth } from "../contexts/AuthContext";
import { settingsAPI } from "../utils/api";

const LogoProvider = ({ children }) => {
	const { authPhase, isAuthenticated } = useAuth();

	const { data: settings } = useQuery({
		queryKey: ["settings"],
		queryFn: () => settingsAPI.get().then((res) => res.data),
		enabled: isAuthReady(authPhase, isAuthenticated()),
	});

	useEffect(() => {
		// Use custom favicon or fallback to default
		const faviconUrl = settings?.favicon || "/assets/favicon.svg";

		// Add cache-busting parameter using updated_at timestamp
		const cacheBuster = settings?.updated_at
			? new Date(settings.updated_at).getTime()
			: Date.now();
		const faviconUrlWithCache = `${faviconUrl}?v=${cacheBuster}`;

		// Update favicon
		const favicon = document.querySelector('link[rel="icon"]');
		if (favicon) {
			favicon.href = faviconUrlWithCache;
		} else {
			// Create favicon link if it doesn't exist
			const link = document.createElement("link");
			link.rel = "icon";
			link.href = faviconUrlWithCache;
			document.head.appendChild(link);
		}
	}, [settings?.favicon, settings?.updated_at]);

	return children;
};

export default LogoProvider;
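Taken together, a plausible composition of the two logo components: LogoProvider mounts once so the favicon tracks branding settings, while Logo renders the themed image wherever it is needed. The App shell below is illustrative only, not taken from this diff.

import Logo from "./components/Logo";
import LogoProvider from "./components/LogoProvider";

// Illustrative shell; the real App composition is not part of this diff.
const App = () => (
	<LogoProvider>
		<header>
			<Logo className="h-10 w-auto" />
		</header>
		{/* ...routes and pages... */}
	</LogoProvider>
);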
@@ -1,20 +1,23 @@
-import React from 'react'
-import { Navigate } from 'react-router-dom'
-import { useAuth } from '../contexts/AuthContext'
+import { Navigate } from "react-router-dom";
+import { useAuth } from "../contexts/AuthContext";
 
-const ProtectedRoute = ({ children, requireAdmin = false, requirePermission = null }) => {
-  const { isAuthenticated, isAdmin, isLoading, hasPermission } = useAuth()
+const ProtectedRoute = ({
+  children,
+  requireAdmin = false,
+  requirePermission = null,
+}) => {
+  const { isAuthenticated, isAdmin, isLoading, hasPermission } = useAuth();
 
   if (isLoading) {
     return (
       <div className="flex items-center justify-center h-64">
         <div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary-600"></div>
       </div>
-    )
+    );
   }
 
   if (!isAuthenticated()) {
-    return <Navigate to="/login" replace />
+    return <Navigate to="/login" replace />;
   }
 
   // Check admin requirement
@@ -22,11 +25,15 @@ const ProtectedRoute = ({ children, requireAdmin = false, requirePermission = nu
     return (
       <div className="flex items-center justify-center h-64">
         <div className="text-center">
-          <h2 className="text-xl font-semibold text-secondary-900 mb-2">Access Denied</h2>
-          <p className="text-secondary-600">You don't have permission to access this page.</p>
+          <h2 className="text-xl font-semibold text-secondary-900 mb-2">
+            Access Denied
+          </h2>
+          <p className="text-secondary-600">
+            You don't have permission to access this page.
+          </p>
         </div>
       </div>
-    )
+    );
   }
 
   // Check specific permission requirement
@@ -34,14 +41,18 @@ const ProtectedRoute = ({ children, requireAdmin = false, requirePermission = nu
     return (
       <div className="flex items-center justify-center h-64">
         <div className="text-center">
-          <h2 className="text-xl font-semibold text-secondary-900 mb-2">Access Denied</h2>
-          <p className="text-secondary-600">You don't have permission to access this page.</p>
+          <h2 className="text-xl font-semibold text-secondary-900 mb-2">
+            Access Denied
+          </h2>
+          <p className="text-secondary-600">
+            You don't have permission to access this page.
+          </p>
         </div>
       </div>
-    )
+    );
   }
 
-  return children
-}
+  return children;
+};
 
-export default ProtectedRoute
+export default ProtectedRoute;
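For reference, a hypothetical route wiring consistent with ProtectedRoute's prop list (children, requireAdmin, requirePermission); the path, the permission string, and the UsersPage element are illustrative assumptions, not taken from this diff.

import { Route, Routes } from "react-router-dom";
import ProtectedRoute from "./components/ProtectedRoute";

// Hypothetical page and path; real routes live elsewhere in the app.
const UsersPage = () => <div>Users</div>;

const AppRoutes = () => (
	<Routes>
		<Route
			path="/settings/users"
			element={
				<ProtectedRoute requirePermission="can_view_users">
					<UsersPage />
				</ProtectedRoute>
			}
		/>
	</Routes>
);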
279  frontend/src/components/SettingsLayout.jsx  Normal file
@@ -0,0 +1,279 @@
import {
	Bell,
	ChevronLeft,
	ChevronRight,
	Code,
	Folder,
	Image,
	RefreshCw,
	Settings,
	Shield,
	UserCircle,
	Users,
	Wrench,
} from "lucide-react";
import { useState } from "react";
import { Link, useLocation } from "react-router-dom";
import { useAuth } from "../contexts/AuthContext";

const SettingsLayout = ({ children }) => {
	const location = useLocation();
	const { canManageSettings, canViewUsers, canManageUsers } = useAuth();
	const [sidebarCollapsed, setSidebarCollapsed] = useState(false);

	// Build secondary navigation based on permissions
	const buildSecondaryNavigation = () => {
		const nav = [];

		// Users section
		if (canViewUsers() || canManageUsers()) {
			nav.push({
				section: "User Management",
				items: [
					{
						name: "Users",
						href: "/settings/users",
						icon: Users,
					},
					{
						name: "Roles",
						href: "/settings/roles",
						icon: Shield,
					},
					{
						name: "My Profile",
						href: "/settings/profile",
						icon: UserCircle,
					},
				],
			});
		}

		// Host Groups
		if (canManageSettings()) {
			nav.push({
				section: "Hosts Management",
				items: [
					{
						name: "Host Groups",
						href: "/settings/host-groups",
						icon: Folder,
					},
					{
						name: "Agent Updates",
						href: "/settings/agent-config",
						icon: RefreshCw,
					},
					{
						name: "Agent Version",
						href: "/settings/agent-version",
						icon: Settings,
					},
				],
			});
		}

		// Alert Management
		if (canManageSettings()) {
			nav.push({
				section: "Alert Management",
				items: [
					{
						name: "Alert Channels",
						href: "/settings/alert-channels",
						icon: Bell,
						comingSoon: true,
					},
					{
						name: "Notifications",
						href: "/settings/notifications",
						icon: Bell,
						comingSoon: true,
					},
				],
			});
		}

		// Patch Management
		if (canManageSettings()) {
			nav.push({
				section: "Patch Management",
				items: [
					{
						name: "Policies",
						href: "/settings/patch-management",
						icon: Settings,
						comingSoon: true,
					},
				],
			});
		}

		// Server Config
		if (canManageSettings()) {
			// Integrations section
			nav.push({
				section: "Integrations",
				items: [
					{
						name: "Integrations",
						href: "/settings/integrations",
						icon: Wrench,
					},
				],
			});

			nav.push({
				section: "Server",
				items: [
					{
						name: "URL Config",
						href: "/settings/server-url",
						icon: Wrench,
					},
					{
						name: "Branding",
						href: "/settings/branding",
						icon: Image,
					},
					{
						name: "Server Version",
						href: "/settings/server-version",
						icon: Code,
					},
				],
			});
		}

		return nav;
	};

	const secondaryNavigation = buildSecondaryNavigation();

	const isActive = (path) => location.pathname === path;

	const _getPageTitle = () => {
		const path = location.pathname;

		if (path.startsWith("/settings/users")) return "Users";
		if (path.startsWith("/settings/host-groups")) return "Host Groups";
		if (path.startsWith("/settings/notifications")) return "Notifications";
		if (path.startsWith("/settings/agent-config")) return "Agent Config";
		if (path.startsWith("/settings/server-config")) return "Server Config";

		return "Settings";
	};

	return (
		<div className="bg-transparent">
			{/* Within-page secondary navigation and content */}
			<div className="px-2 sm:px-4 lg:px-6">
				<div className="flex gap-4">
					{/* Left secondary nav (within page) */}
					<aside
						className={`${sidebarCollapsed ? "w-14" : "w-56"} transition-all duration-300 flex-shrink-0`}
					>
						<div className="bg-white dark:bg-secondary-800 border border-secondary-200 dark:border-secondary-600 rounded-lg">
							{/* Collapse button */}
							<div className="flex justify-end p-2 border-b border-secondary-200 dark:border-secondary-600">
								<button
									type="button"
									onClick={() => setSidebarCollapsed(!sidebarCollapsed)}
									className="p-1 text-secondary-400 hover:text-secondary-600 dark:text-secondary-500 dark:hover:text-secondary-300 rounded transition-colors"
									title={
										sidebarCollapsed ? "Expand sidebar" : "Collapse sidebar"
									}
								>
									{sidebarCollapsed ? (
										<ChevronRight className="h-4 w-4" />
									) : (
										<ChevronLeft className="h-4 w-4" />
									)}
								</button>
							</div>

							<div className={`${sidebarCollapsed ? "p-2" : "p-3"}`}>
								<nav>
									<ul
										className={`${sidebarCollapsed ? "space-y-2" : "space-y-4"}`}
									>
										{secondaryNavigation.map((item) => (
											<li key={item.section}>
												{!sidebarCollapsed && (
													<h4 className="text-xs font-semibold text-secondary-500 dark:text-secondary-300 uppercase tracking-wider mb-2">
														{item.section}
													</h4>
												)}
												<ul
													className={`${sidebarCollapsed ? "space-y-1" : "space-y-1"}`}
												>
													{item.items.map((subItem) => (
														<li key={subItem.name}>
															<Link
																to={subItem.href}
																className={`group flex items-center rounded-md text-sm leading-5 font-medium transition-colors ${
																	sidebarCollapsed
																		? "justify-center p-2"
																		: "gap-2 p-2"
																} ${
																	isActive(subItem.href)
																		? "bg-primary-50 dark:bg-primary-600 text-primary-700 dark:text-white"
																		: "text-secondary-700 dark:text-secondary-200 hover:text-primary-700 dark:hover:text-primary-300 hover:bg-secondary-50 dark:hover:bg-secondary-700"
																}`}
																title={sidebarCollapsed ? subItem.name : ""}
															>
																<subItem.icon className="h-4 w-4 flex-shrink-0" />
																{!sidebarCollapsed && (
																	<span className="truncate flex items-center gap-2">
																		{subItem.name}
																		{subItem.comingSoon && (
																			<span className="text-xs bg-secondary-100 text-secondary-600 px-1.5 py-0.5 rounded">
																				Soon
																			</span>
																		)}
																	</span>
																)}
															</Link>

															{!sidebarCollapsed && subItem.subTabs && (
																<ul className="ml-6 mt-1 space-y-1">
																	{subItem.subTabs.map((subTab) => (
																		<li key={subTab.name}>
																			<Link
																				to={subTab.href}
																				className={`block px-3 py-1 text-xs font-medium rounded transition-colors ${
																					isActive(subTab.href)
																						? "bg-primary-100 dark:bg-primary-700 text-primary-700 dark:text-primary-200"
																						: "text-secondary-600 dark:text-secondary-400 hover:text-primary-700 dark:hover:text-primary-300 hover:bg-secondary-50 dark:hover:bg-secondary-700"
																				}`}
																			>
																				{subTab.name}
																			</Link>
																		</li>
																	))}
																</ul>
															)}
														</li>
													))}
												</ul>
											</li>
										))}
									</ul>
								</nav>
							</div>
						</div>
					</aside>

					{/* Right content */}
					<section className="flex-1 min-w-0">
						<div className="bg-white dark:bg-secondary-800 border border-secondary-200 dark:border-secondary-600 rounded-lg p-4">
							{children}
						</div>
					</section>
				</div>
			</div>
		</div>
	);
};

export default SettingsLayout;
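A minimal usage sketch, assuming each settings page wraps itself in the layout so its content lands in the right-hand panel; the UsersPage wrapper below is illustrative only, not taken from this diff.

import SettingsLayout from "./components/SettingsLayout";

// Illustrative page; real settings pages are defined elsewhere.
const UsersPage = () => (
	<SettingsLayout>
		<h1 className="text-lg font-semibold">Users</h1>
	</SettingsLayout>
);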
Some files were not shown because too many files have changed in this diff.