Compare commits
499 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
01ee524049 | ||
|
|
af9cb65338 | ||
|
|
8aa11c580b | ||
|
|
ada627f444 | ||
|
|
a7b6d338c3 | ||
|
|
9f00538b97 | ||
|
|
a085015282 | ||
|
|
0b9c220fbb | ||
|
|
0e3d04873d | ||
|
|
b7578d939f | ||
|
|
b5c28de03f | ||
|
|
e17d25c156 | ||
|
|
c25dc1b99c | ||
|
|
a493a574bd | ||
|
|
4284493dce | ||
|
|
25059de8e1 | ||
|
|
1731b05ad0 | ||
|
|
e80dc663ac | ||
|
|
39988a4c2f | ||
|
|
415bff303a | ||
|
|
a65eb62a54 | ||
|
|
03b2982128 | ||
|
|
bff0527857 | ||
|
|
f3b7634254 | ||
|
|
6a9593c0b9 | ||
|
|
edb785b8e5 | ||
|
|
26d757b50a | ||
|
|
535079ee87 | ||
|
|
ac380c29c1 | ||
|
|
3fd212f26c | ||
|
|
04a3abc651 | ||
|
|
6caf85ddd1 | ||
|
|
16e4071508 | ||
|
|
69e7c4324b | ||
|
|
a1c4a8cbe5 | ||
|
|
e37f6cfda7 | ||
|
|
989c804409 | ||
|
|
7345bc3c82 | ||
|
|
69bee35700 | ||
|
|
598e24df7c | ||
|
|
0ae669201e | ||
|
|
f52a8a4642 | ||
|
|
9c40b61ef2 | ||
|
|
72dabcda83 | ||
|
|
161a06dbcc | ||
|
|
8ed3d4e70c | ||
|
|
a4223ccc8a | ||
|
|
ca85923855 | ||
|
|
52bfe7c493 | ||
|
|
4786bd0cbe | ||
|
|
cadab160ff | ||
|
|
6a7f17b2b0 | ||
|
|
4986a4d775 | ||
|
|
903af0c2cf | ||
|
|
3282fa803c | ||
|
|
67cc47608d | ||
|
|
0411704b8b | ||
|
|
1de85b2c69 | ||
|
|
33b012f29d | ||
|
|
1357584df3 | ||
|
|
e15809e271 | ||
|
|
0da1950427 | ||
|
|
e590b921be | ||
|
|
09462692f5 | ||
|
|
c1d1b5f762 | ||
|
|
6b9c87b858 | ||
|
|
485b6eb904 | ||
|
|
057630bdb5 | ||
|
|
6b02873b30 | ||
|
|
0fa0fc6d6b | ||
|
|
339ec07465 | ||
|
|
cd2e798fea | ||
|
|
d5cadbeae2 | ||
|
|
8046a3ccae | ||
|
|
bf91d60b31 | ||
|
|
539c047ec8 | ||
|
|
290c18fa87 | ||
|
|
98c46f5e57 | ||
|
|
f8bd5b5b4e | ||
|
|
816d32edad | ||
|
|
8453835c05 | ||
|
|
9328c356c8 | ||
|
|
89e3c1fc94 | ||
|
|
67e54cd15d | ||
|
|
278ea24786 | ||
|
|
aba1662631 | ||
|
|
61eeb60c19 | ||
|
|
5e9a8f4806 | ||
|
|
4cb274e9bc | ||
|
|
8b9b1a6a35 | ||
|
|
2655964113 | ||
|
|
188bad061b | ||
|
|
3af4c329aa | ||
|
|
6c13395f7d | ||
|
|
77b32ba360 | ||
|
|
91dba291ac | ||
|
|
a6bc293640 | ||
|
|
53882d6e5f | ||
|
|
d68adfbf10 | ||
|
|
498a392d7f | ||
|
|
740f6c05db | ||
|
|
d810ce301f | ||
|
|
5ef6a14d24 | ||
|
|
a13f6f1e68 | ||
|
|
d2d0f1aaee | ||
|
|
e64c72cc89 | ||
|
|
9ab915a08b | ||
|
|
e26fbf0328 | ||
|
|
d9a52c4a2a | ||
|
|
7b2ec90de9 | ||
|
|
d310bf8bbf | ||
|
|
2abc6cc939 | ||
|
|
56d4e694a2 | ||
|
|
5f002c9cdc | ||
|
|
759daf4b4a | ||
|
|
3a8d9568e3 | ||
|
|
ff22a9d94a | ||
|
|
a6e42d5374 | ||
|
|
a2f74e0488 | ||
|
|
ee44240569 | ||
|
|
d0828744a2 | ||
|
|
6e2e576b29 | ||
|
|
bf61e27f8a | ||
|
|
c441c30b46 | ||
|
|
0e741230ea | ||
|
|
1bfe9ac2db | ||
|
|
6812e72348 | ||
|
|
b6449d2f5b | ||
|
|
7e3ea20dce | ||
|
|
c9d6fe9dcd | ||
|
|
4a649a6b8b | ||
|
|
8fef184963 | ||
|
|
69583ca3c0 | ||
|
|
6038a68e91 | ||
|
|
fa8bd8db87 | ||
|
|
18b4f0ed0f | ||
|
|
461f9d66c9 | ||
|
|
2155103c7a | ||
|
|
c9a6839c45 | ||
|
|
9fbe331a80 | ||
|
|
a56389c4ce | ||
|
|
64656784cb | ||
|
|
6eff2c181e | ||
|
|
1aa48c6d62 | ||
|
|
c7ca1a346d | ||
|
|
fa0ec7b502 | ||
|
|
768438c136 | ||
|
|
9badea0b3c | ||
|
|
43263a1650 | ||
|
|
821e02dc75 | ||
|
|
ed011ecf28 | ||
|
|
d861de4c2f | ||
|
|
3a3b2449dc | ||
|
|
d2614406ca | ||
|
|
0798d098ae | ||
|
|
dab7ddc2bb | ||
|
|
081a96e281 | ||
|
|
a7dd881d79 | ||
|
|
8134d5e24d | ||
|
|
ba6756cd45 | ||
|
|
5d8fce21ac | ||
|
|
e7e4a5bcd4 | ||
|
|
55f33357ea | ||
|
|
90568bba31 | ||
|
|
5d6e2dc2e4 | ||
|
|
6bb33f2559 | ||
|
|
ced92554ed | ||
|
|
dff3383158 | ||
|
|
bf03c89cb2 | ||
|
|
9f1484bbef | ||
|
|
3899680e26 | ||
|
|
6bb2eb25a1 | ||
|
|
f8dfd8edb3 | ||
|
|
042be624a3 | ||
|
|
6bafa4c79a | ||
|
|
58b42fac5c | ||
|
|
3b47b9558a | ||
|
|
ccf9636296 | ||
|
|
96942719f2 | ||
|
|
69cf1c1adc | ||
|
|
d77cba40b8 | ||
|
|
968735b555 | ||
|
|
ceed9d29eb | ||
|
|
41329039ee | ||
|
|
f68b102ca8 | ||
|
|
fa36e54298 | ||
|
|
b689f57435 | ||
|
|
885fa0ff56 | ||
|
|
303acb72a3 | ||
|
|
b2a46cd0cd | ||
|
|
5a5ecb3ee3 | ||
|
|
60b4ab6a63 | ||
|
|
e4b096a08f | ||
|
|
343f55049b | ||
|
|
6b46025261 | ||
|
|
5ea503f23e | ||
|
|
ce95f9ac23 | ||
|
|
c3fb87501b | ||
|
|
dc6a343612 | ||
|
|
3a61053957 | ||
|
|
570129e4d4 | ||
|
|
3315c7045f | ||
|
|
5ae50e242c | ||
|
|
bbcf449719 | ||
|
|
aab10f7184 | ||
|
|
8d43488cb8 | ||
|
|
0a9c647e19 | ||
|
|
40db5d4aa8 | ||
|
|
9254532baa | ||
|
|
7abed47cf0 | ||
|
|
5c6ac758f7 | ||
|
|
007677962c | ||
|
|
9c4aeab64a | ||
|
|
48e6fc0efe | ||
|
|
c8be713d11 | ||
|
|
ae887c8648 | ||
|
|
5daac2531b | ||
|
|
68def00327 | ||
|
|
67e7976710 | ||
|
|
35747e937e | ||
|
|
fb439787a4 | ||
|
|
8fa368f473 | ||
|
|
c84a9d07b1 | ||
|
|
7fb46cdfc4 | ||
|
|
52985e5ddc | ||
|
|
e880935dc3 | ||
|
|
cc22b1bca5 | ||
|
|
49a5128918 | ||
|
|
fedc7dcb44 | ||
|
|
cd32b20215 | ||
|
|
15cd9832c4 | ||
|
|
f25d4e4553 | ||
|
|
12d1c82b63 | ||
|
|
aebe855078 | ||
|
|
3416a71ebd | ||
|
|
94b3fea528 | ||
|
|
ad1a9ecca1 | ||
|
|
715accfb8a | ||
|
|
a8e03c6138 | ||
|
|
f69446b648 | ||
|
|
eedfbe5846 | ||
|
|
153351cc9f | ||
|
|
1b1eec40a7 | ||
|
|
763877541a | ||
|
|
1fad7d72a2 | ||
|
|
51ea2ea879 | ||
|
|
d77a478bf0 | ||
|
|
e413c0264a | ||
|
|
f88e7f898c | ||
|
|
d07bd4a6db | ||
|
|
fb34c099d5 | ||
|
|
1d2ee56a15 | ||
|
|
86665f7f09 | ||
|
|
0d2b4af986 | ||
|
|
dc2b2eeb9f | ||
|
|
e5dbb66d53 | ||
|
|
3474b1c471 | ||
|
|
3886de5b7c | ||
|
|
2b3cec06b3 | ||
|
|
8536754d14 | ||
|
|
1f36235801 | ||
|
|
a4194b14f9 | ||
|
|
2dcc629d9d | ||
|
|
98ddadc6bc | ||
|
|
f6e47b7383 | ||
|
|
f073ddc906 | ||
|
|
3e00631925 | ||
|
|
9b7ac58562 | ||
|
|
f242ddd801 | ||
|
|
c129886fe2 | ||
|
|
f577e814cf | ||
|
|
c860a0cedd | ||
|
|
ae7e28e492 | ||
|
|
90a63234ad | ||
|
|
14bca52e8f | ||
|
|
2f3c3361cf | ||
|
|
4034134055 | ||
|
|
c04f94cb7b | ||
|
|
fd1bbc7925 | ||
|
|
ff69bed394 | ||
|
|
d6e8c5146f | ||
|
|
9a04cf99d7 | ||
|
|
86e7c11e71 | ||
|
|
361cc08faa | ||
|
|
70dc771052 | ||
|
|
c14873a799 | ||
|
|
bba5abd74b | ||
|
|
a224e79c1f | ||
|
|
c305d98186 | ||
|
|
7c5a473e71 | ||
|
|
5e0f5d1eed | ||
|
|
238b269bc4 | ||
|
|
0ad121b9d2 | ||
|
|
7088acd9fd | ||
|
|
e0a900d4b6 | ||
|
|
a0fe2f0c7d | ||
|
|
d5b9bc2f26 | ||
|
|
584254e6ca | ||
|
|
a2963ed7bb | ||
|
|
2a3c2e133d | ||
|
|
3e7dcb2755 | ||
|
|
faeec00b39 | ||
|
|
eeed81392f | ||
|
|
95dce9e992 | ||
|
|
502bd2a191 | ||
|
|
17ac92a9d0 | ||
|
|
ba028cde0c | ||
|
|
6e751e7a9b | ||
|
|
948b56d0e6 | ||
|
|
4bf2dc9ece | ||
|
|
125823f8ab | ||
|
|
24d33397e9 | ||
|
|
2c553825f4 | ||
|
|
198c485e9a | ||
|
|
0138505507 | ||
|
|
5d50dcc600 | ||
|
|
7bdd8c4626 | ||
|
|
fc82c35f0c | ||
|
|
426ebad300 | ||
|
|
1afe61c593 | ||
|
|
c20751829b | ||
|
|
a3b8ee8392 | ||
|
|
156c0fe7f6 | ||
|
|
216f7a38cf | ||
|
|
fd04dc10d4 | ||
|
|
d39bdce926 | ||
|
|
c6e01245b0 | ||
|
|
c168ee7ba4 | ||
|
|
7575253000 | ||
|
|
c28c1efbb1 | ||
|
|
e6aa2c3b78 | ||
|
|
ab7c481f83 | ||
|
|
84ad1c352d | ||
|
|
e9aad39ac9 | ||
|
|
c3444a87bc | ||
|
|
67b224b340 | ||
|
|
bded14d36b | ||
|
|
73fa0b6631 | ||
|
|
2f07337588 | ||
|
|
da163d44e7 | ||
|
|
56fbf8ae0c | ||
|
|
327eb4b39b | ||
|
|
ae7873a7e3 | ||
|
|
9a5f01813b | ||
|
|
0605a3b725 | ||
|
|
09c535f159 | ||
|
|
7fb11da5df | ||
|
|
9c9a46499a | ||
|
|
6fca60261e | ||
|
|
00537b32ef | ||
|
|
8636758a90 | ||
|
|
e39dfbd624 | ||
|
|
6e048b2a12 | ||
|
|
f9657599c2 | ||
|
|
42ae3bba9b | ||
|
|
2fd56a4bfe | ||
|
|
824bcc5603 | ||
|
|
4fbb613aaa | ||
|
|
9eb45270f2 | ||
|
|
75c61c53e8 | ||
|
|
2688a47436 | ||
|
|
fe3bf4b189 | ||
|
|
456cb5ebb2 | ||
|
|
3d91d574b4 | ||
|
|
54876c5499 | ||
|
|
d256585284 | ||
|
|
bd8f100b43 | ||
|
|
44f05f2dcc | ||
|
|
43f7f82bdc | ||
|
|
e902f63211 | ||
|
|
129f68e194 | ||
|
|
4b37fe12d7 | ||
|
|
6de79922c5 | ||
|
|
e1a9791f44 | ||
|
|
81795f51c6 | ||
|
|
68dfb11155 | ||
|
|
39fc1beb89 | ||
|
|
fe0ddec0f9 | ||
|
|
9b52b4efd9 | ||
|
|
e90e527603 | ||
|
|
a510854741 | ||
|
|
8935ce4ccf | ||
|
|
f9edc9059a | ||
|
|
db8917a769 | ||
|
|
c2d70cc1c2 | ||
|
|
3b13c7f9ce | ||
|
|
b7150d8026 | ||
|
|
041830a7f8 | ||
|
|
a18daf0195 | ||
|
|
5d3dfceb22 | ||
|
|
c82855e732 | ||
|
|
956f156018 | ||
|
|
9b13c35e7f | ||
|
|
bc8e637bba | ||
|
|
f03c28c906 | ||
|
|
e4b1f39fdc | ||
|
|
4780af910c | ||
|
|
d61ce5c524 | ||
|
|
20ab151f4d | ||
|
|
8a7be7543a | ||
|
|
3f806aec9c | ||
|
|
6c273b32bb | ||
|
|
b986f9d6ee | ||
|
|
c98cca6b7b | ||
|
|
fbec78ede5 | ||
|
|
c1d9a2d1f1 | ||
|
|
8a10036f32 | ||
|
|
924a3aec0e | ||
|
|
3b3ac31541 | ||
|
|
e0cb2f9d0f | ||
|
|
549b4edb59 | ||
|
|
67c912aca2 | ||
|
|
a74dde5d9e | ||
|
|
f7bcd24726 | ||
|
|
337c900770 | ||
|
|
e83e73ead4 | ||
|
|
24f6f9b063 | ||
|
|
5dc999360e | ||
|
|
9ec2f6b64d | ||
|
|
f970592efe | ||
|
|
7592c11e99 | ||
|
|
759b05e137 | ||
|
|
42ebd9ffce | ||
|
|
bc0fc33966 | ||
|
|
f4aab16e39 | ||
|
|
e91425287c | ||
|
|
f05908f570 | ||
|
|
8b351edf9c | ||
|
|
93c06eaba0 | ||
|
|
a8d9fa75d4 | ||
|
|
159ecd3e4f | ||
|
|
717803c665 | ||
|
|
0d40589e8a | ||
|
|
8c5544bfad | ||
|
|
0c9be9f84f | ||
|
|
497729ecd6 | ||
|
|
21a8efa3b8 | ||
|
|
c2f942a51e | ||
|
|
63b4b95240 | ||
|
|
955f37e005 | ||
|
|
cd2ae89b0e | ||
|
|
0b013fa438 | ||
|
|
478b657354 | ||
|
|
65b6aabe69 | ||
|
|
3fabae5b5f | ||
|
|
96c46a9e12 | ||
|
|
381b93e8eb | ||
|
|
f51e5b6fbf | ||
|
|
20befd1ca2 | ||
|
|
ac6c6130f8 | ||
|
|
d776a2325c | ||
|
|
4aec4257da | ||
|
|
d654f856d1 | ||
|
|
8d3b0a2069 | ||
|
|
54a96f35e8 | ||
|
|
2dc56d72f6 | ||
|
|
4b6ddb535a | ||
|
|
697e2250d4 | ||
|
|
6a75035b04 | ||
|
|
46b166bc41 | ||
|
|
6bbc0987ad | ||
|
|
8c480b43e2 | ||
|
|
079f6731dd | ||
|
|
f99d5754cd | ||
|
|
bf8c41e362 | ||
|
|
7f7bc06eb4 | ||
|
|
b507e59359 | ||
|
|
72078ac6bf | ||
|
|
0db9e082e2 | ||
|
|
0c44394a76 | ||
|
|
e20aa0cf04 | ||
|
|
fa30a50a91 | ||
|
|
f6629ff12c | ||
|
|
4128e4db73 | ||
|
|
34cac5685f | ||
|
|
4c9b91d536 | ||
|
|
95b95a8998 | ||
|
|
617738bb28 | ||
|
|
f6ac15d790 | ||
|
|
79e1324ead | ||
|
|
4ef9f010f0 | ||
|
|
e6e8865708 | ||
|
|
33cd8f9b0d | ||
|
|
a7138e019c | ||
|
|
049b72bd50 | ||
|
|
f3f1987515 | ||
|
|
a9395d89cd | ||
|
|
bc2fcee8ba | ||
|
|
242ff2ceca | ||
|
|
70790ac762 | ||
|
|
0f98869b61 | ||
|
|
9ddc02140f | ||
|
|
ee631b3d20 | ||
|
|
32f56e60d8 | ||
|
|
6102b51d9e | ||
|
|
2baee27859 | ||
|
|
144a3dedbb | ||
|
|
f90d966f1a | ||
|
|
b188e2ea97 |
28
.devcontainer/.env.example
Normal file
28
.devcontainer/.env.example
Normal file
@@ -0,0 +1,28 @@
|
||||
COMPOSE_PROJECT_NAME=trmm
|
||||
|
||||
IMAGE_REPO=tacticalrmm/
|
||||
VERSION=latest
|
||||
|
||||
# tactical credentials (Used to login to dashboard)
|
||||
TRMM_USER=tactical
|
||||
TRMM_PASS=tactical
|
||||
|
||||
# dns settings
|
||||
APP_HOST=rmm.example.com
|
||||
API_HOST=api.example.com
|
||||
MESH_HOST=mesh.example.com
|
||||
|
||||
# mesh settings
|
||||
MESH_USER=tactical
|
||||
MESH_PASS=tactical
|
||||
MONGODB_USER=mongouser
|
||||
MONGODB_PASSWORD=mongopass
|
||||
|
||||
# database settings
|
||||
POSTGRES_USER=postgres
|
||||
POSTGRES_PASS=postgrespass
|
||||
|
||||
# DEV SETTINGS
|
||||
APP_PORT=8000
|
||||
API_PORT=8080
|
||||
HTTP_PROTOCOL=https
|
||||
28
.devcontainer/api.dockerfile
Normal file
28
.devcontainer/api.dockerfile
Normal file
@@ -0,0 +1,28 @@
|
||||
FROM python:3.8-slim
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
ENV WORKSPACE_DIR /workspace
|
||||
ENV TACTICAL_USER tactical
|
||||
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
|
||||
# Copy Go Files
|
||||
COPY --from=golang:1.15 /usr/local/go ${TACTICAL_GO_DIR}/go
|
||||
|
||||
# Copy Dev python reqs
|
||||
COPY ./requirements.txt /
|
||||
|
||||
# Copy Docker Entrypoint
|
||||
COPY ./entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
|
||||
19
.devcontainer/docker-compose.debug.yml
Normal file
19
.devcontainer/docker-compose.debug.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
|
||||
ports:
|
||||
- 8000:8000
|
||||
- 5678:5678
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-backend
|
||||
209
.devcontainer/docker-compose.yml
Normal file
209
.devcontainer/docker-compose.yml
Normal file
@@ -0,0 +1,209 @@
|
||||
version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-api"]
|
||||
environment:
|
||||
API_PORT: ${API_PORT}
|
||||
ports:
|
||||
- "8000:${API_PORT}"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-backend
|
||||
|
||||
app-dev:
|
||||
image: node:12-alpine
|
||||
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
|
||||
working_dir: /workspace/web
|
||||
volumes:
|
||||
- ..:/workspace:cached
|
||||
ports:
|
||||
- "8080:${APP_PORT}"
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-frontend
|
||||
|
||||
# nats
|
||||
nats-dev:
|
||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
API_HOST: ${API_HOST}
|
||||
API_PORT: ${API_PORT}
|
||||
DEV: 1
|
||||
ports:
|
||||
- "4222:4222"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- ${API_HOST}
|
||||
- tactical-nats
|
||||
|
||||
# meshcentral container
|
||||
meshcentral-dev:
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
MESH_PASS: ${MESH_PASS}
|
||||
MONGODB_USER: ${MONGODB_USER}
|
||||
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
|
||||
NGINX_HOST_IP: 172.21.0.20
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-meshcentral
|
||||
- ${MESH_HOST}
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- mesh-data-dev:/home/node/app/meshcentral-data
|
||||
depends_on:
|
||||
- mongodb-dev
|
||||
|
||||
# mongodb container for meshcentral
|
||||
mongodb-dev:
|
||||
image: mongo:4.4
|
||||
restart: always
|
||||
environment:
|
||||
MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
|
||||
MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
|
||||
MONGO_INITDB_DATABASE: meshcentral
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-mongodb
|
||||
volumes:
|
||||
- mongo-dev-data:/data/db
|
||||
|
||||
# postgres database for api service
|
||||
postgres-dev:
|
||||
image: postgres:13-alpine
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_DB: tacticalrmm
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASS}
|
||||
volumes:
|
||||
- postgres-data-dev:/var/lib/postgresql/data
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-postgres
|
||||
|
||||
# redis container for celery tasks
|
||||
redis-dev:
|
||||
restart: always
|
||||
image: redis:6.0-alpine
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-redis
|
||||
|
||||
init-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
restart: on-failure
|
||||
command: ["tactical-init-dev"]
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASS: ${POSTGRES_PASS}
|
||||
APP_HOST: ${APP_HOST}
|
||||
API_HOST: ${API_HOST}
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
TRMM_USER: ${TRMM_USER}
|
||||
TRMM_PASS: ${TRMM_PASS}
|
||||
HTTP_PROTOCOL: ${HTTP_PROTOCOL}
|
||||
APP_PORT: ${APP_PORT}
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- meshcentral-dev
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
|
||||
# container for celery worker service
|
||||
celery-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-celery-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for celery beat service
|
||||
celerybeat-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-celerybeat-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
nginx-dev:
|
||||
# container for tactical reverse proxy
|
||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
APP_HOST: ${APP_HOST}
|
||||
API_HOST: ${API_HOST}
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
CERT_PUB_KEY: ${CERT_PUB_KEY}
|
||||
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
|
||||
APP_PORT: ${APP_PORT}
|
||||
API_PORT: ${API_PORT}
|
||||
networks:
|
||||
dev:
|
||||
ipv4_address: 172.21.0.20
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
|
||||
volumes:
|
||||
tactical-data-dev:
|
||||
postgres-data-dev:
|
||||
mongo-dev-data:
|
||||
mesh-data-dev:
|
||||
|
||||
networks:
|
||||
dev:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.21.0.0/24
|
||||
169
.devcontainer/entrypoint.sh
Normal file
169
.devcontainer/entrypoint.sh
Normal file
@@ -0,0 +1,169 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
: "${TRMM_USER:=tactical}"
|
||||
: "${TRMM_PASS:=tactical}"
|
||||
: "${POSTGRES_HOST:=tactical-postgres}"
|
||||
: "${POSTGRES_PORT:=5432}"
|
||||
: "${POSTGRES_USER:=tactical}"
|
||||
: "${POSTGRES_PASS:=tactical}"
|
||||
: "${POSTGRES_DB:=tacticalrmm}"
|
||||
: "${MESH_CONTAINER:=tactical-meshcentral}"
|
||||
: "${MESH_USER:=meshcentral}"
|
||||
: "${MESH_PASS:=meshcentralpass}"
|
||||
: "${MESH_HOST:=tactical-meshcentral}"
|
||||
: "${API_HOST:=tactical-backend}"
|
||||
: "${APP_HOST:=tactical-frontend}"
|
||||
: "${REDIS_HOST:=tactical-redis}"
|
||||
: "${HTTP_PROTOCOL:=http}"
|
||||
: "${APP_PORT:=8080}"
|
||||
: "${API_PORT:=8000}"
|
||||
|
||||
# Add python venv to path
|
||||
export PATH="${VIRTUAL_ENV}/bin:$PATH"
|
||||
|
||||
function check_tactical_ready {
|
||||
sleep 15
|
||||
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||
echo "waiting for init container to finish install or update..."
|
||||
sleep 10
|
||||
done
|
||||
}
|
||||
|
||||
function django_setup {
|
||||
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
|
||||
echo "waiting for postgresql container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
|
||||
echo "waiting for meshcentral container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
echo "setting up django environment"
|
||||
|
||||
# configure django settings
|
||||
MESH_TOKEN=$(cat ${TACTICAL_DIR}/tmp/mesh_token)
|
||||
|
||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||
|
||||
localvars="$(cat << EOF
|
||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||
|
||||
DEBUG = True
|
||||
|
||||
DOCKER_BUILD = True
|
||||
|
||||
CERT_FILE = '/opt/tactical/certs/fullchain.pem'
|
||||
KEY_FILE = '/opt/tactical/certs/privkey.pem'
|
||||
|
||||
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
|
||||
|
||||
ALLOWED_HOSTS = ['${API_HOST}', '*']
|
||||
|
||||
ADMIN_URL = 'admin/'
|
||||
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': '${POSTGRES_DB}',
|
||||
'USER': '${POSTGRES_USER}',
|
||||
'PASSWORD': '${POSTGRES_PASS}',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '${POSTGRES_PORT}',
|
||||
}
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
|
||||
|
||||
'DEFAULT_PERMISSION_CLASSES': (
|
||||
'rest_framework.permissions.IsAuthenticated',
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
'knox.auth.TokenAuthentication',
|
||||
),
|
||||
}
|
||||
|
||||
if not DEBUG:
|
||||
REST_FRAMEWORK.update({
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
)
|
||||
})
|
||||
|
||||
MESH_USERNAME = '${MESH_USER}'
|
||||
MESH_SITE = 'https://${MESH_HOST}'
|
||||
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||
REDIS_HOST = '${REDIS_HOST}'
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
|
||||
# run migrations and init scripts
|
||||
python manage.py migrate --no-input
|
||||
python manage.py collectstatic --no-input
|
||||
python manage.py initial_db_setup
|
||||
python manage.py initial_mesh_setup
|
||||
python manage.py load_chocos
|
||||
python manage.py load_community_scripts
|
||||
python manage.py reload_nats
|
||||
|
||||
# create super user
|
||||
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||
|
||||
}
|
||||
|
||||
if [ "$1" = 'tactical-init-dev' ]; then
|
||||
|
||||
# make directories if they don't exist
|
||||
mkdir -p ${TACTICAL_DIR}/tmp
|
||||
|
||||
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
||||
|
||||
# setup Python virtual env and install dependencies
|
||||
test -f ${VIRTUAL_ENV} && python -m venv --copies ${VIRTUAL_ENV}
|
||||
pip install --no-cache-dir -r /requirements.txt
|
||||
|
||||
django_setup
|
||||
|
||||
# create .env file for frontend
|
||||
webenv="$(cat << EOF
|
||||
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
APP_URL = https://${APP_HOST}
|
||||
EOF
|
||||
)"
|
||||
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
||||
|
||||
# chown everything to tactical user
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
|
||||
|
||||
# create install ready file
|
||||
su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-api' ]; then
|
||||
cp ${WORKSPACE_DIR}/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
|
||||
chmod +x /usr/local/bin/goversioninfo
|
||||
|
||||
check_tactical_ready
|
||||
python manage.py runserver 0.0.0.0:${API_PORT}
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celery-dev' ]; then
|
||||
check_tactical_ready
|
||||
env/bin/celery -A tacticalrmm worker -l debug
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
||||
check_tactical_ready
|
||||
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
||||
env/bin/celery -A tacticalrmm beat -l debug
|
||||
fi
|
||||
44
.devcontainer/requirements.txt
Normal file
44
.devcontainer/requirements.txt
Normal file
@@ -0,0 +1,44 @@
|
||||
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
|
||||
amqp==2.6.1
|
||||
asgiref==3.3.1
|
||||
asyncio-nats-client==0.11.4
|
||||
billiard==3.6.3.0
|
||||
celery==4.4.6
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.3
|
||||
chardet==3.0.4
|
||||
cryptography==3.2.1
|
||||
decorator==4.4.2
|
||||
Django==3.1.4
|
||||
django-cors-headers==3.5.0
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.2
|
||||
future==0.18.2
|
||||
idna==2.10
|
||||
kombu==4.6.11
|
||||
loguru==0.5.3
|
||||
msgpack==1.0.0
|
||||
packaging==20.4
|
||||
psycopg2-binary==2.8.6
|
||||
pycparser==2.20
|
||||
pycryptodome==3.9.9
|
||||
pyotp==2.4.1
|
||||
pyparsing==2.4.7
|
||||
pytz==2020.4
|
||||
qrcode==6.1
|
||||
redis==3.5.3
|
||||
requests==2.25.0
|
||||
six==1.15.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.49.0
|
||||
urllib3==1.26.2
|
||||
validators==0.18.1
|
||||
vine==1.3.0
|
||||
websockets==8.1
|
||||
zipp==3.4.0
|
||||
black
|
||||
Werkzeug
|
||||
django-extensions
|
||||
coverage
|
||||
coveralls
|
||||
model_bakery
|
||||
25
.dockerignore
Normal file
25
.dockerignore
Normal file
@@ -0,0 +1,25 @@
|
||||
**/__pycache__
|
||||
**/.classpath
|
||||
**/.dockerignore
|
||||
**/.env
|
||||
**/.git
|
||||
**/.gitignore
|
||||
**/.project
|
||||
**/.settings
|
||||
**/.toolstarget
|
||||
**/.vs
|
||||
**/.vscode
|
||||
**/*.*proj.user
|
||||
**/*.dbmdl
|
||||
**/*.jfm
|
||||
**/azds.yaml
|
||||
**/charts
|
||||
**/docker-compose*
|
||||
**/Dockerfile*
|
||||
**/node_modules
|
||||
**/npm-debug.log
|
||||
**/obj
|
||||
**/secrets.dev.yaml
|
||||
**/values.dev.yaml
|
||||
**/env
|
||||
README.md
|
||||
12
.github/FUNDING.yml
vendored
Normal file
12
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: wh1te909
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: # Replace with a single Ko-fi username
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
otechie: # Replace with a single Otechie username
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||
78
.github/workflows/docker-build-push.yml
vendored
Normal file
78
.github/workflows/docker-build-push.yml
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
name: Publish Tactical Docker Images
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
jobs:
|
||||
docker:
|
||||
name: Build and Push Docker Images
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Get Github Tag
|
||||
id: prep
|
||||
run: |
|
||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and Push Tactical Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
|
||||
|
||||
- name: Build and Push Tactical MeshCentral Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-meshcentral/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
|
||||
|
||||
- name: Build and Push Tactical NATS Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-nats/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
||||
|
||||
- name: Build and Push Tactical Frontend Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-frontend/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
|
||||
|
||||
- name: Build and Push Tactical Nginx Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-nginx/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -34,6 +34,7 @@ app.ini
|
||||
create_services.py
|
||||
gen_random.py
|
||||
sync_salt_modules.py
|
||||
change_times.py
|
||||
rmm-*.exe
|
||||
rmm-*.ps1
|
||||
api/tacticalrmm/accounts/management/commands/*.json
|
||||
@@ -41,3 +42,6 @@ api/tacticalrmm/accounts/management/commands/random_data.py
|
||||
versioninfo.go
|
||||
resource.syso
|
||||
htmlcov/
|
||||
docker-compose.dev.yml
|
||||
docs/.vuepress/dist
|
||||
nats-rmm.conf
|
||||
|
||||
43
.travis.yml
43
.travis.yml
@@ -1,43 +0,0 @@
|
||||
dist: focal
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- language: node_js
|
||||
node_js: "12"
|
||||
before_install:
|
||||
- cd web
|
||||
install:
|
||||
- npm install
|
||||
script:
|
||||
- npm run test:unit
|
||||
|
||||
- language: python
|
||||
python: "3.8"
|
||||
services:
|
||||
- redis
|
||||
|
||||
addons:
|
||||
postgresql: "13"
|
||||
apt:
|
||||
packages:
|
||||
- postgresql-13
|
||||
|
||||
before_script:
|
||||
- psql -c 'CREATE DATABASE travisci;' -U postgres
|
||||
- psql -c "CREATE USER travisci WITH PASSWORD 'travisSuperSekret6645';" -U postgres
|
||||
- psql -c 'GRANT ALL PRIVILEGES ON DATABASE travisci TO travisci;' -U postgres
|
||||
- psql -c 'ALTER USER travisci CREATEDB;' -U postgres
|
||||
|
||||
before_install:
|
||||
- cd api/tacticalrmm
|
||||
|
||||
install:
|
||||
- pip install --no-cache-dir --upgrade pip
|
||||
- pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
|
||||
- pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
|
||||
|
||||
script:
|
||||
- coverage run manage.py test -v 2
|
||||
|
||||
after_success:
|
||||
- coveralls
|
||||
14
.vscode/launch.json
vendored
14
.vscode/launch.json
vendored
@@ -14,6 +14,20 @@
|
||||
"0.0.0.0:8000"
|
||||
],
|
||||
"django": true
|
||||
},
|
||||
{
|
||||
"name": "Django: Docker Remote Attach",
|
||||
"type": "python",
|
||||
"request": "attach",
|
||||
"port": 5678,
|
||||
"host": "localhost",
|
||||
"preLaunchTask": "docker debug",
|
||||
"pathMappings": [
|
||||
{
|
||||
"localRoot": "${workspaceFolder}/api/tacticalrmm",
|
||||
"remoteRoot": "/workspace/api/tacticalrmm"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
21
.vscode/settings.json
vendored
21
.vscode/settings.json
vendored
@@ -2,7 +2,7 @@
|
||||
"python.pythonPath": "api/tacticalrmm/env/bin/python",
|
||||
"python.languageServer": "Pylance",
|
||||
"python.analysis.extraPaths": [
|
||||
"api/tacticalrmm"
|
||||
"api/tacticalrmm",
|
||||
],
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"python.formatting.provider": "black",
|
||||
@@ -41,4 +41,23 @@
|
||||
"**/*.zip": true
|
||||
},
|
||||
},
|
||||
"go.useLanguageServer": true,
|
||||
"[go]": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": false,
|
||||
},
|
||||
"editor.snippetSuggestions": "none",
|
||||
},
|
||||
"[go.mod]": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true,
|
||||
},
|
||||
},
|
||||
"gopls": {
|
||||
"usePlaceholders": true,
|
||||
"completeUnimported": true,
|
||||
"staticcheck": true,
|
||||
}
|
||||
}
|
||||
23
.vscode/tasks.json
vendored
Normal file
23
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "docker debug",
|
||||
"type": "shell",
|
||||
"command": "docker-compose",
|
||||
"args": [
|
||||
"-p",
|
||||
"trmm",
|
||||
"-f",
|
||||
".devcontainer/docker-compose.yml",
|
||||
"-f",
|
||||
".devcontainer/docker-compose.debug.yml",
|
||||
"up",
|
||||
"-d",
|
||||
"--build"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
30
README.md
30
README.md
@@ -1,13 +1,12 @@
|
||||
# Tactical RMM
|
||||
|
||||
[](https://travis-ci.com/wh1te909/tacticalrmm)
|
||||
[](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
|
||||
[](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
[](https://github.com/python/black)
|
||||
|
||||
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
|
||||
# [LIVE DEMO](https://rmm.xlawgaming.com/)
|
||||
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
|
||||
@@ -37,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
|
||||
## Installation
|
||||
|
||||
### Requirements
|
||||
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
|
||||
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
|
||||
- A domain you own with at least 3 subdomains
|
||||
- Google Authenticator app (2 factor is NOT optional)
|
||||
|
||||
@@ -63,7 +62,7 @@ sudo ufw default allow outgoing
|
||||
sudo ufw allow ssh
|
||||
sudo ufw allow http
|
||||
sudo ufw allow https
|
||||
sudo ufw allow proto tcp from any to any port 4505,4506
|
||||
sudo ufw allow proto tcp from any to any port 4222
|
||||
sudo ufw enable && sudo ufw reload
|
||||
```
|
||||
|
||||
@@ -78,7 +77,7 @@ Create A record ```mesh.tacticalrmm.com``` for meshcentral
|
||||
Download the install script and run it
|
||||
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/install.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
|
||||
chmod +x install.sh
|
||||
./install.sh
|
||||
```
|
||||
@@ -92,17 +91,17 @@ chmod +x install.sh
|
||||
From the app's dashboard, choose Agents > Install Agent to generate an installer.
|
||||
|
||||
## Updating
|
||||
Download and run [update.sh](./update.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh))
|
||||
Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
|
||||
chmod +x update.sh
|
||||
./update.sh
|
||||
```
|
||||
|
||||
## Backup
|
||||
Download [backup.sh](./backup.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh))
|
||||
Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
|
||||
```
|
||||
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
|
||||
|
||||
@@ -121,7 +120,7 @@ Copy backup file to new server
|
||||
|
||||
Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/restore.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
|
||||
```
|
||||
|
||||
Run the restore script, passing it the backup tar file as the first argument
|
||||
@@ -129,14 +128,3 @@ Run the restore script, passing it the backup tar file as the first argument
|
||||
chmod +x restore.sh
|
||||
./restore.sh rmm-backup-xxxxxxx.tar
|
||||
```
|
||||
|
||||
## Using another ssl certificate
|
||||
During the install you can opt out of using the Let's Encrypt certificate. If you do this the script will create a self-signed certificate, so that https continues to work. You can replace the certificates in /certs/example.com/(privkey.pem | pubkey.pem) with your own.
|
||||
|
||||
If you are migrating from Let's Encrypt to another certificate provider, you can create the /certs directory and copy your certificates there. It is recommended to do this because this directory will be backed up with the backup script provided. Then modify the nginx configurations to use your new certificates
|
||||
|
||||
The cert that is generated is a wildcard certificate and is used in the nginx configurations: rmm.conf, api.conf, and mesh.conf. If you can't generate wildcard certificates you can create a cert for each subdomain and configure each nginx configuration file to use its own certificate. Then restart nginx:
|
||||
|
||||
```
|
||||
sudo systemctl restart nginx
|
||||
```
|
||||
@@ -1,457 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
import psutil
|
||||
import os
|
||||
import datetime
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import wmi
|
||||
import win32evtlog
|
||||
import win32con
|
||||
import win32evtlogutil
|
||||
import winerror
|
||||
from time import sleep
|
||||
import requests
|
||||
import subprocess
|
||||
import random
|
||||
import platform
|
||||
|
||||
ARCH = "64" if platform.machine().endswith("64") else "32"
|
||||
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
|
||||
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
|
||||
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
|
||||
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
|
||||
SYS_DRIVE = os.environ["SystemDrive"]
|
||||
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
|
||||
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
|
||||
|
||||
|
||||
def get_services():
    """Return a list of dicts describing every Windows service.

    Builds each dict by hand rather than using ``svc.as_dict()``;
    see https://github.com/wh1te909/tacticalrmm/issues/38 for why.
    Any service that raises while being queried is skipped entirely.
    """
    services = []
    for svc in psutil.win_service_iter():
        try:
            info = {
                "display_name": svc.display_name(),
                "binpath": svc.binpath(),
                "username": svc.username(),
                "start_type": svc.start_type(),
                "status": svc.status(),
                "pid": svc.pid(),
                "name": svc.name(),
                "description": svc.description(),
            }
        except Exception:
            continue
        services.append(info)

    return services
|
||||
|
||||
|
||||
def run_python_script(filename, timeout, script_type="userdefined"):
    """Fetch a python script from the salt fileserver and execute it.

    No longer used as of agent version 0.11.0; kept for older agents.

    filename -- script file name on the salt fileserver
    timeout -- seconds passed through to salt's cmd.run_all
    script_type -- "userdefined" pulls from scripts/userdefined/,
        anything else pulls from scripts/
    Returns the dict produced by salt's ``cmd.run_all``.
    """
    file_path = os.path.join(TEMP_DIR, filename)

    # remove any stale copy from a previous run; only swallow filesystem
    # errors (locked file, permissions) rather than a bare except that
    # would also hide KeyboardInterrupt/SystemExit
    if os.path.exists(file_path):
        try:
            os.remove(file_path)
        except OSError:
            pass

    if script_type == "userdefined":
        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
    else:
        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)

    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def run_script(filepath, filename, shell, timeout, args=[], bg=False):
    """Run a script through salt.

    powershell/cmd scripts are executed directly via salt's cmd.script;
    python scripts are first copied into the agent's temp dir and run
    with the bundled python interpreter.
    Returns the dict produced by the underlying salt function.
    """
    if shell in ("powershell", "cmd"):
        if not args:
            return __salt__["cmd.script"](
                source=filepath, shell=shell, timeout=timeout, bg=bg
            )
        quoted = " ".join(f'"{a}"' for a in args)
        return __salt__["cmd.script"](
            source=filepath,
            args=quoted,
            shell=shell,
            timeout=timeout,
            bg=bg,
        )

    elif shell == "python":
        file_path = os.path.join(TEMP_DIR, filename)

        # clear out a stale copy from an earlier run, best effort
        if os.path.exists(file_path):
            try:
                os.remove(file_path)
            except:
                pass

        __salt__["cp.get_file"](filepath, file_path)

        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"

        command = f"{PY_BIN} {file_path}"
        if args:
            command = command + " " + " ".join(f'"{a}"' for a in args)
        return __salt__[salt_cmd](command, timeout=timeout)
|
||||
|
||||
|
||||
def uninstall_agent():
    """Kick off the Inno Setup uninstaller silently in the background."""
    uninstaller = os.path.join(PROGRAM_DIR, "unins000.exe")
    __salt__["cmd.run_bg"]([uninstaller, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
    return "ok"
|
||||
|
||||
|
||||
def update_salt():
    """Spawn the agent binary to update salt, detached from this process.

    Returns "running" if an update is already in progress, otherwise the
    pid of the spawned process.
    """
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "updatesalt" in proc.cmdline():
                return "running"

    from subprocess import Popen, PIPE

    # detach the child so the updater survives this salt call exiting
    CREATE_NEW_PROCESS_GROUP = 0x00000200
    DETACHED_PROCESS = 0x00000008
    child = Popen(
        [TAC_RMM, "-m", "updatesalt"],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        close_fds=True,
        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
    )
    return child.pid
|
||||
|
||||
|
||||
def run_manual_checks():
    """Ask the agent binary to run its checks now, in the background."""
    __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
    return "ok"
|
||||
|
||||
|
||||
def install_updates():
    """Start the windows update routine unless one is already running."""
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "winupdater" in proc.cmdline():
                return "running"

    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
|
||||
|
||||
|
||||
def _wait_for_service(svc, status, retries=10):
    """Poll a windows service until it reaches *status* or retries run out.

    Sleeps 5s between attempts. Returns the last observed status string
    ("fail" when the service could not be found at all).
    """
    attempts = 0
    while True:
        try:
            service = psutil.win_service_get(svc)
        except psutil.NoSuchProcess:
            stat = "fail"
            attempts += 1
            sleep(5)
        else:
            stat = service.status()
            if stat == status:
                # reached the desired state; loop exit condition below fires
                attempts = 0
            else:
                attempts += 1
                sleep(5)

        if attempts == 0 or attempts > retries:
            break

    return stat
|
||||
|
||||
|
||||
def agent_update_v2(inno, url):
    """Download the new agent installer and run it silently.

    Guarded so only one update runs at a time: this function is invoked
    via salt run_bg and spawns 2 instances of itself, so more than 2
    matching processes means another update is already underway.

    inno -- installer exe filename
    url -- download url for the installer
    Returns "already running", "failed", or "ok".
    """
    count = 0
    for p in psutil.process_iter():
        try:
            with p.oneshot():
                if "win_agent.agent_update_v2" in p.cmdline():
                    count += 1
        except Exception:
            continue

    if count > 2:
        return "already running"

    sleep(random.randint(1, 20))  # stagger agents so we don't flood the rmm

    exe = os.path.join(TEMP_DIR, inno)

    if os.path.exists(exe):
        try:
            os.remove(exe)
        # only swallow filesystem errors, not every exception;
        # a locked stale file will be overwritten on download anyway
        except OSError:
            pass

    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)

    # the installer stops the services; make sure they come back up
    tac = _wait_for_service(svc="tacticalagent", status="running")
    if tac != "running":
        subprocess.run([NSSM, "start", "tacticalagent"], timeout=30)

    chk = _wait_for_service(svc="checkrunner", status="running")
    if chk != "running":
        subprocess.run([NSSM, "start", "checkrunner"], timeout=30)

    return "ok"
|
||||
|
||||
|
||||
def do_agent_update_v2(inno, url):
    """Launch agent_update_v2 in the background via a local salt-call."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update_v2",
        f"inno={inno}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
||||
|
||||
|
||||
def agent_update(version, url):
    """Legacy updater: download installer, stop services, install, restart.

    Only one update may run at a time; this function spawns 2 instances
    of itself, so more than 2 matching processes means an update is
    already in progress.
    Returns "already running", "failed", or "ok".
    """
    running = 0
    for proc in psutil.process_iter():
        try:
            with proc.oneshot():
                if "win_agent.agent_update" in proc.cmdline():
                    running += 1
        except Exception:
            continue

    if running > 2:
        return "already running"

    sleep(random.randint(1, 60))  # don't flood the rmm
    try:
        resp = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if resp.status_code != 200:
        return "failed"

    exe = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")

    with open(exe, "wb") as f:
        for chunk in resp.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del resp

    services = ("tacticalagent", "checkrunner")

    for svc in services:
        subprocess.run([NSSM, "stop", svc], timeout=120)

    sleep(10)
    subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
    sleep(30)

    for svc in services:
        subprocess.run([NSSM, "start", svc], timeout=120)

    return "ok"
|
||||
|
||||
|
||||
def do_agent_update(version, url):
    """Launch agent_update in the background via a local salt-call."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update",
        f"version={version}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
||||
|
||||
|
||||
class SystemDetail:
    """Collects hardware/OS details from WMI for system_info()."""

    def __init__(self):
        # one WMI connection shared by every query below
        self.c = wmi.WMI()
        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
        self.comp_sys = self.c.Win32_ComputerSystem()
        self.memory = self.c.Win32_PhysicalMemory()
        self.os = self.c.Win32_OperatingSystem()
        self.base_board = self.c.Win32_BaseBoard()
        self.bios = self.c.Win32_BIOS()
        self.disk = self.c.Win32_DiskDrive()
        self.network_adapter = self.c.Win32_NetworkAdapter()
        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
        self.desktop_monitor = self.c.Win32_DesktopMonitor()
        self.cpu = self.c.Win32_Processor()
        self.usb = self.c.Win32_USBController()

    def get_all(self, obj):
        """Return [[{prop: value}, ...], ...] for each WMI instance in
        *obj*, skipping properties whose value is None."""
        results = []
        for instance in obj:
            props = [
                {name: getattr(instance, name)}
                for name in list(instance.properties)
                if getattr(instance, name) is not None
            ]
            results.append(props)

        return results
|
||||
|
||||
|
||||
def system_info():
    """Gather all WMI system details into one serializable dict."""
    detail = SystemDetail()
    sections = {
        "comp_sys_prod": detail.comp_sys_prod,
        "comp_sys": detail.comp_sys,
        "mem": detail.memory,
        "os": detail.os,
        "base_board": detail.base_board,
        "bios": detail.bios,
        "disk": detail.disk,
        "network_adapter": detail.network_adapter,
        "network_config": detail.network_config,
        "desktop_monitor": detail.desktop_monitor,
        "cpu": detail.cpu,
        "usb": detail.usb,
    }
    return {name: detail.get_all(obj) for name, obj in sections.items()}
|
||||
|
||||
|
||||
def local_sys_info():
    """Ask the agent binary to collect and report system info."""
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
|
||||
|
||||
|
||||
def get_procs():
    """Snapshot running processes with cpu/memory usage.

    psutil's cpu_percent needs two samples, so every process is primed
    first, then re-read after a one second pause.
    """
    # first pass: prime the cpu_percent counters
    for proc in psutil.process_iter():
        with proc.oneshot():
            proc.cpu_percent(interval=None)

    # psutil needs time between samples to compute cpu percent
    sleep(1)

    procs = []
    for idx, proc in enumerate(psutil.process_iter(), 1):
        with proc.oneshot():
            # skip the idle process and anything without a name
            if proc.pid == 0 or not proc.name():
                continue

            procs.append(
                {
                    "name": proc.name(),
                    "cpu_percent": proc.cpu_percent(interval=None)
                    / psutil.cpu_count(),
                    "memory_percent": proc.memory_percent(),
                    "pid": proc.pid,
                    "ppid": proc.ppid(),
                    "status": proc.status(),
                    "username": proc.username(),
                    "id": idx,
                }
            )

    return procs
|
||||
|
||||
|
||||
def _compress_json(j):
|
||||
return {
|
||||
"wineventlog": base64.b64encode(
|
||||
zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
|
||||
).decode("ascii", errors="ignore")
|
||||
}
|
||||
|
||||
|
||||
def get_eventlog(logtype, last_n_days):
    """Read the last *last_n_days* days of a windows event log.

    logtype -- log name, e.g. "Application" or "System"
    Returns the collected events packed via _compress_json. Errors
    during reading are swallowed and whatever was collected so far is
    returned (best effort).
    """
    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ

    # map win32 event type constants to display strings
    status_dict = {
        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
        0: "INFO",
    }

    hand = win32evtlog.OpenEventLog("localhost", logtype)
    total = win32evtlog.GetNumberOfEventLogRecords(hand)
    log = []
    uid = 0
    done = False

    try:
        while True:
            events = win32evtlog.ReadEventLog(hand, flags, 0)
            for ev_obj in events:
                uid += 1
                # stop once every record has been seen, otherwise the
                # backwards read would loop forever
                if uid >= total:
                    done = True
                    break

                the_time = ev_obj.TimeGenerated.Format()
                # reading backwards in time, so once we cross the
                # cutoff everything further is too old
                if datetime.datetime.strptime(the_time, "%c") < start_time:
                    done = True
                    break

                message = (
                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
                    .replace("<", "")
                    .replace(">", "")
                )

                log.append(
                    {
                        "computer": str(ev_obj.ComputerName),
                        "source": str(ev_obj.SourceName),
                        "eventType": str(status_dict[ev_obj.EventType]),
                        "eventID": str(winerror.HRESULT_CODE(ev_obj.EventID)),
                        "eventCategory": str(ev_obj.EventCategory),
                        "message": message,
                        "time": the_time,
                        "record": str(ev_obj.RecordNumber),
                        "uid": uid,
                    }
                )

            if done:
                break

    except Exception:
        # best effort: fall through and return what was collected
        pass

    win32evtlog.CloseEventLog(hand)
    return _compress_json(log)
|
||||
@@ -20,6 +20,5 @@ omit =
|
||||
*/urls.py
|
||||
*/tests.py
|
||||
*/test.py
|
||||
api/*.py
|
||||
checks/utils.py
|
||||
|
||||
|
||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0002_auto_20200810_0544'),
|
||||
("accounts", "0002_auto_20200810_0544"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,24 +6,24 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0003_auto_20200922_1344'),
|
||||
("accounts", "0003_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0004_auto_20201002_1257'),
|
||||
("accounts", "0004_auto_20201002_1257"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
26
api/tacticalrmm/accounts/migrations/0006_user_agent.py
Normal file
26
api/tacticalrmm/accounts/migrations/0006_user_agent.py
Normal file
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-10 20:24
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0024_auto_20201101_2319"),
|
||||
("accounts", "0005_auto_20201002_1303"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="agent",
|
||||
field=models.OneToOneField(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="user",
|
||||
to="agents.agent",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 22:54
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def link_agents_to_users(apps, schema_editor):
|
||||
Agent = apps.get_model("agents", "Agent")
|
||||
User = apps.get_model("accounts", "User")
|
||||
for agent in Agent.objects.all():
|
||||
user = User.objects.filter(username=agent.agent_id).first()
|
||||
|
||||
if user:
|
||||
user.agent = agent
|
||||
user.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0006_user_agent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(link_agents_to_users, migrations.RunPython.noop),
|
||||
]
|
||||
18
api/tacticalrmm/accounts/migrations/0008_user_dark_mode.py
Normal file
18
api/tacticalrmm/accounts/migrations/0008_user_dark_mode.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-12 00:39
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0007_update_agent_primary_key"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="dark_mode",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2020-12-10 17:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0008_user_dark_mode"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="show_community_scripts",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-14 01:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0009_user_show_community_scripts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="agent_dblclick_action",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("editagent", "Edit Agent"),
|
||||
("takecontrol", "Take Control"),
|
||||
("remotebg", "Remote Background"),
|
||||
],
|
||||
default="editagent",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.1.5 on 2021-01-18 09:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0010_user_agent_dblclick_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="default_agent_tbl_tab",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
],
|
||||
default="server",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -3,10 +3,38 @@ from django.contrib.auth.models import AbstractUser
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
AGENT_DBLCLICK_CHOICES = [
|
||||
("editagent", "Edit Agent"),
|
||||
("takecontrol", "Take Control"),
|
||||
("remotebg", "Remote Background"),
|
||||
]
|
||||
|
||||
AGENT_TBL_TAB_CHOICES = [
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
]
|
||||
|
||||
|
||||
class User(AbstractUser, BaseAuditModel):
|
||||
is_active = models.BooleanField(default=True)
|
||||
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
||||
dark_mode = models.BooleanField(default=True)
|
||||
show_community_scripts = models.BooleanField(default=True)
|
||||
agent_dblclick_action = models.CharField(
|
||||
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
|
||||
)
|
||||
default_agent_tbl_tab = models.CharField(
|
||||
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
|
||||
)
|
||||
|
||||
agent = models.OneToOneField(
|
||||
"agents.Agent",
|
||||
related_name="user",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def serialize(user):
|
||||
|
||||
@@ -155,6 +155,33 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
data = {
|
||||
"id": self.john.pk,
|
||||
"username": "john",
|
||||
"email": "johndoe@xlawgaming.com",
|
||||
"first_name": "John",
|
||||
"last_name": "Doe",
|
||||
}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_not_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
data = {
|
||||
"id": self.john.pk,
|
||||
"username": "john",
|
||||
"email": "johndoe@xlawgaming.com",
|
||||
"first_name": "John",
|
||||
"last_name": "Doe",
|
||||
}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_delete(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
r = self.client.delete(url)
|
||||
@@ -166,6 +193,19 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_delete_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_delete_non_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
|
||||
class TestUserAction(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -184,6 +224,21 @@ class TestUserAction(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_post_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_post_non_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_put(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
@@ -195,6 +250,44 @@ class TestUserAction(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
user = User.objects.get(pk=self.john.pk)
|
||||
self.assertEqual(user.totp_key, "")
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_non_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_user_ui(self):
|
||||
url = "/accounts/users/ui/"
|
||||
data = {"dark_mode": False}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data = {"show_community_scripts": True}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data = {
|
||||
"userui": True,
|
||||
"agent_dblclick_action": "editagent",
|
||||
"default_agent_tbl_tab": "mixed",
|
||||
}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestTOTPSetup(TacticalTestCase):
|
||||
def setUp(self):
|
||||
|
||||
@@ -7,4 +7,5 @@ urlpatterns = [
|
||||
path("users/reset/", views.UserActions.as_view()),
|
||||
path("users/reset_totp/", views.UserActions.as_view()),
|
||||
path("users/setup_totp/", views.TOTPSetup.as_view()),
|
||||
path("users/ui/", views.UserUI.as_view()),
|
||||
]
|
||||
|
||||
@@ -60,7 +60,7 @@ class LoginView(KnoxLoginView):
|
||||
|
||||
if settings.DEBUG and token == "sekret":
|
||||
valid = True
|
||||
elif totp.verify(token, valid_window=1):
|
||||
elif totp.verify(token, valid_window=10):
|
||||
valid = True
|
||||
|
||||
if valid:
|
||||
@@ -74,8 +74,7 @@ class LoginView(KnoxLoginView):
|
||||
|
||||
class GetAddUsers(APIView):
|
||||
def get(self, request):
|
||||
agents = Agent.objects.values_list("agent_id", flat=True)
|
||||
users = User.objects.exclude(username__in=agents)
|
||||
users = User.objects.filter(agent=None)
|
||||
|
||||
return Response(UserSerializer(users, many=True).data)
|
||||
|
||||
@@ -109,6 +108,13 @@ class GetUpdateDeleteUser(APIView):
|
||||
def put(self, request, pk):
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
@@ -116,7 +122,15 @@ class GetUpdateDeleteUser(APIView):
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(User, pk=pk).delete()
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be deleted from the UI")
|
||||
|
||||
user.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -125,8 +139,14 @@ class UserActions(APIView):
|
||||
|
||||
# reset password
|
||||
def post(self, request):
|
||||
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
user.set_password(request.data["password"])
|
||||
user.save()
|
||||
|
||||
@@ -134,8 +154,14 @@ class UserActions(APIView):
|
||||
|
||||
# reset two factor token
|
||||
def put(self, request):
|
||||
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
user.totp_key = ""
|
||||
user.save()
|
||||
|
||||
@@ -157,3 +183,23 @@ class TOTPSetup(APIView):
|
||||
return Response(TOTPSetupSerializer(user).data)
|
||||
|
||||
return Response("totp token already set")
|
||||
|
||||
|
||||
class UserUI(APIView):
|
||||
def patch(self, request):
|
||||
user = request.user
|
||||
|
||||
if "dark_mode" in request.data.keys():
|
||||
user.dark_mode = request.data["dark_mode"]
|
||||
user.save(update_fields=["dark_mode"])
|
||||
|
||||
if "show_community_scripts" in request.data.keys():
|
||||
user.show_community_scripts = request.data["show_community_scripts"]
|
||||
user.save(update_fields=["show_community_scripts"])
|
||||
|
||||
if "userui" in request.data.keys():
|
||||
user.agent_dblclick_action = request.data["agent_dblclick_action"]
|
||||
user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
|
||||
user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -1,14 +1,35 @@
|
||||
from .models import Agent
|
||||
import random
|
||||
import string
|
||||
import os
|
||||
import json
|
||||
|
||||
from model_bakery.recipe import Recipe, seq
|
||||
from itertools import cycle
|
||||
from django.utils import timezone as djangotime
|
||||
from django.conf import settings
|
||||
|
||||
from .models import Agent
|
||||
|
||||
|
||||
def generate_agent_id(hostname):
|
||||
rand = "".join(random.choice(string.ascii_letters) for _ in range(35))
|
||||
return f"{rand}-{hostname}"
|
||||
|
||||
|
||||
def get_wmi_data():
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
|
||||
) as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
agent = Recipe(
|
||||
Agent,
|
||||
client="Default",
|
||||
site="Default",
|
||||
hostname=seq("TestHostname"),
|
||||
hostname="DESKTOP-TEST123",
|
||||
version="1.3.0",
|
||||
monitoring_type=cycle(["workstation", "server"]),
|
||||
salt_id=generate_agent_id("DESKTOP-TEST123"),
|
||||
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
|
||||
)
|
||||
|
||||
server_agent = agent.extend(
|
||||
@@ -49,3 +70,5 @@ agent_with_services = agent.extend(
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
agent_with_wmi = agent.extend(wmi=get_wmi_data())
|
||||
|
||||
26
api/tacticalrmm/agents/migrations/0021_agent_site_link.py
Normal file
26
api/tacticalrmm/agents/migrations/0021_agent_site_link.py
Normal file
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 22:53
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("clients", "0006_deployment"),
|
||||
("agents", "0020_auto_20201025_2129"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="agent",
|
||||
name="site_link",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="agents",
|
||||
to="clients.site",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 22:54
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def link_sites_to_agents(apps, schema_editor):
|
||||
Agent = apps.get_model("agents", "Agent")
|
||||
Site = apps.get_model("clients", "Site")
|
||||
for agent in Agent.objects.all():
|
||||
site = Site.objects.get(client__client=agent.client, site=agent.site)
|
||||
agent.site_link = site
|
||||
agent.save()
|
||||
|
||||
|
||||
def reverse(apps, schema_editor):
|
||||
Agent = apps.get_model("agents", "Agent")
|
||||
for agent in Agent.objects.all():
|
||||
agent.site = agent.site_link.site
|
||||
agent.client = agent.site_link.client.client
|
||||
agent.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0021_agent_site_link"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(link_sites_to_agents, reverse),
|
||||
]
|
||||
21
api/tacticalrmm/agents/migrations/0023_auto_20201101_2312.py
Normal file
21
api/tacticalrmm/agents/migrations/0023_auto_20201101_2312.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 23:12
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0022_update_site_primary_key"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="agent",
|
||||
name="client",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="agent",
|
||||
name="site",
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/agents/migrations/0024_auto_20201101_2319.py
Normal file
18
api/tacticalrmm/agents/migrations/0024_auto_20201101_2319.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 23:19
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0023_auto_20201101_2312"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name="agent",
|
||||
old_name="site_link",
|
||||
new_name="site",
|
||||
),
|
||||
]
|
||||
27
api/tacticalrmm/agents/migrations/0025_auto_20201122_0407.py
Normal file
27
api/tacticalrmm/agents/migrations/0025_auto_20201122_0407.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-22 04:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0024_auto_20201101_2319"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="recoveryaction",
|
||||
name="mode",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("salt", "Salt"),
|
||||
("mesh", "Mesh"),
|
||||
("command", "Command"),
|
||||
("rpc", "Nats RPC"),
|
||||
],
|
||||
default="mesh",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
28
api/tacticalrmm/agents/migrations/0026_auto_20201125_2334.py
Normal file
28
api/tacticalrmm/agents/migrations/0026_auto_20201125_2334.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-25 23:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0025_auto_20201122_0407"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="recoveryaction",
|
||||
name="mode",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("salt", "Salt"),
|
||||
("mesh", "Mesh"),
|
||||
("command", "Command"),
|
||||
("rpc", "Nats RPC"),
|
||||
("checkrunner", "Checkrunner"),
|
||||
],
|
||||
default="mesh",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,3 @@
|
||||
import requests
|
||||
import datetime as dt
|
||||
import time
|
||||
import base64
|
||||
from Crypto.Cipher import AES
|
||||
@@ -7,13 +5,15 @@ from Crypto.Random import get_random_bytes
|
||||
from Crypto.Hash import SHA3_384
|
||||
from Crypto.Util.Padding import pad
|
||||
import validators
|
||||
import random
|
||||
import msgpack
|
||||
import re
|
||||
import string
|
||||
from collections import Counter
|
||||
from typing import List
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from distutils.version import LooseVersion
|
||||
from nats.aio.client import Client as NATS
|
||||
from nats.aio.errors import ErrTimeout
|
||||
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
@@ -44,9 +44,7 @@ class Agent(BaseAuditModel):
|
||||
boot_time = models.FloatField(null=True, blank=True)
|
||||
logged_in_username = models.CharField(null=True, blank=True, max_length=255)
|
||||
last_logged_in_user = models.CharField(null=True, blank=True, max_length=255)
|
||||
client = models.CharField(max_length=200)
|
||||
antivirus = models.CharField(default="n/a", max_length=255) # deprecated
|
||||
site = models.CharField(max_length=150)
|
||||
monitoring_type = models.CharField(max_length=30)
|
||||
description = models.CharField(null=True, blank=True, max_length=255)
|
||||
mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
|
||||
@@ -62,6 +60,13 @@ class Agent(BaseAuditModel):
|
||||
max_length=255, choices=TZ_CHOICES, null=True, blank=True
|
||||
)
|
||||
maintenance_mode = models.BooleanField(default=False)
|
||||
site = models.ForeignKey(
|
||||
"clients.Site",
|
||||
related_name="agents",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="agents",
|
||||
@@ -73,6 +78,18 @@ class Agent(BaseAuditModel):
|
||||
def __str__(self):
|
||||
return self.hostname
|
||||
|
||||
@property
|
||||
def client(self):
|
||||
return self.site.client
|
||||
|
||||
@property
|
||||
def has_nats(self):
|
||||
return pyver.parse(self.version) >= pyver.parse("1.1.0")
|
||||
|
||||
@property
|
||||
def has_gotasks(self):
|
||||
return pyver.parse(self.version) >= pyver.parse("1.1.1")
|
||||
|
||||
@property
|
||||
def timezone(self):
|
||||
# return the default timezone unless the timezone is explicity set per agent
|
||||
@@ -86,9 +103,9 @@ class Agent(BaseAuditModel):
|
||||
@property
|
||||
def arch(self):
|
||||
if self.operating_system is not None:
|
||||
if "64 bit" in self.operating_system:
|
||||
if "64 bit" in self.operating_system or "64bit" in self.operating_system:
|
||||
return "64"
|
||||
elif "32 bit" in self.operating_system:
|
||||
elif "32 bit" in self.operating_system or "32bit" in self.operating_system:
|
||||
return "32"
|
||||
return None
|
||||
|
||||
@@ -100,14 +117,6 @@ class Agent(BaseAuditModel):
|
||||
return settings.DL_32
|
||||
return None
|
||||
|
||||
@property
|
||||
def winsalt_dl(self):
|
||||
if self.arch == "64":
|
||||
return settings.SALT_64
|
||||
elif self.arch == "32":
|
||||
return settings.SALT_32
|
||||
return None
|
||||
|
||||
@property
|
||||
def win_inno_exe(self):
|
||||
if self.arch == "64":
|
||||
@@ -133,11 +142,7 @@ class Agent(BaseAuditModel):
|
||||
|
||||
@property
|
||||
def has_patches_pending(self):
|
||||
|
||||
if self.winupdates.filter(action="approve").filter(installed=False).exists():
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return self.winupdates.filter(action="approve").filter(installed=False).exists()
|
||||
|
||||
@property
|
||||
def checks(self):
|
||||
@@ -151,13 +156,11 @@ class Agent(BaseAuditModel):
|
||||
elif i.status == "failing":
|
||||
failing += 1
|
||||
|
||||
has_failing_checks = True if failing > 0 else False
|
||||
|
||||
ret = {
|
||||
"total": total,
|
||||
"passing": passing,
|
||||
"failing": failing,
|
||||
"has_failing_checks": has_failing_checks,
|
||||
"has_failing_checks": failing > 0,
|
||||
}
|
||||
return ret
|
||||
|
||||
@@ -281,11 +284,9 @@ class Agent(BaseAuditModel):
|
||||
|
||||
# returns agent policy merged with a client or site specific policy
|
||||
def get_patch_policy(self):
|
||||
from clients.models import Client, Site
|
||||
|
||||
# check if site has a patch policy and if so use it
|
||||
client = Client.objects.get(client=self.client)
|
||||
site = Site.objects.get(client=client, site=self.site)
|
||||
site = self.site
|
||||
core_settings = CoreSettings.objects.first()
|
||||
patch_policy = None
|
||||
agent_policy = self.winupdatepolicy.get()
|
||||
@@ -373,14 +374,15 @@ class Agent(BaseAuditModel):
|
||||
|
||||
return patch_policy
|
||||
|
||||
# clear is used to delete managed policy checks from agent
|
||||
# parent_checks specifies a list of checks to delete from agent with matching parent_check field
|
||||
def generate_checks_from_policies(self, clear=False):
|
||||
from automation.models import Policy
|
||||
def get_approved_update_guids(self) -> List[str]:
|
||||
return list(
|
||||
self.winupdates.filter(action="approve", installed=False).values_list(
|
||||
"guid", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
# Clear agent checks managed by policy
|
||||
if clear:
|
||||
self.agentchecks.filter(managed_by_policy=True).delete()
|
||||
def generate_checks_from_policies(self):
|
||||
from automation.models import Policy
|
||||
|
||||
# Clear agent checks that have overriden_by_policy set
|
||||
self.agentchecks.update(overriden_by_policy=False)
|
||||
@@ -388,17 +390,9 @@ class Agent(BaseAuditModel):
|
||||
# Generate checks based on policies
|
||||
Policy.generate_policy_checks(self)
|
||||
|
||||
# clear is used to delete managed policy tasks from agent
|
||||
# parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
|
||||
def generate_tasks_from_policies(self, clear=False):
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
def generate_tasks_from_policies(self):
|
||||
from automation.models import Policy
|
||||
|
||||
# Clear agent tasks managed by policy
|
||||
if clear:
|
||||
for task in self.autotasks.filter(managed_by_policy=True):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
|
||||
# Generate tasks based on policies
|
||||
Policy.generate_policy_tasks(self)
|
||||
|
||||
@@ -426,76 +420,36 @@ class Agent(BaseAuditModel):
|
||||
except Exception:
|
||||
return "err"
|
||||
|
||||
def salt_api_cmd(self, **kwargs):
|
||||
|
||||
# salt should always timeout first before the requests' timeout
|
||||
async def nats_cmd(self, data, timeout=30, wait=True):
|
||||
nc = NATS()
|
||||
options = {
|
||||
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"user": "tacticalrmm",
|
||||
"password": settings.SECRET_KEY,
|
||||
"connect_timeout": 3,
|
||||
"max_reconnect_attempts": 2,
|
||||
}
|
||||
try:
|
||||
timeout = kwargs["timeout"]
|
||||
except KeyError:
|
||||
# default timeout
|
||||
timeout = 15
|
||||
salt_timeout = 12
|
||||
else:
|
||||
if timeout < 8:
|
||||
timeout = 8
|
||||
salt_timeout = 5
|
||||
await nc.connect(**options)
|
||||
except:
|
||||
return "natsdown"
|
||||
|
||||
if wait:
|
||||
try:
|
||||
msg = await nc.request(
|
||||
self.agent_id, msgpack.dumps(data), timeout=timeout
|
||||
)
|
||||
except ErrTimeout:
|
||||
ret = "timeout"
|
||||
else:
|
||||
salt_timeout = timeout - 3
|
||||
ret = msgpack.loads(msg.data)
|
||||
|
||||
json = {
|
||||
"client": "local",
|
||||
"tgt": self.salt_id,
|
||||
"fun": kwargs["func"],
|
||||
"timeout": salt_timeout,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[json],
|
||||
timeout=timeout,
|
||||
)
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
try:
|
||||
ret = resp.json()["return"][0][self.salt_id]
|
||||
except Exception as e:
|
||||
logger.error(f"{self.salt_id}: {e}")
|
||||
return "error"
|
||||
else:
|
||||
await nc.close()
|
||||
return ret
|
||||
|
||||
def salt_api_async(self, **kwargs):
|
||||
|
||||
json = {
|
||||
"client": "local_async",
|
||||
"tgt": self.salt_id,
|
||||
"fun": kwargs["func"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
return resp
|
||||
else:
|
||||
await nc.publish(self.agent_id, msgpack.dumps(data))
|
||||
await nc.flush()
|
||||
await nc.close()
|
||||
|
||||
@staticmethod
|
||||
def serialize(agent):
|
||||
@@ -504,92 +458,9 @@ class Agent(BaseAuditModel):
|
||||
|
||||
ret = AgentEditSerializer(agent).data
|
||||
del ret["all_timezones"]
|
||||
del ret["client"]
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def salt_batch_async(**kwargs):
|
||||
assert isinstance(kwargs["minions"], list)
|
||||
|
||||
json = {
|
||||
"client": "local_async",
|
||||
"tgt_type": "list",
|
||||
"tgt": kwargs["minions"],
|
||||
"fun": kwargs["func"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
return resp
|
||||
|
||||
def schedule_reboot(self, obj):
|
||||
|
||||
start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
|
||||
start_time = dt.datetime.strftime(obj, "%H:%M")
|
||||
|
||||
# let windows task scheduler automatically delete the task after it runs
|
||||
end_obj = obj + dt.timedelta(minutes=15)
|
||||
end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
|
||||
end_time = dt.datetime.strftime(end_obj, "%H:%M")
|
||||
|
||||
task_name = "TacticalRMM_SchedReboot_" + "".join(
|
||||
random.choice(string.ascii_letters) for _ in range(10)
|
||||
)
|
||||
|
||||
r = self.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Windows\\System32\\shutdown.exe"',
|
||||
'arguments="/r /t 5 /f"',
|
||||
"trigger_type=Once",
|
||||
f'start_date="{start_date}"',
|
||||
f'start_time="{start_time}"',
|
||||
f'end_date="{end_date}"',
|
||||
f'end_time="{end_time}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"delete_after=Immediately",
|
||||
],
|
||||
)
|
||||
|
||||
if r == "error" or (isinstance(r, bool) and not r):
|
||||
return "failed"
|
||||
elif r == "timeout":
|
||||
return "timeout"
|
||||
elif isinstance(r, bool) and r:
|
||||
from logs.models import PendingAction
|
||||
|
||||
details = {
|
||||
"taskname": task_name,
|
||||
"time": str(obj),
|
||||
}
|
||||
PendingAction(agent=self, action_type="schedreboot", details=details).save()
|
||||
|
||||
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
return {"msg": {"time": nice_time, "agent": self.hostname}}
|
||||
else:
|
||||
return "failed"
|
||||
|
||||
def not_supported(self, version_added):
|
||||
if pyver.parse(self.version) < pyver.parse(version_added):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def delete_superseded_updates(self):
|
||||
try:
|
||||
pks = [] # list of pks to delete
|
||||
@@ -642,6 +513,13 @@ class Agent(BaseAuditModel):
|
||||
elif action.details["action"] == "taskdelete":
|
||||
delete_win_task_schedule.delay(task_id, pending_action=action.id)
|
||||
|
||||
# for clearing duplicate pending actions on agent
|
||||
def remove_matching_pending_task_actions(self, task_id):
|
||||
# remove any other pending actions on agent with same task_id
|
||||
for action in self.pendingactions.exclude(status="completed"):
|
||||
if action.details["task_id"] == task_id:
|
||||
action.delete()
|
||||
|
||||
|
||||
class AgentOutage(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
@@ -667,10 +545,10 @@ class AgentOutage(models.Model):
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_mail(
|
||||
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data overdue",
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue",
|
||||
(
|
||||
f"Data has not been received from client {self.agent.client}, "
|
||||
f"site {self.agent.site}, "
|
||||
f"Data has not been received from client {self.agent.client.name}, "
|
||||
f"site {self.agent.site.name}, "
|
||||
f"agent {self.agent.hostname} "
|
||||
"within the expected time."
|
||||
),
|
||||
@@ -681,10 +559,10 @@ class AgentOutage(models.Model):
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_mail(
|
||||
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data received",
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received",
|
||||
(
|
||||
f"Data has been received from client {self.agent.client}, "
|
||||
f"site {self.agent.site}, "
|
||||
f"Data has been received from client {self.agent.client.name}, "
|
||||
f"site {self.agent.site.name}, "
|
||||
f"agent {self.agent.hostname} "
|
||||
"after an interruption in data transmission."
|
||||
),
|
||||
@@ -695,7 +573,7 @@ class AgentOutage(models.Model):
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data overdue"
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue"
|
||||
)
|
||||
|
||||
def send_recovery_sms(self):
|
||||
@@ -703,7 +581,7 @@ class AgentOutage(models.Model):
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
f"{self.agent.client}, {self.agent.site}, {self.agent.hostname} - data received"
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received"
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
@@ -714,6 +592,8 @@ RECOVERY_CHOICES = [
|
||||
("salt", "Salt"),
|
||||
("mesh", "Mesh"),
|
||||
("command", "Command"),
|
||||
("rpc", "Nats RPC"),
|
||||
("checkrunner", "Checkrunner"),
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import pytz
|
||||
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import ReadOnlyField
|
||||
|
||||
from .models import Agent, Note
|
||||
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from clients.serializers import ClientSerializer
|
||||
|
||||
|
||||
class AgentSerializer(serializers.ModelSerializer):
|
||||
@@ -19,6 +21,8 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
checks = serializers.ReadOnlyField()
|
||||
timezone = serializers.ReadOnlyField()
|
||||
all_timezones = serializers.SerializerMethodField()
|
||||
client_name = serializers.ReadOnlyField(source="client.name")
|
||||
site_name = serializers.ReadOnlyField(source="site.name")
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
@@ -30,19 +34,44 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
]
|
||||
|
||||
|
||||
class AgentOverdueActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = ["pk", "overdue_email_alert", "overdue_text_alert"]
|
||||
|
||||
|
||||
class AgentTableSerializer(serializers.ModelSerializer):
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
pending_actions = serializers.SerializerMethodField()
|
||||
status = serializers.ReadOnlyField()
|
||||
checks = serializers.ReadOnlyField()
|
||||
last_seen = serializers.SerializerMethodField()
|
||||
client_name = serializers.ReadOnlyField(source="client.name")
|
||||
site_name = serializers.ReadOnlyField(source="site.name")
|
||||
logged_username = serializers.SerializerMethodField()
|
||||
italic = serializers.SerializerMethodField()
|
||||
|
||||
def get_last_seen(self, obj):
|
||||
def get_pending_actions(self, obj):
|
||||
return obj.pendingactions.filter(status="pending").count()
|
||||
|
||||
def get_last_seen(self, obj) -> str:
|
||||
if obj.time_zone is not None:
|
||||
agent_tz = pytz.timezone(obj.time_zone)
|
||||
else:
|
||||
agent_tz = self.context["default_tz"]
|
||||
|
||||
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
|
||||
return obj.last_seen.astimezone(agent_tz).timestamp()
|
||||
|
||||
def get_logged_username(self, obj) -> str:
|
||||
if obj.logged_in_username == "None" and obj.status == "online":
|
||||
return obj.last_logged_in_user
|
||||
elif obj.logged_in_username != "None":
|
||||
return obj.logged_in_username
|
||||
else:
|
||||
return "-"
|
||||
|
||||
def get_italic(self, obj) -> bool:
|
||||
return obj.logged_in_username == "None" and obj.status == "online"
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
@@ -50,27 +79,30 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"id",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"client",
|
||||
"site",
|
||||
"site_name",
|
||||
"client_name",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"needs_reboot",
|
||||
"patches_pending",
|
||||
"pending_actions",
|
||||
"status",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"checks",
|
||||
"logged_in_username",
|
||||
"last_logged_in_user",
|
||||
"maintenance_mode",
|
||||
"logged_username",
|
||||
"italic",
|
||||
]
|
||||
depth = 2
|
||||
|
||||
|
||||
class AgentEditSerializer(serializers.ModelSerializer):
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
all_timezones = serializers.SerializerMethodField()
|
||||
client = ClientSerializer(read_only=True)
|
||||
|
||||
def get_all_timezones(self, obj):
|
||||
return pytz.all_timezones
|
||||
@@ -107,6 +139,9 @@ class WinAgentSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class AgentHostnameSerializer(serializers.ModelSerializer):
|
||||
client = serializers.ReadOnlyField(source="client.name")
|
||||
site = serializers.ReadOnlyField(source="site.name")
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = (
|
||||
|
||||
@@ -1,234 +1,116 @@
|
||||
import asyncio
|
||||
from loguru import logger
|
||||
from time import sleep
|
||||
import random
|
||||
import requests
|
||||
from packaging import version as pyver
|
||||
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from scripts.models import Script
|
||||
|
||||
from tacticalrmm.celery import app
|
||||
from agents.models import Agent, AgentOutage
|
||||
from core.models import CoreSettings
|
||||
from logs.models import PendingAction
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe"
|
||||
OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe"
|
||||
|
||||
def agent_update(pk: int) -> str:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
logger.warning(f"Unable to determine arch on {agent.hostname}. Skipping.")
|
||||
return "noarch"
|
||||
|
||||
@app.task
|
||||
def send_agent_update_task(pks, version):
|
||||
assert isinstance(pks, list)
|
||||
# removed sqlite in 1.4.0 to get rid of cgo dependency
|
||||
# 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
|
||||
version = settings.LATEST_AGENT_VER
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
else:
|
||||
version = "1.3.0"
|
||||
inno = (
|
||||
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
|
||||
)
|
||||
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
|
||||
|
||||
q = Agent.objects.filter(pk__in=pks)
|
||||
agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]
|
||||
if agent.has_nats:
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
action = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
if pyver.parse(action.details["version"]) < pyver.parse(version):
|
||||
action.delete()
|
||||
else:
|
||||
return "pending"
|
||||
|
||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
||||
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
continue
|
||||
|
||||
# golang agent only backwards compatible with py agent 0.11.2
|
||||
# force an upgrade to the latest python agent if version < 0.11.2
|
||||
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
|
||||
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
|
||||
inno = (
|
||||
"winagent-v0.11.2.exe"
|
||||
if agent.arch == "64"
|
||||
else "winagent-v0.11.2-x86.exe"
|
||||
)
|
||||
else:
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
)
|
||||
sleep(10)
|
||||
else:
|
||||
nats_data = {
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
}
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
return "created"
|
||||
else:
|
||||
logger.warning(
|
||||
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to update."
|
||||
)
|
||||
|
||||
return "not supported"
|
||||
|
||||
|
||||
@app.task
|
||||
def auto_self_agent_update_task(test=False):
|
||||
def send_agent_update_task(pks: List[int], version: str) -> None:
|
||||
q = Agent.objects.filter(pk__in=pks)
|
||||
agents: List[int] = [
|
||||
i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)
|
||||
]
|
||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
|
||||
@app.task
|
||||
def auto_self_agent_update_task() -> None:
|
||||
core = CoreSettings.objects.first()
|
||||
if not core.agent_auto_update:
|
||||
return
|
||||
|
||||
q = Agent.objects.only("pk", "version")
|
||||
agents = [
|
||||
pks: List[int] = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
|
||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
||||
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
continue
|
||||
|
||||
# golang agent only backwards compatible with py agent 0.11.2
|
||||
# force an upgrade to the latest python agent if version < 0.11.2
|
||||
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
|
||||
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
|
||||
inno = (
|
||||
"winagent-v0.11.2.exe"
|
||||
if agent.arch == "64"
|
||||
else "winagent-v0.11.2-x86.exe"
|
||||
)
|
||||
else:
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
"url": url,
|
||||
},
|
||||
)
|
||||
if not test:
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def update_salt_minion_task():
|
||||
q = Agent.objects.all()
|
||||
agents = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) >= pyver.parse("0.11.0")
|
||||
and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
|
||||
]
|
||||
|
||||
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
||||
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_async(func="win_agent.update_salt")
|
||||
sleep(20)
|
||||
|
||||
|
||||
@app.task
|
||||
def get_wmi_detail_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def sync_salt_modules_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
|
||||
# successful sync if new/charnged files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
|
||||
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
|
||||
if r == "timeout" or r == "error":
|
||||
return f"Unable to sync modules {agent.salt_id}"
|
||||
|
||||
return f"Successfully synced salt modules on {agent.hostname}"
|
||||
|
||||
|
||||
@app.task
|
||||
def batch_sync_modules_task():
|
||||
# sync modules, split into chunks of 50 agents to not overload salt
|
||||
agents = Agent.objects.all()
|
||||
online = [i.salt_id for i in agents if i.status == "online"]
|
||||
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def batch_sysinfo_task():
|
||||
# update system info using WMI
|
||||
agents = Agent.objects.all()
|
||||
online = [
|
||||
i.salt_id
|
||||
for i in agents
|
||||
if not i.not_supported("0.11.0") and i.status == "online"
|
||||
]
|
||||
chunks = (online[i : i + 30] for i in range(0, len(online), 30))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="win_agent.local_sys_info")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def uninstall_agent_task(salt_id):
|
||||
attempts = 0
|
||||
error = False
|
||||
|
||||
while 1:
|
||||
try:
|
||||
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "local",
|
||||
"tgt": salt_id,
|
||||
"fun": "win_agent.uninstall_agent",
|
||||
"timeout": 8,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=10,
|
||||
)
|
||||
ret = r.json()["return"][0][salt_id]
|
||||
except Exception:
|
||||
attempts += 1
|
||||
else:
|
||||
if ret != "ok":
|
||||
attempts += 1
|
||||
else:
|
||||
attempts = 0
|
||||
|
||||
if attempts >= 10:
|
||||
error = True
|
||||
break
|
||||
elif attempts == 0:
|
||||
break
|
||||
|
||||
if error:
|
||||
logger.error(f"{salt_id} uninstall failed")
|
||||
else:
|
||||
logger.info(f"{salt_id} was successfully uninstalled")
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "wheel",
|
||||
"fun": "key.delete",
|
||||
"match": salt_id,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
logger.error(f"{salt_id} unable to remove salt-key")
|
||||
|
||||
return "ok"
|
||||
agent_update(pk)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -269,19 +151,104 @@ def agent_recovery_sms_task(pk):
|
||||
|
||||
@app.task
|
||||
def agent_outages_task():
|
||||
agents = Agent.objects.only("pk")
|
||||
agents = Agent.objects.only(
|
||||
"pk", "last_seen", "overdue_time", "overdue_email_alert", "overdue_text_alert"
|
||||
)
|
||||
|
||||
for agent in agents:
|
||||
if agent.status == "overdue":
|
||||
outages = AgentOutage.objects.filter(agent=agent)
|
||||
if outages and outages.last().is_active:
|
||||
continue
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
outages = AgentOutage.objects.filter(agent=agent)
|
||||
if outages and outages.last().is_active:
|
||||
continue
|
||||
|
||||
outage = AgentOutage(agent=agent)
|
||||
outage.save()
|
||||
outage = AgentOutage(agent=agent)
|
||||
outage.save()
|
||||
|
||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
||||
agent_outage_email_task.delay(pk=outage.pk)
|
||||
# add a null check history to allow gaps in graph
|
||||
for check in agent.agentchecks.all():
|
||||
check.add_check_history(None)
|
||||
|
||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
||||
agent_outage_sms_task.delay(pk=outage.pk)
|
||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
||||
agent_outage_email_task.delay(pk=outage.pk)
|
||||
|
||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
||||
agent_outage_sms_task.delay(pk=outage.pk)
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_agent_recovery_task(pk: int) -> None:
|
||||
sleep(10)
|
||||
from agents.models import RecoveryAction
|
||||
|
||||
action = RecoveryAction.objects.get(pk=pk)
|
||||
if action.mode == "command":
|
||||
data = {"func": "recoverycmd", "recoverycommand": action.command}
|
||||
else:
|
||||
data = {"func": "recover", "payload": {"mode": action.mode}}
|
||||
|
||||
asyncio.run(action.agent.nats_cmd(data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def run_script_email_results_task(
|
||||
agentpk: int, scriptpk: int, nats_timeout: int, nats_data: dict, emails: List[str]
|
||||
):
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
nats_data["func"] = "runscriptfull"
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=nats_timeout))
|
||||
if r == "timeout":
|
||||
logger.error(f"{agent.hostname} timed out running script.")
|
||||
return
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
subject = f"{agent.hostname} {script.name} Results"
|
||||
exec_time = "{:.4f}".format(r["execution_time"])
|
||||
body = (
|
||||
subject
|
||||
+ f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
|
||||
)
|
||||
|
||||
import smtplib
|
||||
from email.message import EmailMessage
|
||||
|
||||
msg = EmailMessage()
|
||||
msg["Subject"] = subject
|
||||
msg["From"] = CORE.smtp_from_email
|
||||
|
||||
if emails:
|
||||
msg["To"] = ", ".join(emails)
|
||||
else:
|
||||
msg["To"] = ", ".join(CORE.email_alert_recipients)
|
||||
|
||||
msg.set_content(body)
|
||||
|
||||
try:
|
||||
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
|
||||
if CORE.smtp_requires_auth:
|
||||
server.ehlo()
|
||||
server.starttls()
|
||||
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
else:
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
|
||||
@app.task
|
||||
def remove_salt_task() -> None:
|
||||
if hasattr(settings, "KEEP_SALT") and settings.KEEP_SALT:
|
||||
return
|
||||
|
||||
q = Agent.objects.only("pk", "version")
|
||||
agents = [i for i in q if pyver.parse(i.version) >= pyver.parse("1.3.0")]
|
||||
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
||||
for chunk in chunks:
|
||||
for agent in chunk:
|
||||
asyncio.run(agent.nats_cmd({"func": "removesalt"}, wait=False))
|
||||
sleep(0.1)
|
||||
sleep(4)
|
||||
|
||||
@@ -5,21 +5,35 @@ from unittest.mock import patch
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
from django.test import TestCase, override_settings
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import PendingAction
|
||||
|
||||
from tacticalrmm.test import BaseTestCase, TacticalTestCase
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import AgentSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from .models import Agent
|
||||
from .tasks import auto_self_agent_update_task, OLD_64_PY_AGENT, OLD_32_PY_AGENT
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
|
||||
|
||||
class TestAgentViews(BaseTestCase):
|
||||
class TestAgentViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
client = baker.make("clients.Client", name="Google")
|
||||
site = baker.make("clients.Site", client=client, name="LA Office")
|
||||
self.agent = baker.make_recipe(
|
||||
"agents.online_agent", site=site, version="1.1.1"
|
||||
)
|
||||
baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
|
||||
|
||||
def test_get_patch_policy(self):
|
||||
# make sure get_patch_policy doesn't error out when agent has policy with
|
||||
# an empty patch policy
|
||||
self.agent.policy = self.policy
|
||||
policy = baker.make("automation.Policy")
|
||||
self.agent.policy = policy
|
||||
self.agent.save(update_fields=["policy"])
|
||||
_ = self.agent.get_patch_policy()
|
||||
|
||||
@@ -30,8 +44,8 @@ class TestAgentViews(BaseTestCase):
|
||||
self.agent.policy = None
|
||||
self.agent.save(update_fields=["policy"])
|
||||
|
||||
self.coresettings.server_policy = self.policy
|
||||
self.coresettings.workstation_policy = self.policy
|
||||
self.coresettings.server_policy = policy
|
||||
self.coresettings.workstation_policy = policy
|
||||
self.coresettings.save(update_fields=["server_policy", "workstation_policy"])
|
||||
_ = self.agent.get_patch_policy()
|
||||
|
||||
@@ -59,29 +73,29 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_ping(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_ping(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/ping/"
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "natsdown"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "pong"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "online"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "asdasjdaksdasd"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
@@ -89,35 +103,29 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
def test_uninstall(self, mock_task):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.views.reload_nats")
|
||||
def test_uninstall(self, reload_nats, nats_cmd):
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
r = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(self.agent.salt_id)
|
||||
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
|
||||
reload_nats.assert_called_once()
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
def test_uninstall_catch_no_user(self, mock_task):
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
self.agent_user.delete()
|
||||
|
||||
r = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(self.agent.salt_id)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_processes(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/getprocs/"
|
||||
agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
|
||||
url_old = f"/agents/{agent_old.pk}/getprocs/"
|
||||
r = self.client.get(url_old)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
agent = baker.make_recipe("agents.online_agent", version="1.2.0")
|
||||
url = f"/agents/{agent.pk}/getprocs/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
|
||||
@@ -127,90 +135,93 @@ class TestAgentViews(BaseTestCase):
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
assert any(i["name"] == "Registry" for i in mock_ret.return_value)
|
||||
assert any(
|
||||
i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value
|
||||
)
|
||||
assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_kill_proc(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_kill_proc(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/8234/killproc/"
|
||||
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "process doesn't exist"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_get_event_log(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/eventlograw.json")
|
||||
) as f:
|
||||
mock_ret.return_value = json.load(f)
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_event_log(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Application/22/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
|
||||
) as f:
|
||||
decoded = json.load(f)
|
||||
nats_cmd.return_value = json.load(f)
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(decoded, r.json())
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "eventlog",
|
||||
"timeout": 30,
|
||||
"payload": {
|
||||
"logname": "Application",
|
||||
"days": str(22),
|
||||
},
|
||||
},
|
||||
timeout=32,
|
||||
)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Security/6/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "eventlog",
|
||||
"timeout": 180,
|
||||
"payload": {
|
||||
"logname": "Security",
|
||||
"days": str(6),
|
||||
},
|
||||
},
|
||||
timeout=182,
|
||||
)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_power_action(self, mock_ret):
|
||||
url = f"/agents/poweraction/"
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_reboot_now(self, nats_cmd):
|
||||
url = f"/agents/reboot/"
|
||||
|
||||
data = {"pk": self.agent.pk, "action": "rebootnow"}
|
||||
mock_ret.return_value = True
|
||||
data = {"pk": self.agent.pk}
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "rebootnow"}, timeout=10)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_send_raw_cmd(self, mock_ret):
|
||||
url = f"/agents/sendrawcmd/"
|
||||
|
||||
@@ -229,36 +240,39 @@ class TestAgentViews(BaseTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_reboot_later(self, mock_ret):
|
||||
url = f"/agents/rebootlater/"
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_reboot_later(self, nats_cmd):
|
||||
url = f"/agents/reboot/"
|
||||
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"datetime": "2025-08-29 18:41",
|
||||
}
|
||||
|
||||
mock_ret.return_value = True
|
||||
r = self.client.post(url, data, format="json")
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
|
||||
self.assertEqual(r.data["agent"], self.agent.hostname)
|
||||
|
||||
mock_ret.return_value = "failed"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"trigger": "once",
|
||||
"name": r.data["task_name"],
|
||||
"year": 2025,
|
||||
"month": "August",
|
||||
"day": 29,
|
||||
"hour": 18,
|
||||
"min": 41,
|
||||
},
|
||||
}
|
||||
nats_cmd.assert_called_with(nats_data, timeout=10)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "error creating task"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -266,21 +280,22 @@ class TestAgentViews(BaseTestCase):
|
||||
"pk": self.agent.pk,
|
||||
"datetime": "rm -rf /",
|
||||
}
|
||||
r = self.client.post(url, data_invalid, format="json")
|
||||
r = self.client.patch(url, data_invalid, format="json")
|
||||
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "Invalid date")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("os.path.exists")
|
||||
@patch("subprocess.run")
|
||||
def test_install_agent(self, mock_subprocess, mock_file_exists):
|
||||
url = f"/agents/installagent/"
|
||||
|
||||
site = baker.make("clients.Site")
|
||||
data = {
|
||||
"client": "Google",
|
||||
"site": "LA Office",
|
||||
"client": site.client.id,
|
||||
"site": site.id,
|
||||
"arch": "64",
|
||||
"expires": 23,
|
||||
"installMethod": "exe",
|
||||
@@ -338,7 +353,7 @@ class TestAgentViews(BaseTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data["mode"] = "salt"
|
||||
data["mode"] = "mesh"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("pending", r.json())
|
||||
@@ -358,7 +373,7 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
self.agent.version = "0.9.4"
|
||||
self.agent.save(update_fields=["version"])
|
||||
data["mode"] = "salt"
|
||||
data["mode"] = "mesh"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("0.9.5", r.json())
|
||||
@@ -382,12 +397,14 @@ class TestAgentViews(BaseTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_edit_agent(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site", name="Ny Office")
|
||||
|
||||
url = "/agents/editagent/"
|
||||
|
||||
edit = {
|
||||
"id": self.agent.pk,
|
||||
"client": "Facebook",
|
||||
"site": "NY Office",
|
||||
"site": site.id,
|
||||
"monitoring_type": "workstation",
|
||||
"description": "asjdk234andasd",
|
||||
"overdue_time": 300,
|
||||
@@ -417,7 +434,7 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
data = AgentSerializer(agent).data
|
||||
self.assertEqual(data["site"], "NY Office")
|
||||
self.assertEqual(data["site"], site.id)
|
||||
|
||||
policy = WinUpdatePolicy.objects.get(agent=self.agent)
|
||||
data = WinUpdatePolicySerializer(policy).data
|
||||
@@ -438,9 +455,18 @@ class TestAgentViews(BaseTestCase):
|
||||
self.assertIn("&viewmode=13", r.data["file"])
|
||||
self.assertIn("&viewmode=12", r.data["terminal"])
|
||||
self.assertIn("&viewmode=11", r.data["control"])
|
||||
self.assertIn("mstsc.html?login=", r.data["webrdp"])
|
||||
|
||||
self.assertIn("&gotonode=", r.data["file"])
|
||||
self.assertIn("&gotonode=", r.data["terminal"])
|
||||
self.assertIn("&gotonode=", r.data["control"])
|
||||
|
||||
self.assertIn("?login=", r.data["file"])
|
||||
self.assertIn("?login=", r.data["terminal"])
|
||||
self.assertIn("?login=", r.data["control"])
|
||||
|
||||
self.assertEqual(self.agent.hostname, r.data["hostname"])
|
||||
self.assertEqual(self.agent.client.name, r.data["client"])
|
||||
self.assertEqual(self.agent.site.name, r.data["site"])
|
||||
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -451,70 +477,48 @@ class TestAgentViews(BaseTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_by_client(self):
|
||||
url = "/agents/byclient/Google/"
|
||||
url = f"/agents/byclient/{self.agent.client.id}/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(r.data)
|
||||
|
||||
url = f"/agents/byclient/Majh3 Akj34 ad/"
|
||||
url = f"/agents/byclient/500/"
|
||||
r = self.client.get(url)
|
||||
self.assertFalse(r.data) # returns empty list
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_by_site(self):
|
||||
url = f"/agents/bysite/Google/Main Office/"
|
||||
url = f"/agents/bysite/{self.agent.site.id}/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(r.data)
|
||||
|
||||
url = f"/agents/bysite/Google/Ajdaksd Office/"
|
||||
url = f"/agents/bysite/500/"
|
||||
r = self.client.get(url)
|
||||
self.assertFalse(r.data)
|
||||
self.assertEqual(r.data, [])
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_overdue_action(self):
|
||||
url = "/agents/overdueaction/"
|
||||
|
||||
payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
|
||||
payload = {"pk": self.agent.pk, "overdue_email_alert": True}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertTrue(agent.overdue_email_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "email", "action": "disabled"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertFalse(agent.overdue_email_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "text", "action": "enabled"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertTrue(agent.overdue_text_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "text", "action": "disabled"})
|
||||
payload = {"pk": self.agent.pk, "overdue_text_alert": False}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertFalse(agent.overdue_text_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "email", "action": "523423"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_list_agents_no_detail(self):
|
||||
@@ -535,15 +539,18 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
""" @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_script_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_command_task.delay")
|
||||
@patch("agents.models.Agent.salt_batch_async")
|
||||
def test_bulk_cmd_script(self, mock_ret, mock_update):
|
||||
def test_bulk_cmd_script(
|
||||
self, salt_batch_async, bulk_command, bulk_script, mock_update
|
||||
):
|
||||
url = "/agents/bulk/"
|
||||
|
||||
mock_ret.return_value = "ok"
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "all",
|
||||
"target": "agents",
|
||||
"client": None,
|
||||
"site": None,
|
||||
@@ -556,10 +563,12 @@ class TestAgentViews(BaseTestCase):
|
||||
}
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "servers",
|
||||
"target": "agents",
|
||||
"client": None,
|
||||
"site": None,
|
||||
@@ -574,12 +583,11 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "workstations",
|
||||
"target": "client",
|
||||
"client": "Google",
|
||||
"client": self.agent.client.id,
|
||||
"site": None,
|
||||
"agentPKs": [
|
||||
self.agent.pk,
|
||||
],
|
||||
"agentPKs": [],
|
||||
"cmd": "gpupdate /force",
|
||||
"timeout": 300,
|
||||
"shell": "cmd",
|
||||
@@ -587,12 +595,14 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "all",
|
||||
"target": "client",
|
||||
"client": "Google",
|
||||
"site": "Main Office",
|
||||
"client": self.agent.client.id,
|
||||
"site": self.agent.site.id,
|
||||
"agentPKs": [
|
||||
self.agent.pk,
|
||||
],
|
||||
@@ -603,31 +613,11 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"target": "site",
|
||||
"client": "A ASJDHkjASHDASD",
|
||||
"site": "asdasdasdasda",
|
||||
"agentPKs": [
|
||||
self.agent.pk,
|
||||
],
|
||||
"cmd": "gpupdate /force",
|
||||
"timeout": 300,
|
||||
"shell": "cmd",
|
||||
}
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
payload["client"] = "Google"
|
||||
payload["site"] = "Main Office"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "scan",
|
||||
"monType": "all",
|
||||
"target": "agents",
|
||||
"client": None,
|
||||
"site": None,
|
||||
@@ -635,20 +625,21 @@ class TestAgentViews(BaseTestCase):
|
||||
self.agent.pk,
|
||||
],
|
||||
}
|
||||
mock_ret.return_value = "ok"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
mock_update.assert_called_once()
|
||||
mock_update.assert_called_with(minions=[self.agent.salt_id])
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
"mode": "install",
|
||||
"monType": "all",
|
||||
"target": "client",
|
||||
"client": "Google",
|
||||
"client": self.agent.client.id,
|
||||
"site": None,
|
||||
"agentPKs": [
|
||||
self.agent.pk,
|
||||
],
|
||||
}
|
||||
salt_batch_async.return_value = "ok"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -662,43 +653,20 @@ class TestAgentViews(BaseTestCase):
|
||||
|
||||
# TODO mock the script
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("post", url) """
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_restart_mesh(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/restartmesh/"
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = True
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_recover_mesh(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_recover_mesh(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/recovermesh/"
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn(self.agent.hostname, r.data)
|
||||
nats_cmd.assert_called_with(
|
||||
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
|
||||
)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -712,7 +680,6 @@ class TestAgentViews(BaseTestCase):
|
||||
class TestAgentViewsNew(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_agent_counts(self):
|
||||
url = "/agents/agent_counts/"
|
||||
@@ -748,13 +715,13 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
|
||||
def test_agent_maintenance_mode(self):
|
||||
url = "/agents/maintenance/"
|
||||
# create data
|
||||
client = baker.make("clients.Client", client="Default")
|
||||
site = baker.make("clients.Site", client=client, site="Site")
|
||||
agent = baker.make_recipe("agents.agent", client=client.client, site=site.site)
|
||||
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
# Test client toggle maintenance mode
|
||||
data = {"type": "Client", "id": client.id, "action": True}
|
||||
data = {"type": "Client", "id": site.client.id, "action": True}
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
@@ -782,8 +749,91 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
def test_auto_self_agent_update_task(self, salt_api_async):
|
||||
|
||||
class TestAgentTasks(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_agent_update(self, nats_cmd):
|
||||
from agents.tasks import agent_update
|
||||
|
||||
agent_noarch = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Error getting OS",
|
||||
version="1.1.11",
|
||||
)
|
||||
r = agent_update(agent_noarch.pk)
|
||||
self.assertEqual(r, "noarch")
|
||||
self.assertEqual(
|
||||
PendingAction.objects.filter(
|
||||
agent=agent_noarch, action_type="agentupdate"
|
||||
).count(),
|
||||
0,
|
||||
)
|
||||
|
||||
agent64_111 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.11",
|
||||
)
|
||||
|
||||
r = agent_update(agent64_111.pk)
|
||||
self.assertEqual(r, "created")
|
||||
action = PendingAction.objects.get(agent__pk=agent64_111.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
self.assertEqual(
|
||||
action.details["url"],
|
||||
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
)
|
||||
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
||||
self.assertEqual(action.details["version"], "1.3.0")
|
||||
|
||||
agent_64_130 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.3.0",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
r = agent_update(agent_64_130.pk)
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": settings.DL_64,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
|
||||
agent64_old = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.2.1",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
r = agent_update(agent64_old.pk)
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
"version": "1.3.0",
|
||||
"inno": "winagent-v1.3.0.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
|
||||
""" @patch("agents.models.Agent.salt_api_async")
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
||||
# test 64bit golang agent
|
||||
self.agent64 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
@@ -791,7 +841,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
version="1.0.0",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s(test=True).apply()
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
@@ -810,7 +860,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
version="1.0.0",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s(test=True).apply()
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
@@ -828,7 +878,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
operating_system=None,
|
||||
version="1.0.0",
|
||||
)
|
||||
ret = auto_self_agent_update_task.s(test=True).apply()
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_not_called()
|
||||
self.agentNone.delete()
|
||||
salt_api_async.reset_mock()
|
||||
@@ -841,7 +891,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
)
|
||||
self.coresettings.agent_auto_update = False
|
||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||
ret = auto_self_agent_update_task.s(test=True).apply()
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_not_called()
|
||||
|
||||
# reset core settings
|
||||
@@ -857,7 +907,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
version="0.11.1",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s(test=True).apply()
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
@@ -876,7 +926,7 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
version="0.11.1",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s(test=True).apply()
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
@@ -884,4 +934,4 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
"url": OLD_32_PY_AGENT,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.assertEqual(ret.status, "SUCCESS") """
|
||||
|
||||
@@ -5,14 +5,13 @@ urlpatterns = [
|
||||
path("listagents/", views.AgentsTableList.as_view()),
|
||||
path("listagentsnodetail/", views.list_agents_no_detail),
|
||||
path("<int:pk>/agenteditdetails/", views.agent_edit_details),
|
||||
path("byclient/<client>/", views.by_client),
|
||||
path("bysite/<client>/<site>/", views.by_site),
|
||||
path("byclient/<int:clientpk>/", views.by_client),
|
||||
path("bysite/<int:sitepk>/", views.by_site),
|
||||
path("overdueaction/", views.overdue_action),
|
||||
path("sendrawcmd/", views.send_raw_cmd),
|
||||
path("<pk>/agentdetail/", views.agent_detail),
|
||||
path("<int:pk>/meshcentral/", views.meshcentral),
|
||||
path("<str:arch>/getmeshexe/", views.get_mesh_exe),
|
||||
path("poweraction/", views.power_action),
|
||||
path("uninstall/", views.uninstall),
|
||||
path("editagent/", views.edit_agent),
|
||||
path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
|
||||
@@ -20,16 +19,16 @@ urlpatterns = [
|
||||
path("updateagents/", views.update_agents),
|
||||
path("<pk>/getprocs/", views.get_processes),
|
||||
path("<pk>/<pid>/killproc/", views.kill_proc),
|
||||
path("rebootlater/", views.reboot_later),
|
||||
path("reboot/", views.Reboot.as_view()),
|
||||
path("installagent/", views.install_agent),
|
||||
path("<int:pk>/ping/", views.ping),
|
||||
path("recover/", views.recover),
|
||||
path("runscript/", views.run_script),
|
||||
path("<int:pk>/restartmesh/", views.restart_mesh),
|
||||
path("<int:pk>/recovermesh/", views.recover_mesh),
|
||||
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
|
||||
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
|
||||
path("bulk/", views.bulk),
|
||||
path("agent_counts/", views.agent_counts),
|
||||
path("maintenance/", views.agent_maintenance),
|
||||
path("<int:pk>/wmi/", views.WMI.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
import asyncio
|
||||
from loguru import logger
|
||||
import os
|
||||
import subprocess
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import pytz
|
||||
import random
|
||||
import string
|
||||
import datetime as dt
|
||||
from packaging import version as pyver
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -18,12 +19,9 @@ from rest_framework.response import Response
|
||||
from rest_framework import status, generics
|
||||
|
||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from clients.models import Client, Site
|
||||
from accounts.models import User
|
||||
from core.models import CoreSettings
|
||||
from scripts.models import Script
|
||||
from logs.models import AuditLog
|
||||
from logs.models import AuditLog, PendingAction
|
||||
|
||||
from .serializers import (
|
||||
AgentSerializer,
|
||||
@@ -32,14 +30,18 @@ from .serializers import (
|
||||
AgentEditSerializer,
|
||||
NoteSerializer,
|
||||
NotesSerializer,
|
||||
AgentOverdueActionSerializer,
|
||||
)
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .tasks import uninstall_agent_task, send_agent_update_task
|
||||
from winupdate.tasks import bulk_check_for_updates_task
|
||||
from scripts.tasks import run_script_bg_task, run_bulk_script_task
|
||||
from .tasks import (
|
||||
send_agent_update_task,
|
||||
run_script_email_results_task,
|
||||
)
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.utils import notify_error, reload_nats
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -66,50 +68,48 @@ def update_agents(request):
|
||||
@api_view()
|
||||
def ping(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=5, func="test.ping")
|
||||
status = "offline"
|
||||
if agent.has_nats:
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
return Response({"name": agent.hostname, "status": "offline"})
|
||||
|
||||
if isinstance(r, bool) and r:
|
||||
return Response({"name": agent.hostname, "status": "online"})
|
||||
else:
|
||||
return Response({"name": agent.hostname, "status": "offline"})
|
||||
return Response({"name": agent.hostname, "status": status})
|
||||
|
||||
|
||||
@api_view(["DELETE"])
|
||||
def uninstall(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
# just in case agent-user gets deleted accidentaly from django-admin
|
||||
# we can still remove the agent
|
||||
try:
|
||||
user = User.objects.get(username=agent.agent_id)
|
||||
user.delete()
|
||||
except Exception as e:
|
||||
logger.warning(e)
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
|
||||
salt_id = agent.salt_id
|
||||
name = agent.hostname
|
||||
agent.delete()
|
||||
|
||||
uninstall_agent_task.delay(salt_id)
|
||||
reload_nats()
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
def edit_agent(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||
|
||||
old_site = agent.site.pk
|
||||
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer.is_valid(raise_exception=True)
|
||||
a_serializer.save()
|
||||
|
||||
policy = WinUpdatePolicy.objects.get(agent=agent)
|
||||
policy = agent.winupdatepolicy.get()
|
||||
p_serializer = WinUpdatePolicySerializer(
|
||||
instance=policy, data=request.data["winupdatepolicy"][0]
|
||||
)
|
||||
p_serializer.is_valid(raise_exception=True)
|
||||
p_serializer.save()
|
||||
|
||||
# check if site changed and initiate generating correct policies
|
||||
if old_site != request.data["site"]:
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -125,16 +125,9 @@ def meshcentral(request, pk):
|
||||
if token == "err":
|
||||
return notify_error("Invalid mesh token")
|
||||
|
||||
control = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
)
|
||||
terminal = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
)
|
||||
file = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
)
|
||||
webrdp = f"{core.mesh_site}/mstsc.html?login={token}&node={agent.mesh_node_id}"
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
|
||||
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
|
||||
|
||||
@@ -143,8 +136,9 @@ def meshcentral(request, pk):
|
||||
"control": control,
|
||||
"terminal": terminal,
|
||||
"file": file,
|
||||
"webrdp": webrdp,
|
||||
"status": agent.status,
|
||||
"client": agent.client.name,
|
||||
"site": agent.site.name,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
@@ -158,28 +152,29 @@ def agent_detail(request, pk):
|
||||
@api_view()
|
||||
def get_processes(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=20, func="win_agent.get_procs")
|
||||
if pyver.parse(agent.version) < pyver.parse("1.2.0"):
|
||||
return notify_error("Requires agent version 1.2.0 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view()
|
||||
def kill_proc(request, pk, pid):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=25, func="ps.kill_pid", arg=int(pid))
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
|
||||
)
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
if isinstance(r, bool) and not r:
|
||||
return notify_error("Unable to kill the process")
|
||||
elif r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -187,55 +182,42 @@ def kill_proc(request, pk, pid):
|
||||
@api_view()
|
||||
def get_event_log(request, pk, logtype, days):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30,
|
||||
func="win_agent.get_eventlog",
|
||||
arg=[logtype, int(days)],
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = 180 if logtype == "Security" else 30
|
||||
data = {
|
||||
"func": "eventlog",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"logname": logtype,
|
||||
"days": str(days),
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(json.loads(zlib.decompress(base64.b64decode(r["wineventlog"]))))
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def power_action(request):
|
||||
pk = request.data["pk"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if action == "rebootnow":
|
||||
logger.info(f"{agent.hostname} was scheduled for immediate reboot")
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30,
|
||||
func="system.reboot",
|
||||
arg=3,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def send_raw_cmd(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=request.data["timeout"],
|
||||
func="cmd.run",
|
||||
kwargs={
|
||||
"cmd": request.data["cmd"],
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = int(request.data["timeout"])
|
||||
data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": request.data["cmd"],
|
||||
"shell": request.data["shell"],
|
||||
"timeout": request.data["timeout"],
|
||||
},
|
||||
)
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error" or not r:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
AuditLog.audit_raw_command(
|
||||
username=request.user.username,
|
||||
@@ -244,29 +226,31 @@ def send_raw_cmd(request):
|
||||
shell=request.data["shell"],
|
||||
)
|
||||
|
||||
logger.info(f"The command {request.data['cmd']} was sent on agent {agent.hostname}")
|
||||
return Response(r)
|
||||
|
||||
|
||||
class AgentsTableList(generics.ListAPIView):
|
||||
queryset = Agent.objects.prefetch_related("agentchecks").only(
|
||||
"pk",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"client",
|
||||
"site",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"needs_reboot",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_time",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"logged_in_username",
|
||||
"last_logged_in_user",
|
||||
"time_zone",
|
||||
"maintenance_mode",
|
||||
queryset = (
|
||||
Agent.objects.select_related("site")
|
||||
.prefetch_related("agentchecks")
|
||||
.only(
|
||||
"pk",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"site",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
"needs_reboot",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_time",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"logged_in_username",
|
||||
"last_logged_in_user",
|
||||
"time_zone",
|
||||
"maintenance_mode",
|
||||
)
|
||||
)
|
||||
serializer_class = AgentTableSerializer
|
||||
|
||||
@@ -281,7 +265,7 @@ class AgentsTableList(generics.ListAPIView):
|
||||
|
||||
@api_view()
|
||||
def list_agents_no_detail(request):
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.select_related("site").only("pk", "hostname", "site")
|
||||
return Response(AgentHostnameSerializer(agents, many=True).data)
|
||||
|
||||
|
||||
@@ -292,15 +276,15 @@ def agent_edit_details(request, pk):
|
||||
|
||||
|
||||
@api_view()
|
||||
def by_client(request, client):
|
||||
def by_client(request, clientpk):
|
||||
agents = (
|
||||
Agent.objects.filter(client=client)
|
||||
Agent.objects.select_related("site")
|
||||
.filter(site__client_id=clientpk)
|
||||
.prefetch_related("agentchecks")
|
||||
.only(
|
||||
"pk",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"client",
|
||||
"site",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
@@ -321,15 +305,15 @@ def by_client(request, client):
|
||||
|
||||
|
||||
@api_view()
|
||||
def by_site(request, client, site):
|
||||
def by_site(request, sitepk):
|
||||
agents = (
|
||||
Agent.objects.filter(client=client, site=site)
|
||||
Agent.objects.filter(site_id=sitepk)
|
||||
.select_related("site")
|
||||
.prefetch_related("agentchecks")
|
||||
.only(
|
||||
"pk",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"client",
|
||||
"site",
|
||||
"monitoring_type",
|
||||
"description",
|
||||
@@ -351,55 +335,80 @@ def by_site(request, client, site):
|
||||
|
||||
@api_view(["POST"])
|
||||
def overdue_action(request):
|
||||
pk = request.data["pk"]
|
||||
alert_type = request.data["alertType"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if alert_type == "email" and action == "enabled":
|
||||
agent.overdue_email_alert = True
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "email" and action == "disabled":
|
||||
agent.overdue_email_alert = False
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "text" and action == "enabled":
|
||||
agent.overdue_text_alert = True
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
elif alert_type == "text" and action == "disabled":
|
||||
agent.overdue_text_alert = False
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
else:
|
||||
return Response(
|
||||
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
serializer = AgentOverdueActionSerializer(
|
||||
instance=agent, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response(agent.hostname)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def reboot_later(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
date_time = request.data["datetime"]
|
||||
class Reboot(APIView):
|
||||
# reboot now
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
try:
|
||||
obj = dt.datetime.strptime(date_time, "%Y-%m-%d %H:%M")
|
||||
except Exception:
|
||||
return notify_error("Invalid date")
|
||||
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
r = agent.schedule_reboot(obj)
|
||||
return Response("ok")
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "failed":
|
||||
return notify_error("Something went wrong")
|
||||
# reboot later
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
return Response(r["msg"])
|
||||
try:
|
||||
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
|
||||
except Exception:
|
||||
return notify_error("Invalid date")
|
||||
|
||||
task_name = "TacticalRMM_SchedReboot_" + "".join(
|
||||
random.choice(string.ascii_letters) for _ in range(10)
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"trigger": "once",
|
||||
"name": task_name,
|
||||
"year": int(dt.datetime.strftime(obj, "%Y")),
|
||||
"month": dt.datetime.strftime(obj, "%B"),
|
||||
"day": int(dt.datetime.strftime(obj, "%d")),
|
||||
"hour": int(dt.datetime.strftime(obj, "%H")),
|
||||
"min": int(dt.datetime.strftime(obj, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
details = {"taskname": task_name, "time": str(obj)}
|
||||
PendingAction.objects.create(
|
||||
agent=agent, action_type="schedreboot", details=details
|
||||
)
|
||||
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
return Response(
|
||||
{"time": nice_time, "agent": agent.hostname, "task_name": task_name}
|
||||
)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def install_agent(request):
|
||||
from knox.models import AuthToken
|
||||
|
||||
client = get_object_or_404(Client, client=request.data["client"])
|
||||
site = get_object_or_404(Site, client=client, site=request.data["site"])
|
||||
client_id = request.data["client"]
|
||||
site_id = request.data["site"]
|
||||
version = settings.LATEST_AGENT_VER
|
||||
arch = request.data["arch"]
|
||||
|
||||
@@ -452,10 +461,10 @@ def install_agent(request):
|
||||
f"GOARCH={goarch}",
|
||||
go_bin,
|
||||
"build",
|
||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
||||
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
|
||||
f"-X 'main.Api={api}'",
|
||||
f"-X 'main.Client={client.pk}'",
|
||||
f"-X 'main.Site={site.pk}'",
|
||||
f"-X 'main.Client={client_id}'",
|
||||
f"-X 'main.Site={site_id}'",
|
||||
f"-X 'main.Atype={atype}'",
|
||||
f"-X 'main.Rdp={rdp}'",
|
||||
f"-X 'main.Ping={ping}'",
|
||||
@@ -552,7 +561,7 @@ def install_agent(request):
|
||||
"&&",
|
||||
"timeout",
|
||||
"/t",
|
||||
"20",
|
||||
"10",
|
||||
"/nobreak",
|
||||
">",
|
||||
"NUL",
|
||||
@@ -563,9 +572,9 @@ def install_agent(request):
|
||||
"--api",
|
||||
request.data["api"],
|
||||
"--client-id",
|
||||
client.pk,
|
||||
client_id,
|
||||
"--site-id",
|
||||
site.pk,
|
||||
site_id,
|
||||
"--agent-type",
|
||||
request.data["agenttype"],
|
||||
"--auth",
|
||||
@@ -582,8 +591,6 @@ def install_agent(request):
|
||||
resp = {
|
||||
"cmd": " ".join(str(i) for i in cmd),
|
||||
"url": download_url,
|
||||
"salt64": settings.SALT_64,
|
||||
"salt32": settings.SALT_32,
|
||||
}
|
||||
|
||||
return Response(resp)
|
||||
@@ -597,8 +604,8 @@ def install_agent(request):
|
||||
|
||||
replace_dict = {
|
||||
"innosetupchange": inno,
|
||||
"clientchange": str(client.pk),
|
||||
"sitechange": str(site.pk),
|
||||
"clientchange": str(client_id),
|
||||
"sitechange": str(site_id),
|
||||
"apichange": request.data["api"],
|
||||
"atypechange": request.data["agenttype"],
|
||||
"powerchange": str(request.data["power"]),
|
||||
@@ -638,35 +645,55 @@ def install_agent(request):
|
||||
@api_view(["POST"])
|
||||
def recover(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
mode = request.data["mode"]
|
||||
|
||||
if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
|
||||
return notify_error("Only available in agent version greater than 0.9.5")
|
||||
|
||||
if not agent.has_nats:
|
||||
if mode == "tacagent" or mode == "rpc":
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
||||
if agent.has_nats:
|
||||
if mode == "tacagent" or mode == "mesh":
|
||||
data = {"func": "recover", "payload": {"mode": mode}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||
if r == "ok":
|
||||
return Response("Successfully completed recovery")
|
||||
|
||||
if agent.recoveryactions.filter(last_run=None).exists():
|
||||
return notify_error(
|
||||
"A recovery action is currently pending. Please wait for the next agent check-in."
|
||||
)
|
||||
|
||||
if request.data["mode"] == "command" and not request.data["cmd"]:
|
||||
if mode == "command" and not request.data["cmd"]:
|
||||
return notify_error("Command is required")
|
||||
|
||||
# if we've made it this far and realtime recovery didn't work,
|
||||
# tacagent service is the fallback recovery so we obv can't use that to recover itself if it's down
|
||||
if mode == "tacagent":
|
||||
return notify_error(
|
||||
"Requires RPC service to be functional. Please recover that first"
|
||||
)
|
||||
|
||||
# we should only get here if all other methods fail
|
||||
RecoveryAction(
|
||||
agent=agent,
|
||||
mode=request.data["mode"],
|
||||
command=request.data["cmd"] if request.data["mode"] == "command" else None,
|
||||
mode=mode,
|
||||
command=request.data["cmd"] if mode == "command" else None,
|
||||
).save()
|
||||
|
||||
return Response(f"Recovery will be attempted on the agent's next check-in")
|
||||
return Response("Recovery will be attempted on the agent's next check-in")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def run_script(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
|
||||
output = request.data["output"]
|
||||
args = request.data["args"]
|
||||
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
@@ -675,75 +702,48 @@ def run_script(request):
|
||||
script=script.name,
|
||||
)
|
||||
|
||||
data = {
|
||||
"func": "runscript",
|
||||
"timeout": request.data["timeout"],
|
||||
"script_args": request.data["args"],
|
||||
"payload": {
|
||||
"code": script.code,
|
||||
"shell": script.shell,
|
||||
},
|
||||
}
|
||||
|
||||
if output == "wait":
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=req_timeout,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": args,
|
||||
},
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=req_timeout))
|
||||
return Response(r)
|
||||
elif output == "email":
|
||||
if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
|
||||
return notify_error("Requires agent version 1.1.12 or greater")
|
||||
|
||||
emails = (
|
||||
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||
)
|
||||
run_script_email_results_task.delay(
|
||||
agentpk=agent.pk,
|
||||
scriptpk=script.pk,
|
||||
nats_timeout=req_timeout,
|
||||
nats_data=data,
|
||||
emails=emails,
|
||||
)
|
||||
|
||||
if isinstance(r, dict):
|
||||
if r["stdout"]:
|
||||
return Response(r["stdout"])
|
||||
elif r["stderr"]:
|
||||
return Response(r["stderr"])
|
||||
else:
|
||||
try:
|
||||
r["retcode"]
|
||||
except KeyError:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
return Response(f"Return code: {r['retcode']}")
|
||||
|
||||
else:
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
else:
|
||||
return notify_error(str(r))
|
||||
|
||||
else:
|
||||
data = {
|
||||
"agentpk": agent.pk,
|
||||
"scriptpk": script.pk,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": args,
|
||||
}
|
||||
run_script_bg_task.delay(data)
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
def restart_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(func="service.restart", arg="mesh agent", timeout=30)
|
||||
if r == "timeout" or r == "error":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif isinstance(r, bool) and r:
|
||||
return Response(f"Restarted Mesh Agent on {agent.hostname}")
|
||||
else:
|
||||
return notify_error(f"Failed to restart the Mesh Agent on {agent.hostname}")
|
||||
asyncio.run(agent.nats_cmd(data, wait=False))
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
def recover_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=60,
|
||||
func="cmd.run",
|
||||
kwargs={
|
||||
"cmd": r'"C:\\Program Files\\TacticalAgent\\tacticalrmm.exe" -m recovermesh',
|
||||
"timeout": 55,
|
||||
},
|
||||
)
|
||||
if r == "timeout" or r == "error":
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
data = {"func": "recover", "payload": {"mode": "mesh"}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=45))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(f"Repaired mesh agent on {agent.hostname}")
|
||||
@@ -807,76 +807,46 @@ def bulk(request):
|
||||
return notify_error("Must select at least 1 agent")
|
||||
|
||||
if request.data["target"] == "client":
|
||||
client = get_object_or_404(Client, client=request.data["client"])
|
||||
agents = Agent.objects.filter(client=client.client)
|
||||
q = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
elif request.data["target"] == "site":
|
||||
client = get_object_or_404(Client, client=request.data["client"])
|
||||
site = (
|
||||
Site.objects.filter(client=client).filter(site=request.data["site"]).get()
|
||||
)
|
||||
agents = Agent.objects.filter(client=client.client).filter(site=site.site)
|
||||
q = Agent.objects.filter(site_id=request.data["site"])
|
||||
elif request.data["target"] == "agents":
|
||||
agents = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
elif request.data["target"] == "all":
|
||||
agents = Agent.objects.all()
|
||||
q = Agent.objects.only("pk", "monitoring_type")
|
||||
else:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
minions = [agent.salt_id for agent in agents]
|
||||
if request.data["monType"] == "servers":
|
||||
q = q.filter(monitoring_type="server")
|
||||
elif request.data["monType"] == "workstations":
|
||||
q = q.filter(monitoring_type="workstation")
|
||||
|
||||
agents: List[int] = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
r = Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="cmd.run_bg",
|
||||
kwargs={
|
||||
"cmd": request.data["cmd"],
|
||||
"shell": request.data["shell"],
|
||||
"timeout": request.data["timeout"],
|
||||
},
|
||||
handle_bulk_command_task.delay(
|
||||
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
|
||||
)
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
return Response(f"Command will now be run on {len(minions)} agents")
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
|
||||
if script.shell == "python":
|
||||
r = Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": request.data["args"],
|
||||
"bg": True,
|
||||
},
|
||||
)
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
else:
|
||||
data = {
|
||||
"minions": minions,
|
||||
"scriptpk": script.pk,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": request.data["args"],
|
||||
}
|
||||
run_bulk_script_task.delay(data)
|
||||
|
||||
return Response(f"{script.name} will now be run on {len(minions)} agents")
|
||||
handle_bulk_script_task.delay(
|
||||
script.pk, agents, request.data["args"], request.data["timeout"]
|
||||
)
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(minions)} agents"
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(minions=minions)
|
||||
return Response(f"Patch status scan will now run on {len(minions)} agents")
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
@@ -904,14 +874,12 @@ def agent_counts(request):
|
||||
@api_view(["POST"])
|
||||
def agent_maintenance(request):
|
||||
if request.data["type"] == "Client":
|
||||
client = Client.objects.get(pk=request.data["id"])
|
||||
Agent.objects.filter(client=client.client).update(
|
||||
Agent.objects.filter(site__client_id=request.data["id"]).update(
|
||||
maintenance_mode=request.data["action"]
|
||||
)
|
||||
|
||||
elif request.data["type"] == "Site":
|
||||
site = Site.objects.get(pk=request.data["id"])
|
||||
Agent.objects.filter(client=site.client.client, site=site.site).update(
|
||||
Agent.objects.filter(site_id=request.data["id"]).update(
|
||||
maintenance_mode=request.data["action"]
|
||||
)
|
||||
|
||||
@@ -924,3 +892,15 @@ def agent_maintenance(request):
|
||||
return notify_error("Invalid data")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WMI(APIView):
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
|
||||
return notify_error("Requires agent version 1.1.2 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
return Response("ok")
|
||||
|
||||
@@ -7,19 +7,25 @@ import django.db.models.deletion
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0010_auto_20200922_1344'),
|
||||
('alerts', '0002_auto_20200815_1618'),
|
||||
("checks", "0010_auto_20200922_1344"),
|
||||
("alerts", "0002_auto_20200815_1618"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='assigned_check',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
|
||||
model_name="alert",
|
||||
name="assigned_check",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="alert",
|
||||
to="checks.check",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alert',
|
||||
name='alert_time',
|
||||
model_name="alert",
|
||||
name="alert_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -37,7 +37,7 @@ class Alert(models.Model):
|
||||
@classmethod
|
||||
def create_availability_alert(cls, agent):
|
||||
pass
|
||||
|
||||
|
||||
@classmethod
|
||||
def create_check_alert(cls, check):
|
||||
pass
|
||||
pass
|
||||
|
||||
@@ -16,4 +16,4 @@ class AlertSerializer(ModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Alert
|
||||
fields = "__all__"
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
name = "api"
|
||||
@@ -1,11 +0,0 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
from apiv3 import views as v3_views
|
||||
|
||||
urlpatterns = [
|
||||
path("triggerpatchscan/", views.trigger_patch_scan),
|
||||
path("<int:pk>/checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<int:pk>/taskrunner/", views.TaskRunner.as_view()),
|
||||
path("<int:pk>/saltinfo/", views.SaltInfo.as_view()),
|
||||
path("<int:pk>/meshinfo/", v3_views.MeshInfo.as_view()),
|
||||
]
|
||||
@@ -1,149 +0,0 @@
|
||||
from loguru import logger
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.decorators import (
|
||||
api_view,
|
||||
authentication_classes,
|
||||
permission_classes,
|
||||
)
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
|
||||
from autotasks.serializers import TaskRunnerGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.serializers import CheckRunnerGetSerializer, CheckResultsSerializer
|
||||
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
@authentication_classes((TokenAuthentication,))
|
||||
@permission_classes((IsAuthenticated,))
|
||||
def trigger_patch_scan(request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
else:
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For windows agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
checks = Check.objects.filter(agent__pk=pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request, pk):
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
if check.check_type != "cpuload" and check.check_type != "memory":
|
||||
serializer = CheckResultsSerializer(
|
||||
instance=check, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
|
||||
else:
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
|
||||
check.handle_check(request.data)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows python agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskRunnerGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
serializer = TaskRunnerPatchSerializer(
|
||||
instance=task, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SaltInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
ret = {
|
||||
"latestVer": settings.LATEST_SALT_VER,
|
||||
"currentVer": agent.salt_ver,
|
||||
"salt_id": agent.salt_id,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.salt_ver = request.data["ver"]
|
||||
agent.save(update_fields=["salt_ver"])
|
||||
return Response("ok")
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class Apiv2Config(AppConfig):
|
||||
name = 'apiv2'
|
||||
@@ -1,35 +0,0 @@
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
class TestAPIv2(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
self.agent_setup()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_sync_modules(self, mock_ret):
|
||||
url = "/api/v2/saltminion/"
|
||||
payload = {"agent_id": self.agent.agent_id}
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = []
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Modules are already in sync")
|
||||
|
||||
mock_ret.return_value = ["modules.win_agent"]
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
||||
|
||||
mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
@@ -1,14 +0,0 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
from apiv3 import views as v3_views
|
||||
|
||||
urlpatterns = [
|
||||
path("newagent/", v3_views.NewAgent.as_view()),
|
||||
path("meshexe/", v3_views.MeshExe.as_view()),
|
||||
path("saltminion/", v3_views.SaltMinion.as_view()),
|
||||
path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
|
||||
path("sysinfo/", v3_views.SysInfo.as_view()),
|
||||
path("hello/", v3_views.Hello.as_view()),
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
]
|
||||
@@ -1,41 +0,0 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from checks.serializers import CheckRunnerGetSerializerV2
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For the windows python agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV2(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request):
|
||||
check = get_object_or_404(Check, pk=request.data["id"])
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
status = check.handle_checkv2(request.data)
|
||||
return Response(status)
|
||||
@@ -2,11 +2,18 @@ import os
|
||||
import json
|
||||
|
||||
from django.conf import settings
|
||||
from tacticalrmm.test import BaseTestCase
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from unittest.mock import patch
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
|
||||
class TestAPIv3(BaseTestCase):
|
||||
class TestAPIv3(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
|
||||
def test_get_checks(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/checkrunner/"
|
||||
|
||||
@@ -19,34 +26,13 @@ class TestAPIv3(BaseTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_salt_minion(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/saltminion/"
|
||||
url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn("latestVer", r.json().keys())
|
||||
self.assertIn("currentVer", r.json().keys())
|
||||
self.assertIn("salt_id", r.json().keys())
|
||||
self.assertIn("downloadURL", r.json().keys())
|
||||
|
||||
r2 = self.client.get(url2)
|
||||
self.assertEqual(r2.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url2)
|
||||
|
||||
def test_get_mesh_info(self):
|
||||
url = f"/api/v3/{self.agent.pk}/meshinfo/"
|
||||
url2 = f"/api/v1/{self.agent.pk}/meshinfo/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
r = self.client.get(url2)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url2)
|
||||
|
||||
def test_get_winupdater(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
|
||||
@@ -58,7 +44,7 @@ class TestAPIv3(BaseTestCase):
|
||||
def test_sysinfo(self):
|
||||
# TODO replace this with golang wmi sample data
|
||||
|
||||
url = f"/api/v3/sysinfo/"
|
||||
url = "/api/v3/sysinfo/"
|
||||
with open(
|
||||
os.path.join(
|
||||
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
|
||||
@@ -74,7 +60,7 @@ class TestAPIv3(BaseTestCase):
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_hello_patch(self):
|
||||
url = f"/api/v3/hello/"
|
||||
url = "/api/v3/hello/"
|
||||
payload = {
|
||||
"agent_id": self.agent.agent_id,
|
||||
"logged_in_username": "None",
|
||||
@@ -89,3 +75,12 @@ class TestAPIv3(BaseTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_checkrunner_interval(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
|
||||
)
|
||||
|
||||
@@ -2,16 +2,18 @@ from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("checkin/", views.CheckIn.as_view()),
|
||||
path("hello/", views.Hello.as_view()),
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
|
||||
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
|
||||
path("saltminion/", views.SaltMinion.as_view()),
|
||||
path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
|
||||
path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
|
||||
path("meshexe/", views.MeshExe.as_view()),
|
||||
path("sysinfo/", views.SysInfo.as_view()),
|
||||
path("newagent/", views.NewAgent.as_view()),
|
||||
path("winupdater/", views.WinUpdater.as_view()),
|
||||
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
||||
path("software/", views.Software.as_view()),
|
||||
path("installer/", views.Installer.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import asyncio
|
||||
import os
|
||||
import requests
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -16,11 +18,10 @@ from rest_framework.authtoken.models import Token
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
from winupdate.models import WinUpdate
|
||||
from accounts.models import User
|
||||
from clients.models import Client, Site
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from checks.serializers import CheckRunnerGetSerializerV3
|
||||
from software.models import InstalledSoftware
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from winupdate.serializers import ApprovedUpdateSerializer
|
||||
@@ -28,18 +29,110 @@ from winupdate.serializers import ApprovedUpdateSerializer
|
||||
from agents.tasks import (
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
get_wmi_detail_task,
|
||||
sync_salt_modules_task,
|
||||
)
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
from software.tasks import get_installed_software, install_chocolatey
|
||||
from checks.utils import bytes2human
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.utils import notify_error, reload_nats, filter_software, SoftwareList
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class CheckIn(APIView):
|
||||
"""
|
||||
The agent's checkin endpoint
|
||||
patch: called every 45 to 110 seconds, handles agent updates and recovery
|
||||
put: called every 5 to 10 minutes, handles basic system info
|
||||
post: called once on windows service startup
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.version = request.data["version"]
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["version", "last_seen"])
|
||||
|
||||
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
||||
last_outage = agent.agentoutages.last()
|
||||
last_outage.recovery_time = djangotime.now()
|
||||
last_outage.save(update_fields=["recovery_time"])
|
||||
|
||||
if agent.overdue_email_alert:
|
||||
agent_recovery_email_task.delay(pk=last_outage.pk)
|
||||
if agent.overdue_text_alert:
|
||||
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
||||
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
||||
if recovery is not None:
|
||||
recovery.last_run = djangotime.now()
|
||||
recovery.save(update_fields=["last_run"])
|
||||
return Response(recovery.send())
|
||||
|
||||
# handle agent update
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
update = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
update.status = "completed"
|
||||
update.save(update_fields=["status"])
|
||||
return Response(update.details)
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
agent.handle_pending_actions()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
if "disks" in request.data.keys():
|
||||
disks = request.data["disks"]
|
||||
new = []
|
||||
# python agent
|
||||
if isinstance(disks, dict):
|
||||
for k, v in disks.items():
|
||||
new.append(v)
|
||||
else:
|
||||
# golang agent
|
||||
for disk in disks:
|
||||
tmp = {}
|
||||
for k, v in disk.items():
|
||||
tmp["device"] = disk["device"]
|
||||
tmp["fstype"] = disk["fstype"]
|
||||
tmp["total"] = bytes2human(disk["total"])
|
||||
tmp["used"] = bytes2human(disk["used"])
|
||||
tmp["free"] = bytes2human(disk["free"])
|
||||
tmp["percent"] = int(disk["percent"])
|
||||
new.append(tmp)
|
||||
|
||||
serializer.save(disks=new)
|
||||
return Response("ok")
|
||||
|
||||
if "logged_in_username" in request.data.keys():
|
||||
if request.data["logged_in_username"] != "None":
|
||||
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||
return Response("ok")
|
||||
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Hello(APIView):
|
||||
#### DEPRECATED, for agents <= 1.1.9 ####
|
||||
"""
|
||||
The agent's checkin endpoint
|
||||
patch: called every 30 to 120 seconds
|
||||
@@ -98,6 +191,17 @@ class Hello(APIView):
|
||||
recovery.save(update_fields=["last_run"])
|
||||
return Response(recovery.send())
|
||||
|
||||
# handle agent update
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
update = agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).last()
|
||||
update.status = "completed"
|
||||
update.save(update_fields=["status"])
|
||||
return Response(update.details)
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
agent.handle_pending_actions()
|
||||
@@ -110,17 +214,6 @@ class Hello(APIView):
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
|
||||
sync_salt_modules_task.delay(agent.pk)
|
||||
get_installed_software.delay(agent.pk)
|
||||
get_wmi_detail_task.delay(agent.pk)
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
||||
)
|
||||
|
||||
if not agent.choco_installed:
|
||||
install_chocolatey.delay(agent.pk, wait=True)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -134,14 +227,12 @@ class CheckRunner(APIView):
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
@@ -150,9 +241,19 @@ class CheckRunner(APIView):
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
status = check.handle_checkv2(request.data)
|
||||
|
||||
return Response(status)
|
||||
|
||||
|
||||
class CheckRunnerInterval(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
return Response({"agent": agent.pk, "check_interval": agent.check_interval})
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows golang agent
|
||||
@@ -167,6 +268,8 @@ class TaskRunner(APIView):
|
||||
return Response(TaskGOGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk, agentid):
|
||||
from logs.models import AuditLog
|
||||
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
@@ -175,77 +278,17 @@ class TaskRunner(APIView):
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
return Response("ok")
|
||||
|
||||
new_task = AutomatedTask.objects.get(pk=task.pk)
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="task_run",
|
||||
message=f"Scheduled Task {task.name} was run on {agent.hostname}",
|
||||
after_value=AutomatedTask.serialize(new_task),
|
||||
)
|
||||
|
||||
class SaltMinion(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
ret = {
|
||||
"latestVer": settings.LATEST_SALT_VER,
|
||||
"currentVer": agent.salt_ver,
|
||||
"salt_id": agent.salt_id,
|
||||
"downloadURL": agent.winsalt_dl,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def post(self, request):
|
||||
# accept the salt key
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if agent.salt_id != request.data["saltid"]:
|
||||
return notify_error("Salt keys do not match")
|
||||
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "wheel",
|
||||
"fun": "key.accept",
|
||||
"match": request.data["saltid"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
return notify_error("No communication between agent and salt-api")
|
||||
|
||||
try:
|
||||
data = resp.json()["return"][0]["data"]
|
||||
minion = data["return"]["minions"][0]
|
||||
except Exception:
|
||||
return notify_error("Key error")
|
||||
|
||||
if data["success"] and minion == request.data["saltid"]:
|
||||
return Response("Salt key was accepted")
|
||||
else:
|
||||
return notify_error("Not accepted")
|
||||
|
||||
def patch(self, request):
|
||||
# sync modules
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
return notify_error("Failed to sync salt modules")
|
||||
|
||||
if isinstance(r, list) and any("modules" in i for i in r):
|
||||
return Response("Successfully synced salt modules")
|
||||
elif isinstance(r, list) and not r:
|
||||
return Response("Modules are already in sync")
|
||||
else:
|
||||
return notify_error(f"Failed to sync salt modules: {str(r)}")
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.salt_ver = request.data["ver"]
|
||||
agent.save(update_fields=["salt_ver"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -289,6 +332,7 @@ class WinUpdater(APIView):
|
||||
update.installed = True
|
||||
update.save(update_fields=["result", "downloaded", "installed"])
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
# agent calls this after it's finished installing all patches
|
||||
@@ -308,26 +352,13 @@ class WinUpdater(APIView):
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
else:
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(
|
||||
f"{agent.hostname} is rebooting after updates were installed."
|
||||
)
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@@ -356,7 +387,15 @@ class MeshInfo(APIView):
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.mesh_node_id = request.data["nodeidhex"]
|
||||
|
||||
if "nodeidhex" in request.data:
|
||||
# agent <= 1.1.0
|
||||
nodeid = request.data["nodeidhex"]
|
||||
else:
|
||||
# agent >= 1.1.1
|
||||
nodeid = request.data["nodeid"]
|
||||
|
||||
agent.mesh_node_id = nodeid
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
return Response("ok")
|
||||
|
||||
@@ -387,31 +426,9 @@ class MeshExe(APIView):
|
||||
|
||||
|
||||
class NewAgent(APIView):
|
||||
""" For the installer """
|
||||
|
||||
def post(self, request):
|
||||
"""
|
||||
Creates and returns the agents auth token
|
||||
which is stored in the agent's local db
|
||||
and used to authenticate every agent request
|
||||
"""
|
||||
from logs.models import AuditLog
|
||||
|
||||
if "agent_id" not in request.data:
|
||||
return notify_error("Invalid payload")
|
||||
|
||||
agentid = request.data["agent_id"]
|
||||
if Agent.objects.filter(agent_id=agentid).exists():
|
||||
return notify_error(
|
||||
"Agent already exists. Remove old agent first if trying to re-install"
|
||||
)
|
||||
|
||||
user = User.objects.create_user(
|
||||
username=agentid, password=User.objects.make_random_password(60)
|
||||
)
|
||||
token = Token.objects.create(user=user)
|
||||
return Response({"token": token.key})
|
||||
|
||||
def patch(self, request):
|
||||
""" Creates the agent """
|
||||
|
||||
if Agent.objects.filter(agent_id=request.data["agent_id"]).exists():
|
||||
@@ -419,14 +436,10 @@ class NewAgent(APIView):
|
||||
"Agent already exists. Remove old agent first if trying to re-install"
|
||||
)
|
||||
|
||||
client = get_object_or_404(Client, pk=int(request.data["client"]))
|
||||
site = get_object_or_404(Site, pk=int(request.data["site"]))
|
||||
|
||||
agent = Agent(
|
||||
agent_id=request.data["agent_id"],
|
||||
hostname=request.data["hostname"],
|
||||
client=client.client,
|
||||
site=site.site,
|
||||
site_id=int(request.data["site"]),
|
||||
monitoring_type=request.data["monitoring_type"],
|
||||
description=request.data["description"],
|
||||
mesh_node_id=request.data["mesh_node_id"],
|
||||
@@ -436,13 +449,78 @@ class NewAgent(APIView):
|
||||
agent.salt_id = f"{agent.hostname}-{agent.pk}"
|
||||
agent.save(update_fields=["salt_id"])
|
||||
|
||||
user = User.objects.create_user(
|
||||
username=request.data["agent_id"],
|
||||
agent=agent,
|
||||
password=User.objects.make_random_password(60),
|
||||
)
|
||||
|
||||
token = Token.objects.create(user=user)
|
||||
|
||||
if agent.monitoring_type == "workstation":
|
||||
WinUpdatePolicy(agent=agent, run_time_days=[5, 6]).save()
|
||||
else:
|
||||
WinUpdatePolicy(agent=agent).save()
|
||||
|
||||
reload_nats()
|
||||
|
||||
# Generate policies for new agent
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
return Response({"pk": agent.pk, "saltid": f"{agent.hostname}-{agent.pk}"})
|
||||
# create agent install audit record
|
||||
AuditLog.objects.create(
|
||||
username=request.user,
|
||||
agent=agent.hostname,
|
||||
object_type="agent",
|
||||
action="agent_install",
|
||||
message=f"{request.user} installed new agent {agent.hostname}",
|
||||
after_value=Agent.serialize(agent),
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"pk": agent.pk,
|
||||
"saltid": f"{agent.hostname}-{agent.pk}",
|
||||
"token": token.key,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class Software(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first()
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Installer(APIView):
|
||||
def get(self, request):
|
||||
# used to check if token is valid. will return 401 if not
|
||||
return Response("ok")
|
||||
|
||||
def post(self, request):
|
||||
if "version" not in request.data:
|
||||
return notify_error("Invalid data")
|
||||
|
||||
ver = request.data["version"]
|
||||
if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER):
|
||||
return notify_error(
|
||||
f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Policy, PolicyExclusions
|
||||
from .models import Policy
|
||||
|
||||
admin.site.register(Policy)
|
||||
admin.site.register(PolicyExclusions)
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-02 19:13
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("automation", "0005_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.DeleteModel(
|
||||
name="PolicyExclusions",
|
||||
),
|
||||
]
|
||||
@@ -1,6 +1,5 @@
|
||||
from django.db import models
|
||||
from agents.models import Agent
|
||||
from clients.models import Site, Client
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
@@ -32,16 +31,15 @@ class Policy(BaseAuditModel):
|
||||
|
||||
filtered_agents_pks = Policy.objects.none()
|
||||
|
||||
for site in explicit_sites:
|
||||
if site.client not in explicit_clients:
|
||||
filtered_agents_pks |= Agent.objects.filter(
|
||||
client=site.client.client,
|
||||
site=site.site,
|
||||
monitoring_type=mon_type,
|
||||
).values_list("pk", flat=True)
|
||||
filtered_agents_pks |= Agent.objects.filter(
|
||||
site__in=[
|
||||
site for site in explicit_sites if site.client not in explicit_clients
|
||||
],
|
||||
monitoring_type=mon_type,
|
||||
).values_list("pk", flat=True)
|
||||
|
||||
filtered_agents_pks |= Agent.objects.filter(
|
||||
client__in=[client.client for client in explicit_clients],
|
||||
site__client__in=[client for client in explicit_clients],
|
||||
monitoring_type=mon_type,
|
||||
).values_list("pk", flat=True)
|
||||
|
||||
@@ -59,6 +57,11 @@ class Policy(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
from autotasks.models import AutomatedTask
|
||||
from logs.models import PendingAction
|
||||
|
||||
# List of all tasks to be applied
|
||||
tasks = list()
|
||||
added_task_pks = list()
|
||||
@@ -68,8 +71,8 @@ class Policy(BaseAuditModel):
|
||||
]
|
||||
|
||||
# Get policies applied to agent and agent site and client
|
||||
client = Client.objects.get(client=agent.client)
|
||||
site = Site.objects.filter(client=client).get(site=agent.site)
|
||||
client = agent.client
|
||||
site = agent.site
|
||||
|
||||
default_policy = None
|
||||
client_policy = None
|
||||
@@ -81,7 +84,7 @@ class Policy(BaseAuditModel):
|
||||
default_policy = CoreSettings.objects.first().server_policy
|
||||
client_policy = client.server_policy
|
||||
site_policy = site.server_policy
|
||||
else:
|
||||
elif agent.monitoring_type == "workstation":
|
||||
default_policy = CoreSettings.objects.first().workstation_policy
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
@@ -108,6 +111,33 @@ class Policy(BaseAuditModel):
|
||||
tasks.append(task)
|
||||
added_task_pks.append(task.pk)
|
||||
|
||||
# remove policy tasks from agent not included in policy
|
||||
for task in agent.autotasks.filter(
|
||||
parent_task__in=[
|
||||
taskpk
|
||||
for taskpk in agent_tasks_parent_pks
|
||||
if taskpk not in added_task_pks
|
||||
]
|
||||
):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
|
||||
# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
|
||||
for action in agent.pendingactions.exclude(status="completed"):
|
||||
task = AutomatedTask.objects.get(pk=action.details["task_id"])
|
||||
if (
|
||||
task.parent_task in agent_tasks_parent_pks
|
||||
and task.parent_task in added_task_pks
|
||||
):
|
||||
agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskcreate", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
||||
|
||||
@staticmethod
|
||||
@@ -121,8 +151,8 @@ class Policy(BaseAuditModel):
|
||||
]
|
||||
|
||||
# Get policies applied to agent and agent site and client
|
||||
client = Client.objects.get(client=agent.client)
|
||||
site = Site.objects.filter(client=client).get(site=agent.site)
|
||||
client = agent.client
|
||||
site = agent.site
|
||||
|
||||
default_policy = None
|
||||
client_policy = None
|
||||
@@ -133,7 +163,7 @@ class Policy(BaseAuditModel):
|
||||
default_policy = CoreSettings.objects.first().server_policy
|
||||
client_policy = client.server_policy
|
||||
site_policy = site.server_policy
|
||||
else:
|
||||
elif agent.monitoring_type == "workstation":
|
||||
default_policy = CoreSettings.objects.first().workstation_policy
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
@@ -281,6 +311,15 @@ class Policy(BaseAuditModel):
|
||||
+ eventlog_checks
|
||||
)
|
||||
|
||||
# remove policy checks from agent that fell out of policy scope
|
||||
agent.agentchecks.filter(
|
||||
parent_check__in=[
|
||||
checkpk
|
||||
for checkpk in agent_checks_parent_pks
|
||||
if checkpk not in [check.pk for check in final_list]
|
||||
]
|
||||
).delete()
|
||||
|
||||
return [
|
||||
check for check in final_list if check.pk not in agent_checks_parent_pks
|
||||
]
|
||||
@@ -300,11 +339,3 @@ class Policy(BaseAuditModel):
|
||||
if tasks:
|
||||
for task in tasks:
|
||||
task.create_policy_task(agent)
|
||||
|
||||
|
||||
class PolicyExclusions(models.Model):
|
||||
policy = models.ForeignKey(
|
||||
Policy, related_name="exclusions", on_delete=models.CASCADE
|
||||
)
|
||||
agents = models.ManyToManyField(Agent, related_name="policy_exclusions")
|
||||
sites = models.ManyToManyField(Site, related_name="policy_exclusions")
|
||||
|
||||
@@ -5,6 +5,9 @@ from rest_framework.serializers import (
|
||||
ReadOnlyField,
|
||||
)
|
||||
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
|
||||
from .models import Policy
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
@@ -21,11 +24,11 @@ class PolicySerializer(ModelSerializer):
|
||||
|
||||
class PolicyTableSerializer(ModelSerializer):
|
||||
|
||||
server_clients = StringRelatedField(many=True, read_only=True)
|
||||
server_sites = StringRelatedField(many=True, read_only=True)
|
||||
workstation_clients = StringRelatedField(many=True, read_only=True)
|
||||
workstation_sites = StringRelatedField(many=True, read_only=True)
|
||||
agents = StringRelatedField(many=True, read_only=True)
|
||||
server_clients = ClientSerializer(many=True, read_only=True)
|
||||
server_sites = SiteSerializer(many=True, read_only=True)
|
||||
workstation_clients = ClientSerializer(many=True, read_only=True)
|
||||
workstation_sites = SiteSerializer(many=True, read_only=True)
|
||||
agents = AgentHostnameSerializer(many=True, read_only=True)
|
||||
default_server_policy = ReadOnlyField(source="is_default_server_policy")
|
||||
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
|
||||
agents_count = SerializerMethodField(read_only=True)
|
||||
@@ -43,7 +46,7 @@ class PolicyTableSerializer(ModelSerializer):
|
||||
class PolicyOverviewSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = ("pk", "client", "sites", "workstation_policy", "server_policy")
|
||||
fields = ("pk", "name", "sites", "workstation_policy", "server_policy")
|
||||
depth = 2
|
||||
|
||||
|
||||
|
||||
@@ -6,46 +6,46 @@ from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_checks_from_policies_task(
|
||||
###
|
||||
# copies the policy checks to all affected agents
|
||||
#
|
||||
# clear: clears all policy checks first
|
||||
# create_tasks: also create tasks after checks are generated
|
||||
###
|
||||
policypk,
|
||||
clear=False,
|
||||
create_tasks=False,
|
||||
):
|
||||
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
|
||||
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
for agent in policy.related_agents():
|
||||
agent.generate_checks_from_policies(clear=clear)
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents()
|
||||
|
||||
for agent in agents:
|
||||
agent.generate_checks_from_policies()
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies(
|
||||
clear=clear,
|
||||
)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_checks_by_location_task(
|
||||
location, mon_type, clear=False, create_tasks=False
|
||||
):
|
||||
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):
|
||||
|
||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
||||
agent.generate_checks_from_policies(clear=clear)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False):
|
||||
def generate_all_agent_checks_task(mon_type, create_tasks=False):
|
||||
for agent in Agent.objects.filter(monitoring_type=mon_type):
|
||||
agent.generate_checks_from_policies(clear=clear)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
@@ -83,18 +83,32 @@ def update_policy_check_fields_task(checkpk):
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_tasks_from_policies_task(policypk, clear=False):
|
||||
def generate_agent_tasks_from_policies_task(policypk):
|
||||
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
for agent in policy.related_agents():
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents()
|
||||
|
||||
for agent in agents:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_tasks_by_location_task(location, mon_type, clear=False):
|
||||
def generate_agent_tasks_by_location_task(location, mon_type):
|
||||
|
||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
|
||||
@@ -71,8 +71,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# create policy with tasks and checks
|
||||
policy = baker.make("automation.Policy")
|
||||
checks = self.create_checks(policy=policy)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
self.create_checks(policy=policy)
|
||||
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
|
||||
# test copy tasks and checks to another policy
|
||||
data = {
|
||||
@@ -121,9 +121,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
mock_checks_task.assert_called_with(
|
||||
policypk=policy.pk, clear=True, create_tasks=True
|
||||
)
|
||||
mock_checks_task.assert_called_with(policypk=policy.pk, create_tasks=True)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@@ -140,8 +138,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
mock_checks_task.assert_called_with(policypk=policy.pk, clear=True)
|
||||
mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True)
|
||||
mock_checks_task.assert_called_with(policypk=policy.pk)
|
||||
mock_tasks_task.assert_called_with(policypk=policy.pk)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -152,7 +150,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# create policy with tasks
|
||||
policy = baker.make("automation.Policy")
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
url = f"/automation/{policy.pk}/policyautomatedtasks/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
@@ -202,6 +200,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_policy_overview(self):
|
||||
from clients.models import Client
|
||||
|
||||
url = "/automation/policies/overview/"
|
||||
|
||||
policies = baker.make(
|
||||
@@ -213,7 +213,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
workstation_policy=cycle(policies),
|
||||
_quantity=5,
|
||||
)
|
||||
sites = baker.make(
|
||||
baker.make(
|
||||
"clients.Site",
|
||||
client=cycle(clients),
|
||||
server_policy=cycle(policies),
|
||||
@@ -221,8 +221,9 @@ class TestPolicyViews(TacticalTestCase):
|
||||
_quantity=4,
|
||||
)
|
||||
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=3)
|
||||
baker.make("clients.Site", client=cycle(clients), _quantity=3)
|
||||
resp = self.client.get(url, format="json")
|
||||
clients = Client.objects.all()
|
||||
serializer = PolicyOverviewSerializer(clients, many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
@@ -256,31 +257,31 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# data setup
|
||||
policy = baker.make("automation.Policy")
|
||||
client = baker.make("clients.Client", client="Test Client")
|
||||
site = baker.make("clients.Site", client=client, site="Test Site")
|
||||
agent = baker.make_recipe("agents.agent", client=client.client, site=site.site)
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
# test add client to policy data
|
||||
client_server_payload = {
|
||||
"type": "client",
|
||||
"pk": client.pk,
|
||||
"pk": agent.client.pk,
|
||||
"server_policy": policy.pk,
|
||||
}
|
||||
client_workstation_payload = {
|
||||
"type": "client",
|
||||
"pk": client.pk,
|
||||
"pk": agent.client.pk,
|
||||
"workstation_policy": policy.pk,
|
||||
}
|
||||
|
||||
# test add site to policy data
|
||||
site_server_payload = {
|
||||
"type": "site",
|
||||
"pk": site.pk,
|
||||
"pk": agent.site.pk,
|
||||
"server_policy": policy.pk,
|
||||
}
|
||||
site_workstation_payload = {
|
||||
"type": "site",
|
||||
"pk": site.pk,
|
||||
"pk": agent.site.pk,
|
||||
"workstation_policy": policy.pk,
|
||||
}
|
||||
|
||||
@@ -293,9 +294,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -306,9 +306,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -319,9 +318,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -332,9 +330,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -344,7 +341,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_task.assert_called_with(clear=True)
|
||||
mock_checks_task.assert_called()
|
||||
mock_checks_task.reset_mock()
|
||||
|
||||
# Adding the same relations shouldn't trigger mocks
|
||||
@@ -391,9 +388,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -404,9 +400,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -417,9 +412,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -430,9 +424,8 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
@@ -441,7 +434,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
resp = self.client.post(url, agent_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
# called because the relation changed
|
||||
mock_checks_task.assert_called_with(clear=True)
|
||||
mock_checks_task.assert_called()
|
||||
mock_checks_task.reset_mock()
|
||||
|
||||
# adding the same relations shouldn't trigger mocks
|
||||
@@ -471,14 +464,14 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_relation_by_type(self):
|
||||
def test_get_relation_by_type(self):
|
||||
url = f"/automation/related/"
|
||||
|
||||
# data setup
|
||||
policy = baker.make("automation.Policy")
|
||||
client = baker.make("clients.Client", client="Test Client")
|
||||
site = baker.make("clients.Site", client=client, site="Test Site")
|
||||
agent = baker.make_recipe("agents.agent", client=client.client, site=site.site)
|
||||
client = baker.make("clients.Client", workstation_policy=policy)
|
||||
site = baker.make("clients.Site", server_policy=policy)
|
||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
||||
|
||||
client_payload = {"type": "client", "pk": client.pk}
|
||||
|
||||
@@ -621,43 +614,38 @@ class TestPolicyViews(TacticalTestCase):
|
||||
"reprocess_failed_inherit": True,
|
||||
}
|
||||
|
||||
# create agents in sites
|
||||
clients = baker.make("clients.Client", client=seq("Client"), _quantity=3)
|
||||
sites = baker.make(
|
||||
"clients.Site", client=cycle(clients), site=seq("Site"), _quantity=6
|
||||
)
|
||||
|
||||
clients = baker.make("clients.Client", _quantity=6)
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=10)
|
||||
agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
client=cycle([x.client for x in clients]),
|
||||
site=cycle([x.site for x in sites]),
|
||||
site=cycle(sites),
|
||||
_quantity=6,
|
||||
)
|
||||
|
||||
# create patch policies
|
||||
patch_policies = baker.make_recipe(
|
||||
baker.make_recipe(
|
||||
"winupdate.winupdate_approve", agent=cycle(agents), _quantity=6
|
||||
)
|
||||
|
||||
# test reset agents in site
|
||||
data = {"client": clients[0].client, "site": "Site0"}
|
||||
data = {"site": sites[0].id}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
agents = Agent.objects.filter(client=clients[0].client, site="Site0")
|
||||
agents = Agent.objects.filter(site=sites[0])
|
||||
|
||||
for agent in agents:
|
||||
for k, v in inherit_fields.items():
|
||||
self.assertEqual(getattr(agent.winupdatepolicy.get(), k), v)
|
||||
|
||||
# test reset agents in client
|
||||
data = {"client": clients[1].client}
|
||||
data = {"client": clients[1].id}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
agents = Agent.objects.filter(client=clients[1].client)
|
||||
agents = Agent.objects.filter(site__client=clients[1])
|
||||
|
||||
for agent in agents:
|
||||
for k, v in inherit_fields.items():
|
||||
@@ -703,40 +691,24 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
def test_policy_related(self):
|
||||
|
||||
# Get Site and Client from an agent in list
|
||||
clients = baker.make("clients.Client", client=seq("Client"), _quantity=5)
|
||||
sites = baker.make(
|
||||
"clients.Site", client=cycle(clients), site=seq("Site"), _quantity=25
|
||||
)
|
||||
clients = baker.make("clients.Client", _quantity=5)
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=25)
|
||||
server_agents = baker.make_recipe(
|
||||
"agents.server_agent",
|
||||
client=cycle([x.client for x in clients]),
|
||||
site=seq("Site"),
|
||||
site=cycle(sites),
|
||||
_quantity=25,
|
||||
)
|
||||
workstation_agents = baker.make_recipe(
|
||||
"agents.workstation_agent",
|
||||
client=cycle([x.client for x in clients]),
|
||||
site=seq("Site"),
|
||||
site=cycle(sites),
|
||||
_quantity=25,
|
||||
)
|
||||
|
||||
server_client = clients[3]
|
||||
server_site = server_client.sites.all()[3]
|
||||
workstation_client = clients[1]
|
||||
workstation_site = server_client.sites.all()[2]
|
||||
server_agent = baker.make_recipe(
|
||||
"agents.server_agent", client=server_client.client, site=server_site.site
|
||||
)
|
||||
workstation_agent = baker.make_recipe(
|
||||
"agents.workstation_agent",
|
||||
client=workstation_client.client,
|
||||
site=workstation_site.site,
|
||||
)
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
|
||||
# Add Client to Policy
|
||||
policy.server_clients.add(server_client)
|
||||
policy.workstation_clients.add(workstation_client)
|
||||
policy.server_clients.add(server_agents[13].client)
|
||||
policy.workstation_clients.add(workstation_agents[15].client)
|
||||
|
||||
resp = self.client.get(
|
||||
f"/automation/policies/{policy.pk}/related/", format="json"
|
||||
@@ -747,19 +719,19 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEquals(len(resp.data["server_sites"]), 5)
|
||||
self.assertEquals(len(resp.data["workstation_clients"]), 1)
|
||||
self.assertEquals(len(resp.data["workstation_sites"]), 5)
|
||||
self.assertEquals(len(resp.data["agents"]), 12)
|
||||
self.assertEquals(len(resp.data["agents"]), 10)
|
||||
|
||||
# Add Site to Policy and the agents and sites length shouldn't change
|
||||
policy.server_sites.add(server_site)
|
||||
policy.workstation_sites.add(workstation_site)
|
||||
policy.server_sites.add(server_agents[13].site)
|
||||
policy.workstation_sites.add(workstation_agents[15].site)
|
||||
self.assertEquals(len(resp.data["server_sites"]), 5)
|
||||
self.assertEquals(len(resp.data["workstation_sites"]), 5)
|
||||
self.assertEquals(len(resp.data["agents"]), 12)
|
||||
self.assertEquals(len(resp.data["agents"]), 10)
|
||||
|
||||
# Add Agent to Policy and the agents length shouldn't change
|
||||
policy.agents.add(server_agent)
|
||||
policy.agents.add(workstation_agent)
|
||||
self.assertEquals(len(resp.data["agents"]), 12)
|
||||
policy.agents.add(server_agents[13])
|
||||
policy.agents.add(workstation_agents[15])
|
||||
self.assertEquals(len(resp.data["agents"]), 10)
|
||||
|
||||
def test_generating_agent_policy_checks(self):
|
||||
from .tasks import generate_agent_checks_from_policies_task
|
||||
@@ -767,12 +739,11 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
checks = self.create_checks(policy=policy)
|
||||
client = baker.make("clients.Client", client="Default")
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe("agents.agent", policy=policy)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
||||
|
||||
# test policy assigned to agent
|
||||
generate_agent_checks_from_policies_task(policy.id, clear=True)
|
||||
generate_agent_checks_from_policies_task(policy.id)
|
||||
|
||||
# make sure all checks were created. should be 7
|
||||
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
|
||||
@@ -815,9 +786,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
policy = baker.make("automation.Policy", active=True, enforced=True)
|
||||
script = baker.make_recipe("scripts.script")
|
||||
self.create_checks(policy=policy, script=script)
|
||||
client = baker.make("clients.Client", client="Default")
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe("agents.agent", policy=policy)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
||||
self.create_checks(agent=agent, script=script)
|
||||
|
||||
generate_agent_checks_from_policies_task(policy.id, create_tasks=True)
|
||||
@@ -839,27 +809,19 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.create_checks(policy=policy)
|
||||
clients = baker.make(
|
||||
"clients.Client",
|
||||
client=seq("Default"),
|
||||
_quantity=2,
|
||||
server_policy=policy,
|
||||
workstation_policy=policy,
|
||||
)
|
||||
baker.make(
|
||||
"clients.Site", client=cycle(clients), site=seq("Default"), _quantity=4
|
||||
)
|
||||
server_agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default1", site="Default1"
|
||||
)
|
||||
workstation_agent = baker.make_recipe(
|
||||
"agents.workstation_agent", client="Default1", site="Default3"
|
||||
)
|
||||
agent1 = baker.make_recipe("agents.agent", client="Default2", site="Default2")
|
||||
agent2 = baker.make_recipe("agents.agent", client="Default2", site="Default4")
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
|
||||
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
|
||||
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
|
||||
agent1 = baker.make_recipe("agents.server_agent", site=sites[1])
|
||||
agent2 = baker.make_recipe("agents.workstation_agent", site=sites[3])
|
||||
|
||||
generate_agent_checks_by_location_task(
|
||||
{"client": "Default1", "site": "Default1"},
|
||||
{"site_id": sites[0].id},
|
||||
"server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -871,7 +833,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0)
|
||||
|
||||
generate_agent_checks_by_location_task(
|
||||
{"client": "Default1"}, "workstation", clear=True, create_tasks=True
|
||||
{"site__client_id": clients[0].id},
|
||||
"workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
# workstation_agent should now have policy checks and the other agents should not
|
||||
self.assertEqual(
|
||||
@@ -888,42 +852,34 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
clients = baker.make("clients.Client", client=seq("Default"), _quantity=2)
|
||||
baker.make(
|
||||
"clients.Site", client=cycle(clients), site=seq("Default"), _quantity=4
|
||||
|
||||
site = baker.make("clients.Site")
|
||||
server_agents = baker.make_recipe("agents.server_agent", site=site, _quantity=3)
|
||||
workstation_agents = baker.make_recipe(
|
||||
"agents.workstation_agent", site=site, _quantity=4
|
||||
)
|
||||
server_agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default1", site="Default1"
|
||||
)
|
||||
workstation_agent = baker.make_recipe(
|
||||
"agents.workstation_agent", client="Default1", site="Default3"
|
||||
)
|
||||
agent1 = baker.make_recipe("agents.agent", client="Default2", site="Default2")
|
||||
agent2 = baker.make_recipe("agents.agent", client="Default2", site="Default4")
|
||||
core = CoreSettings.objects.first()
|
||||
core.server_policy = policy
|
||||
core.workstation_policy = policy
|
||||
core.save()
|
||||
|
||||
generate_all_agent_checks_task("server", clear=True, create_tasks=True)
|
||||
generate_all_agent_checks_task("server", create_tasks=True)
|
||||
|
||||
# all servers should have 7 checks
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 7)
|
||||
self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 0)
|
||||
for agent in server_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
|
||||
generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
|
||||
for agent in workstation_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||
|
||||
generate_all_agent_checks_task("workstation", create_tasks=True)
|
||||
|
||||
# all agents should have 7 checks now
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 7)
|
||||
self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 7)
|
||||
for agent in server_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
|
||||
for agent in workstation_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
|
||||
def test_delete_policy_check(self):
|
||||
from .tasks import delete_policy_check_task
|
||||
@@ -931,11 +887,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
client = baker.make("clients.Client", client="Default", server_policy=policy)
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default", site="Default"
|
||||
)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
# make sure agent has 7 checks
|
||||
@@ -960,11 +913,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
client = baker.make("clients.Client", client="Default", server_policy=policy)
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default", site="Default"
|
||||
)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
# make sure agent has 7 checks
|
||||
@@ -997,13 +946,10 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
tasks = baker.make(
|
||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||
)
|
||||
client = baker.make("clients.Client", client="Default")
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default", site="Default", policy=policy
|
||||
)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
|
||||
generate_agent_tasks_from_policies_task(policy.id, clear=True)
|
||||
generate_agent_tasks_from_policies_task(policy.id)
|
||||
|
||||
agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()
|
||||
|
||||
@@ -1027,33 +973,24 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make(
|
||||
baker.make(
|
||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||
)
|
||||
clients = baker.make(
|
||||
"clients.Client",
|
||||
client=seq("Default"),
|
||||
_quantity=2,
|
||||
server_policy=policy,
|
||||
workstation_policy=policy,
|
||||
)
|
||||
baker.make(
|
||||
"clients.Site", client=cycle(clients), site=seq("Default"), _quantity=4
|
||||
)
|
||||
server_agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default1", site="Default1"
|
||||
)
|
||||
workstation_agent = baker.make_recipe(
|
||||
"agents.workstation_agent", client="Default1", site="Default3"
|
||||
)
|
||||
agent1 = baker.make_recipe("agents.agent", client="Default2", site="Default2")
|
||||
agent2 = baker.make_recipe("agents.agent", client="Default2", site="Default4")
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
|
||||
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
|
||||
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
|
||||
agent1 = baker.make_recipe("agents.agent", site=sites[1])
|
||||
agent2 = baker.make_recipe("agents.agent", site=sites[3])
|
||||
|
||||
generate_agent_tasks_by_location_task(
|
||||
{"client": "Default1", "site": "Default1"}, "server", clear=True
|
||||
)
|
||||
generate_agent_tasks_by_location_task({"site_id": sites[0].id}, "server")
|
||||
|
||||
# all servers in Default1 and site Default1 should have 3 tasks
|
||||
# all servers in site1 and site2 should have 3 tasks
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 0
|
||||
)
|
||||
@@ -1062,7 +999,7 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
|
||||
|
||||
generate_agent_tasks_by_location_task(
|
||||
{"client": "Default1"}, "workstation", clear=True
|
||||
{"site__client_id": clients[0].id}, "workstation"
|
||||
)
|
||||
|
||||
# all workstations in Default1 should have 3 tasks
|
||||
@@ -1079,11 +1016,8 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
client = baker.make("clients.Client", client="Default", server_policy=policy)
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default", site="Default"
|
||||
)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
delete_policy_autotask_task(tasks[0].id)
|
||||
@@ -1103,20 +1037,20 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
for task in tasks:
|
||||
run_win_task.assert_any_call(task.id)
|
||||
|
||||
def test_updated_policy_tasks(self):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_update_policy_tasks(self, nats_cmd):
|
||||
from .tasks import update_policy_task_fields_task
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
nats_cmd.return_value = "ok"
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make(
|
||||
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
|
||||
)
|
||||
client = baker.make("clients.Client", client="Default", server_policy=policy)
|
||||
baker.make("clients.Site", client=client, site="Default")
|
||||
agent = baker.make_recipe(
|
||||
"agents.server_agent", client="Default", site="Default"
|
||||
)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
tasks[0].enabled = False
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from django.db import DataError
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
@@ -12,7 +11,7 @@ from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
|
||||
from clients.serializers import ClientSerializer, TreeSerializer
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
@@ -33,7 +32,6 @@ from .tasks import (
|
||||
generate_agent_checks_from_policies_task,
|
||||
generate_agent_checks_by_location_task,
|
||||
generate_agent_tasks_from_policies_task,
|
||||
generate_agent_tasks_by_location_task,
|
||||
run_win_policy_autotask_task,
|
||||
)
|
||||
|
||||
@@ -85,7 +83,6 @@ class GetUpdateDeletePolicy(APIView):
|
||||
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
|
||||
generate_agent_checks_from_policies_task.delay(
|
||||
policypk=policy.pk,
|
||||
clear=(not saved_policy.active or not saved_policy.enforced),
|
||||
create_tasks=(saved_policy.active != old_active),
|
||||
)
|
||||
|
||||
@@ -95,8 +92,8 @@ class GetUpdateDeletePolicy(APIView):
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
|
||||
# delete all managed policy checks off of agents
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
|
||||
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
|
||||
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk)
|
||||
policy.delete()
|
||||
|
||||
return Response("ok")
|
||||
@@ -172,7 +169,7 @@ class GetRelated(APIView):
|
||||
if site not in policy.server_sites.all():
|
||||
filtered_server_sites.append(site)
|
||||
|
||||
response["server_sites"] = TreeSerializer(
|
||||
response["server_sites"] = SiteSerializer(
|
||||
filtered_server_sites + list(policy.server_sites.all()), many=True
|
||||
).data
|
||||
|
||||
@@ -181,7 +178,7 @@ class GetRelated(APIView):
|
||||
if site not in policy.workstation_sites.all():
|
||||
filtered_workstation_sites.append(site)
|
||||
|
||||
response["workstation_sites"] = TreeSerializer(
|
||||
response["workstation_sites"] = SiteSerializer(
|
||||
filtered_workstation_sites + list(policy.workstation_sites.all()), many=True
|
||||
).data
|
||||
|
||||
@@ -218,9 +215,8 @@ class GetRelated(APIView):
|
||||
client.save()
|
||||
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -236,9 +232,8 @@ class GetRelated(APIView):
|
||||
site.workstation_policy = policy
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -258,9 +253,8 @@ class GetRelated(APIView):
|
||||
client.server_policy = policy
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -276,9 +270,8 @@ class GetRelated(APIView):
|
||||
site.server_policy = policy
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -296,9 +289,8 @@ class GetRelated(APIView):
|
||||
client.workstation_policy = None
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -311,9 +303,8 @@ class GetRelated(APIView):
|
||||
site.workstation_policy = None
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -329,9 +320,8 @@ class GetRelated(APIView):
|
||||
client.server_policy = None
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": client.client},
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -343,9 +333,8 @@ class GetRelated(APIView):
|
||||
site.server_policy = None
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"client": site.client.client, "site": site.site},
|
||||
location={"site_id": site.pk},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
@@ -360,14 +349,14 @@ class GetRelated(APIView):
|
||||
if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
|
||||
agent.policy = policy
|
||||
agent.save()
|
||||
agent.generate_checks_from_policies(clear=True)
|
||||
agent.generate_tasks_from_policies(clear=True)
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
else:
|
||||
if agent.policy:
|
||||
agent.policy = None
|
||||
agent.save()
|
||||
agent.generate_checks_from_policies(clear=True)
|
||||
agent.generate_tasks_from_policies(clear=True)
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -423,14 +412,16 @@ class UpdatePatchPolicy(APIView):
|
||||
def patch(self, request):
|
||||
|
||||
agents = None
|
||||
if "client" in request.data and "site" in request.data:
|
||||
agents = Agent.objects.filter(
|
||||
client=request.data["client"], site=request.data["site"]
|
||||
if "client" in request.data:
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site__client_id=request.data["client"]
|
||||
)
|
||||
elif "site" in request.data:
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site_id=request.data["site"]
|
||||
)
|
||||
elif "client" in request.data:
|
||||
agents = Agent.objects.filter(client=request.data["client"])
|
||||
else:
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
|
||||
|
||||
for agent in agents:
|
||||
winupdatepolicy = agent.winupdatepolicy.get()
|
||||
|
||||
@@ -7,7 +7,7 @@ class Command(BaseCommand):
|
||||
help = "Checks for orphaned tasks on all agents and removes them"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "last_seen", "overdue_time")
|
||||
online = [i for i in agents if i.status == "online"]
|
||||
for agent in online:
|
||||
remove_orphaned_win_tasks.delay(agent.pk)
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-29 09:12
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0008_auto_20201030_1515"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="automatedtask",
|
||||
name="run_time_bit_weekdays",
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,33 @@
|
||||
from django.db import migrations
|
||||
from tacticalrmm.utils import get_bit_days
|
||||
|
||||
DAYS_OF_WEEK = {
|
||||
0: "Monday",
|
||||
1: "Tuesday",
|
||||
2: "Wednesday",
|
||||
3: "Thursday",
|
||||
4: "Friday",
|
||||
5: "Saturday",
|
||||
6: "Sunday",
|
||||
}
|
||||
|
||||
|
||||
def migrate_days(apps, schema_editor):
|
||||
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
|
||||
for task in AutomatedTask.objects.exclude(run_time_days__isnull=True).exclude(
|
||||
run_time_days=[]
|
||||
):
|
||||
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
|
||||
task.run_time_bit_weekdays = get_bit_days(run_days)
|
||||
task.save(update_fields=["run_time_bit_weekdays"])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0009_automatedtask_run_time_bit_weekdays"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_days),
|
||||
]
|
||||
@@ -1,3 +1,4 @@
|
||||
import pytz
|
||||
import random
|
||||
import string
|
||||
import datetime as dt
|
||||
@@ -5,8 +6,8 @@ import datetime as dt
|
||||
from django.db import models
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models.fields import DateTimeField
|
||||
from automation.models import Policy
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
@@ -41,7 +42,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
policy = models.ForeignKey(
|
||||
Policy,
|
||||
"automation.Policy",
|
||||
related_name="autotasks",
|
||||
null=True,
|
||||
blank=True,
|
||||
@@ -68,6 +69,8 @@ class AutomatedTask(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
name = models.CharField(max_length=255)
|
||||
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
|
||||
# run_time_days is deprecated, use bit weekdays
|
||||
run_time_days = ArrayField(
|
||||
models.IntegerField(choices=RUN_TIME_DAY_CHOICES, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -106,21 +109,21 @@ class AutomatedTask(BaseAuditModel):
|
||||
elif self.task_type == "runonce":
|
||||
return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}'
|
||||
elif self.task_type == "scheduled":
|
||||
ret = []
|
||||
for i in self.run_time_days:
|
||||
for j in RUN_TIME_DAY_CHOICES:
|
||||
if i in j:
|
||||
ret.append(j[1][0:3])
|
||||
|
||||
run_time_nice = dt.datetime.strptime(
|
||||
self.run_time_minute, "%H:%M"
|
||||
).strftime("%I:%M %p")
|
||||
|
||||
if len(ret) == 7:
|
||||
return f"Every day at {run_time_nice}"
|
||||
else:
|
||||
days = ",".join(ret)
|
||||
return f"{days} at {run_time_nice}"
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"{days} at {run_time_nice}"
|
||||
|
||||
@property
|
||||
def last_run_as_timezone(self):
|
||||
if self.last_run is not None and self.agent is not None:
|
||||
return self.last_run.astimezone(
|
||||
pytz.timezone(self.agent.timezone)
|
||||
).strftime("%b-%d-%Y - %H:%M")
|
||||
|
||||
return self.last_run
|
||||
|
||||
@staticmethod
|
||||
def generate_task_name():
|
||||
@@ -137,7 +140,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
def create_policy_task(self, agent=None, policy=None):
|
||||
from .tasks import create_win_task_schedule
|
||||
|
||||
# exit is neither are set or if both are set
|
||||
# exit if neither are set or if both are set
|
||||
if not agent and not policy or agent and policy:
|
||||
return
|
||||
|
||||
@@ -159,6 +162,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
name=self.name,
|
||||
run_time_days=self.run_time_days,
|
||||
run_time_minute=self.run_time_minute,
|
||||
run_time_bit_weekdays=self.run_time_bit_weekdays,
|
||||
run_time_date=self.run_time_date,
|
||||
task_type=self.task_type,
|
||||
win_task_name=self.win_task_name,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from .models import AutomatedTask
|
||||
@@ -12,6 +13,7 @@ class TaskSerializer(serializers.ModelSerializer):
|
||||
|
||||
assigned_check = CheckSerializer(read_only=True)
|
||||
schedule = serializers.ReadOnlyField()
|
||||
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
|
||||
@@ -1,52 +1,37 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from loguru import logger
|
||||
from tacticalrmm.celery import app
|
||||
from django.conf import settings
|
||||
import pytz
|
||||
from django.utils import timezone as djangotime
|
||||
from packaging import version as pyver
|
||||
|
||||
from .models import AutomatedTask
|
||||
from logs.models import PendingAction
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
DAYS_OF_WEEK = {
|
||||
0: "Monday",
|
||||
1: "Tuesday",
|
||||
2: "Wednesday",
|
||||
3: "Thursday",
|
||||
4: "Friday",
|
||||
5: "Saturday",
|
||||
6: "Sunday",
|
||||
}
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk, pending_action=False):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if task.task_type == "scheduled":
|
||||
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task.win_task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {task.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Weekly",
|
||||
f'start_time="{task.run_time_minute}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
kwargs={"days_of_week": run_days},
|
||||
)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": task.run_time_bit_weekdays,
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif task.task_type == "runonce":
|
||||
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(task.agent.timezone)
|
||||
task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
|
||||
@@ -57,47 +42,48 @@ def create_win_task_schedule(pk, pending_action=False):
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
task.save()
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task.win_task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {task.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
f'start_date="{task.run_time_date.strftime("%Y-%m-%d")}"',
|
||||
f'start_time="{task.run_time_date.strftime("%H:%M")}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"start_when_available=True",
|
||||
],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(task.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(task.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(task.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if task.remove_if_not_scheduled and pyver.parse(
|
||||
task.agent.version
|
||||
) >= pyver.parse("1.1.2"):
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif task.task_type == "checkfailure" or task.task_type == "manual":
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task.win_task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {task.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
'start_date="1975-01-01"',
|
||||
'start_time="01:00"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
@@ -129,13 +115,16 @@ def create_win_task_schedule(pk, pending_action=False):
|
||||
def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.edit_task",
|
||||
arg=[f"name={task.win_task_name}", f"enabled={action}"],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": task.win_task_name,
|
||||
"enabled": action,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data))
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
PendingAction(
|
||||
@@ -150,9 +139,6 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to update the scheduled task {task.win_task_name} on {task.agent.hostname}. It will be updated when the agent checks in."
|
||||
)
|
||||
return
|
||||
|
||||
# clear pending action since it was successful
|
||||
@@ -163,7 +149,7 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
|
||||
task.sync_status = "synced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
logger.info(f"{task.agent.hostname} task {task.name} was edited.")
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@@ -171,15 +157,19 @@ def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
def delete_win_task_schedule(pk, pending_action=False):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
r = task.agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.delete_task",
|
||||
arg=[f"name={task.win_task_name}"],
|
||||
)
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task.win_task_name},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
@@ -188,10 +178,7 @@ def delete_win_task_schedule(pk, pending_action=False):
|
||||
task.sync_status = "pendingdeletion"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to delete scheduled task {task.win_task_name} on {task.agent.hostname}. It was marked pending deletion and will be removed when the agent checks in."
|
||||
)
|
||||
return
|
||||
return "timeout"
|
||||
|
||||
# complete pending action since it was successful
|
||||
if pending_action:
|
||||
@@ -199,15 +186,17 @@ def delete_win_task_schedule(pk, pending_action=False):
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
task.delete()
|
||||
logger.info(f"{task.agent.hostname} task {task.name} was deleted.")
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def run_win_task(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
r = task.agent.salt_api_async(func="task.run", arg=[f"name={task.win_task_name}"])
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
return "ok"
|
||||
|
||||
|
||||
@@ -219,18 +208,9 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
logger.info(f"Orphaned task cleanup initiated on {agent.hostname}.")
|
||||
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="task.list_tasks",
|
||||
)
|
||||
r = asyncio.run(agent.nats_cmd({"func": "listschedtasks"}, timeout=10))
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
logger.error(
|
||||
f"Unable to clean up scheduled tasks on {agent.hostname}. Agent might be offline"
|
||||
)
|
||||
return "errtimeout"
|
||||
|
||||
if not isinstance(r, list):
|
||||
if not isinstance(r, list) and not r: # empty list
|
||||
logger.error(f"Unable to clean up scheduled tasks on {agent.hostname}: {r}")
|
||||
return "notlist"
|
||||
|
||||
@@ -239,7 +219,8 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
exclude_tasks = (
|
||||
"TacticalRMM_fixmesh",
|
||||
"TacticalRMM_SchedReboot",
|
||||
"TacticalRMM_saltwatchdog", # will be implemented in future
|
||||
"TacticalRMM_sync",
|
||||
"TacticalRMM_agentupdate",
|
||||
)
|
||||
|
||||
for task in r:
|
||||
@@ -249,16 +230,16 @@ def remove_orphaned_win_tasks(agentpk):
|
||||
|
||||
if task.startswith("TacticalRMM_") and task not in agent_task_names:
|
||||
# delete task since it doesn't exist in UI
|
||||
ret = agent.salt_api_cmd(
|
||||
timeout=20,
|
||||
func="task.delete_task",
|
||||
arg=[f"name={task}"],
|
||||
)
|
||||
if isinstance(ret, bool) and ret is True:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
else:
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task},
|
||||
}
|
||||
ret = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if ret != "ok":
|
||||
logger.error(
|
||||
f"Unable to clean up orphaned task {task} on {agent.hostname}: {ret}"
|
||||
)
|
||||
else:
|
||||
logger.info(f"Removed orphaned task {task} from {agent.hostname}")
|
||||
|
||||
logger.info(f"Orphaned task cleanup finished on {agent.hostname}")
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import datetime as dt
|
||||
from unittest.mock import patch, call
|
||||
from model_bakery import baker
|
||||
from django.utils import timezone as djangotime
|
||||
@@ -25,9 +26,9 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
# setup data
|
||||
script = baker.make_recipe("scripts.script")
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent_old = baker.make_recipe("agents.agent", version="0.9.0")
|
||||
policy = baker.make("automation.Policy")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
|
||||
# test script set to invalid pk
|
||||
data = {"autotask": {"script": 500}}
|
||||
@@ -50,10 +51,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test invalid agent version
|
||||
# test old agent version
|
||||
data = {
|
||||
"autotask": {"script": script.id, "script_args": ["args"]},
|
||||
"agent": agent_old.id,
|
||||
"autotask": {"script": script.id},
|
||||
"agent": old_agent.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
@@ -63,7 +64,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
data = {
|
||||
"autotask": {
|
||||
"name": "Test Task Scheduled with Assigned Check",
|
||||
"run_time_days": [0, 1, 2],
|
||||
"run_time_days": ["Sunday", "Monday", "Friday"],
|
||||
"run_time_minute": "10:00",
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
@@ -84,6 +85,7 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
data = {
|
||||
"autotask": {
|
||||
"name": "Test Task Manual",
|
||||
"run_time_days": [],
|
||||
"timeout": 120,
|
||||
"enabled": True,
|
||||
"script": script.id,
|
||||
@@ -181,10 +183,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("autotasks.tasks.run_win_task.delay")
|
||||
def test_run_autotask(self, run_win_task):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_autotask(self, nats_cmd):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
|
||||
# test invalid url
|
||||
@@ -195,7 +197,15 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
url = f"/tasks/runwintask/{task.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
run_win_task.assert_called_with(task.id)
|
||||
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
|
||||
url = f"/tasks/runwintask/{task2.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
nats_cmd.assert_not_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -205,8 +215,8 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_remove_orphaned_win_task(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_remove_orphaned_win_task(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
self.task1 = AutomatedTask.objects.create(
|
||||
agent=self.agent,
|
||||
@@ -214,20 +224,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
)
|
||||
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, "errtimeout")
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, "errtimeout")
|
||||
|
||||
salt_api_cmd.return_value = "task not found in"
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, "notlist")
|
||||
|
||||
salt_api_cmd.reset_mock()
|
||||
|
||||
# test removing an orphaned task
|
||||
win_tasks = [
|
||||
"Adobe Acrobat Update Task",
|
||||
@@ -242,50 +238,54 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
]
|
||||
|
||||
self.calls = [
|
||||
call(timeout=15, func="task.list_tasks"),
|
||||
call({"func": "listschedtasks"}, timeout=10),
|
||||
call(
|
||||
timeout=20,
|
||||
func="task.delete_task",
|
||||
arg=["name=TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"],
|
||||
{
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
),
|
||||
]
|
||||
|
||||
salt_api_cmd.side_effect = [win_tasks, True]
|
||||
nats_cmd.side_effect = [win_tasks, "ok"]
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(salt_api_cmd.call_count, 2)
|
||||
salt_api_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
nats_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test salt delete_task fail
|
||||
salt_api_cmd.reset_mock()
|
||||
salt_api_cmd.side_effect = [win_tasks, False]
|
||||
# test nats delete task fail
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.side_effect = [win_tasks, "error deleting task"]
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
salt_api_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(salt_api_cmd.call_count, 2)
|
||||
nats_cmd.assert_has_calls(self.calls)
|
||||
self.assertEqual(nats_cmd.call_count, 2)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# no orphaned tasks
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
|
||||
salt_api_cmd.side_effect = [win_tasks, True]
|
||||
nats_cmd.side_effect = [win_tasks, "ok"]
|
||||
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
|
||||
self.assertEqual(salt_api_cmd.call_count, 1)
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
def test_run_win_task(self, salt_api_async):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_win_task(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
self.task1 = AutomatedTask.objects.create(
|
||||
agent=self.agent,
|
||||
name="test task 1",
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
)
|
||||
salt_api_async.return_value = "Response 200"
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = run_win_task.s(self.task1.pk).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_create_win_task_schedule(self, salt_api_cmd):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_create_win_task_schedule(self, nats_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
@@ -295,46 +295,32 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
name="test task 1",
|
||||
win_task_name=task_name,
|
||||
task_type="scheduled",
|
||||
run_time_days=[0, 1, 6],
|
||||
run_time_bit_weekdays=127,
|
||||
run_time_minute="21:55",
|
||||
)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(salt_api_cmd.call_count, 1)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task1.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Weekly",
|
||||
'start_time="21:55"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
kwargs={"days_of_week": ["Monday", "Tuesday", "Sunday"]},
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": 127,
|
||||
"pk": self.task1.pk,
|
||||
"name": task_name,
|
||||
"hour": 21,
|
||||
"min": 55,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "synced")
|
||||
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
|
||||
salt_api_cmd.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
@@ -345,7 +331,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
agent=self.agent, action_type="taskaction"
|
||||
)
|
||||
self.assertEqual(self.pending_action.status, "pending")
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(
|
||||
pk=self.task1.pk, pending_action=self.pending_action.pk
|
||||
).apply()
|
||||
@@ -354,7 +340,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
self.assertEqual(self.pending_action.status, "completed")
|
||||
|
||||
# test runonce with future date
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
run_time_date = djangotime.now() + djangotime.timedelta(hours=22)
|
||||
self.task2 = AutomatedTask.objects.create(
|
||||
@@ -364,30 +350,29 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="runonce",
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task2.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
f'start_date="{run_time_date.strftime("%Y-%m-%d")}"',
|
||||
f'start_time="{run_time_date.strftime("%H:%M")}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"start_when_available=True",
|
||||
],
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": self.task2.pk,
|
||||
"name": task_name,
|
||||
"year": int(dt.datetime.strftime(self.task2.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(self.task2.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(self.task2.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(self.task2.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test runonce with date in the past
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
run_time_date = djangotime.now() - djangotime.timedelta(days=13)
|
||||
self.task3 = AutomatedTask.objects.create(
|
||||
@@ -397,31 +382,13 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="runonce",
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
|
||||
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task3.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
f'start_date="{self.task3.run_time_date.strftime("%Y-%m-%d")}"',
|
||||
f'start_time="{self.task3.run_time_date.strftime("%H:%M")}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"start_when_available=True",
|
||||
],
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test checkfailure
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
self.check = baker.make_recipe("checks.diskspace_check", agent=self.agent)
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
self.task4 = AutomatedTask.objects.create(
|
||||
@@ -431,29 +398,24 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="checkfailure",
|
||||
assigned_check=self.check,
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task4.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
'start_date="1975-01-01"',
|
||||
'start_time="01:00"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.task4.pk,
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
# test manual
|
||||
salt_api_cmd.reset_mock()
|
||||
nats_cmd.reset_mock()
|
||||
task_name = AutomatedTask.generate_task_name()
|
||||
self.task5 = AutomatedTask.objects.create(
|
||||
agent=self.agent,
|
||||
@@ -461,23 +423,18 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
win_task_name=task_name,
|
||||
task_type="manual",
|
||||
)
|
||||
salt_api_cmd.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
|
||||
salt_api_cmd.assert_called_with(
|
||||
timeout=20,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Program Files\\TacticalAgent\\tacticalrmm.exe"',
|
||||
f'arguments="-m taskrunner -p {self.task5.pk}"',
|
||||
"start_in=C:\\Program Files\\TacticalAgent",
|
||||
"trigger_type=Once",
|
||||
'start_date="1975-01-01"',
|
||||
'start_time="01:00"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
],
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.task5.pk,
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import asyncio
|
||||
import pytz
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
@@ -9,16 +11,16 @@ from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from scripts.models import Script
|
||||
from core.models import CoreSettings
|
||||
|
||||
from .serializers import TaskSerializer, AutoTaskSerializer
|
||||
|
||||
from .tasks import (
|
||||
create_win_task_schedule,
|
||||
delete_win_task_schedule,
|
||||
run_win_task,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
from tacticalrmm.utils import notify_error
|
||||
from tacticalrmm.utils import notify_error, get_bit_days
|
||||
|
||||
|
||||
class AddAutoTask(APIView):
|
||||
@@ -36,17 +38,20 @@ class AddAutoTask(APIView):
|
||||
parent = {"policy": policy}
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=data["agent"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
parent = {"agent": agent}
|
||||
added = "0.11.0"
|
||||
if data["autotask"]["script_args"] and agent.not_supported(added):
|
||||
return notify_error(
|
||||
f"Script arguments only available in agent {added} or greater"
|
||||
)
|
||||
|
||||
check = None
|
||||
if data["autotask"]["assigned_check"]:
|
||||
check = get_object_or_404(Check, pk=data["autotask"]["assigned_check"])
|
||||
|
||||
bit_weekdays = None
|
||||
if data["autotask"]["run_time_days"]:
|
||||
bit_weekdays = get_bit_days(data["autotask"]["run_time_days"])
|
||||
|
||||
del data["autotask"]["run_time_days"]
|
||||
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save(
|
||||
@@ -54,6 +59,7 @@ class AddAutoTask(APIView):
|
||||
script=script,
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
assigned_check=check,
|
||||
run_time_bit_weekdays=bit_weekdays,
|
||||
)
|
||||
|
||||
if not "policy" in data:
|
||||
@@ -68,8 +74,12 @@ class AddAutoTask(APIView):
|
||||
class AutoTask(APIView):
|
||||
def get(self, request, pk):
|
||||
|
||||
agent = Agent.objects.only("pk").get(pk=pk)
|
||||
return Response(AutoTaskSerializer(agent).data)
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
ctx = {
|
||||
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone),
|
||||
"agent_tz": agent.time_zone,
|
||||
}
|
||||
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
@@ -108,5 +118,8 @@ class AutoTask(APIView):
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
run_win_task.delay(task.pk)
|
||||
if not task.agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Check
|
||||
from .models import Check, CheckHistory
|
||||
|
||||
admin.site.register(Check)
|
||||
admin.site.register(CheckHistory)
|
||||
|
||||
30
api/tacticalrmm/checks/migrations/0011_check_run_history.py
Normal file
30
api/tacticalrmm/checks/migrations/0011_check_run_history.py
Normal file
@@ -0,0 +1,30 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-09 02:56
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0010_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="check",
|
||||
name="run_history",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.PositiveIntegerField(),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
blank=True,
|
||||
default=list,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
]
|
||||
39
api/tacticalrmm/checks/migrations/0011_checkhistory.py
Normal file
39
api/tacticalrmm/checks/migrations/0011_checkhistory.py
Normal file
@@ -0,0 +1,39 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-09 21:36
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0010_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="CheckHistory",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("x", models.DateTimeField()),
|
||||
("y", models.PositiveIntegerField()),
|
||||
("results", models.JSONField(blank=True, null=True)),
|
||||
(
|
||||
"check_history",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="check_history",
|
||||
to="checks.check",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py
Normal file
18
api/tacticalrmm/checks/migrations/0012_auto_20210110_0503.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-10 05:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0011_checkhistory"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="checkhistory",
|
||||
name="y",
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py
Normal file
18
api/tacticalrmm/checks/migrations/0013_auto_20210110_0505.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-10 05:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0012_auto_20210110_0503"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="checkhistory",
|
||||
name="y",
|
||||
field=models.PositiveIntegerField(null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,13 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-10 18:08
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0013_auto_20210110_0505"),
|
||||
("checks", "0011_check_run_history"),
|
||||
]
|
||||
|
||||
operations = []
|
||||
27
api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py
Normal file
27
api/tacticalrmm/checks/migrations/0015_auto_20210110_1808.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-10 18:08
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("checks", "0014_merge_20210110_1808"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="check",
|
||||
name="run_history",
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="checkhistory",
|
||||
name="x",
|
||||
field=models.DateTimeField(auto_now_add=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="checkhistory",
|
||||
name="y",
|
||||
field=models.PositiveIntegerField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,14 +1,15 @@
|
||||
import base64
|
||||
import asyncio
|
||||
import string
|
||||
import os
|
||||
import json
|
||||
import zlib
|
||||
from statistics import mean
|
||||
import pytz
|
||||
from statistics import mean, mode
|
||||
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from rest_framework.fields import JSONField
|
||||
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
@@ -177,6 +178,15 @@ class Check(BaseAuditModel):
|
||||
if self.check_type == "cpuload" or self.check_type == "memory":
|
||||
return ", ".join(str(f"{x}%") for x in self.history[-6:])
|
||||
|
||||
@property
|
||||
def last_run_as_timezone(self):
|
||||
if self.last_run is not None and self.agent is not None:
|
||||
return self.last_run.astimezone(
|
||||
pytz.timezone(self.agent.timezone)
|
||||
).strftime("%b-%d-%Y - %H:%M")
|
||||
|
||||
return self.last_run
|
||||
|
||||
@property
|
||||
def non_editable_fields(self):
|
||||
return [
|
||||
@@ -199,8 +209,15 @@ class Check(BaseAuditModel):
|
||||
"parent_check",
|
||||
"managed_by_policy",
|
||||
"overriden_by_policy",
|
||||
"created_by",
|
||||
"created_time",
|
||||
"modified_by",
|
||||
"modified_time",
|
||||
]
|
||||
|
||||
def add_check_history(self, value, more_info=None):
|
||||
CheckHistory.objects.create(check_history=self, y=value, results=more_info)
|
||||
|
||||
def handle_checkv2(self, data):
|
||||
# cpuload or mem checks
|
||||
if self.check_type == "cpuload" or self.check_type == "memory":
|
||||
@@ -219,6 +236,9 @@ class Check(BaseAuditModel):
|
||||
else:
|
||||
self.status = "passing"
|
||||
|
||||
# add check history
|
||||
self.add_check_history(data["percent"])
|
||||
|
||||
# diskspace checks
|
||||
elif self.check_type == "diskspace":
|
||||
if data["exists"]:
|
||||
@@ -232,6 +252,9 @@ class Check(BaseAuditModel):
|
||||
self.status = "passing"
|
||||
|
||||
self.more_info = f"Total: {total}B, Free: {free}B"
|
||||
|
||||
# add check history
|
||||
self.add_check_history(percent_used)
|
||||
else:
|
||||
self.status = "failing"
|
||||
self.more_info = f"Disk {self.disk} does not exist"
|
||||
@@ -264,6 +287,17 @@ class Check(BaseAuditModel):
|
||||
]
|
||||
)
|
||||
|
||||
# add check history
|
||||
self.add_check_history(
|
||||
1 if self.status == "failing" else 0,
|
||||
{
|
||||
"retcode": data["retcode"],
|
||||
"stdout": data["stdout"][:60],
|
||||
"stderr": data["stderr"][:60],
|
||||
"execution_time": self.execution_time,
|
||||
},
|
||||
)
|
||||
|
||||
# ping checks
|
||||
elif self.check_type == "ping":
|
||||
success = ["Reply", "bytes", "time", "TTL"]
|
||||
@@ -280,6 +314,10 @@ class Check(BaseAuditModel):
|
||||
self.more_info = output
|
||||
self.save(update_fields=["more_info"])
|
||||
|
||||
self.add_check_history(
|
||||
1 if self.status == "failing" else 0, self.more_info[:60]
|
||||
)
|
||||
|
||||
# windows service checks
|
||||
elif self.check_type == "winsvc":
|
||||
svc_stat = data["status"]
|
||||
@@ -292,12 +330,16 @@ class Check(BaseAuditModel):
|
||||
self.status = "passing"
|
||||
else:
|
||||
if self.agent and self.restart_if_stopped:
|
||||
r = self.agent.salt_api_cmd(
|
||||
func="service.restart", arg=self.svc_name, timeout=45
|
||||
)
|
||||
if r == "timeout" or r == "error":
|
||||
nats_data = {
|
||||
"func": "winsvcaction",
|
||||
"payload": {"name": self.svc_name, "action": "start"},
|
||||
}
|
||||
r = asyncio.run(self.agent.nats_cmd(nats_data, timeout=32))
|
||||
if r == "timeout" or r == "natsdown":
|
||||
self.status = "failing"
|
||||
elif isinstance(r, bool) and r:
|
||||
elif not r["success"] and r["errormsg"]:
|
||||
self.status = "failing"
|
||||
elif r["success"]:
|
||||
self.status = "passing"
|
||||
self.more_info = f"Status RUNNING"
|
||||
else:
|
||||
@@ -315,6 +357,10 @@ class Check(BaseAuditModel):
|
||||
|
||||
self.save(update_fields=["more_info"])
|
||||
|
||||
self.add_check_history(
|
||||
1 if self.status == "failing" else 0, self.more_info[:60]
|
||||
)
|
||||
|
||||
elif self.check_type == "eventlog":
|
||||
log = []
|
||||
is_wildcard = self.event_id_is_wildcard
|
||||
@@ -322,8 +368,7 @@ class Check(BaseAuditModel):
|
||||
eventID = self.event_id
|
||||
source = self.event_source
|
||||
message = self.event_message
|
||||
|
||||
r = json.loads(zlib.decompress(base64.b64decode(data["log"])))
|
||||
r = data["log"]
|
||||
|
||||
for i in r:
|
||||
if i["eventType"] == eventType:
|
||||
@@ -375,6 +420,11 @@ class Check(BaseAuditModel):
|
||||
self.extra_details = {"log": log}
|
||||
self.save(update_fields=["extra_details"])
|
||||
|
||||
self.add_check_history(
|
||||
1 if self.status == "failing" else 0,
|
||||
"Events Found:" + str(len(self.extra_details["log"])),
|
||||
)
|
||||
|
||||
# handle status
|
||||
if self.status == "failing":
|
||||
self.fail_count += 1
|
||||
@@ -395,42 +445,6 @@ class Check(BaseAuditModel):
|
||||
|
||||
return self.status
|
||||
|
||||
def handle_check(self, data):
|
||||
if self.check_type != "cpuload" and self.check_type != "memory":
|
||||
|
||||
if data["status"] == "passing" and self.fail_count != 0:
|
||||
self.fail_count = 0
|
||||
self.save(update_fields=["fail_count"])
|
||||
|
||||
elif data["status"] == "failing":
|
||||
self.fail_count += 1
|
||||
self.save(update_fields=["fail_count"])
|
||||
|
||||
else:
|
||||
self.history.append(data["percent"])
|
||||
|
||||
if len(self.history) > 15:
|
||||
self.history = self.history[-15:]
|
||||
|
||||
self.save(update_fields=["history"])
|
||||
|
||||
avg = int(mean(self.history))
|
||||
|
||||
if avg > self.threshold:
|
||||
self.status = "failing"
|
||||
self.fail_count += 1
|
||||
self.save(update_fields=["status", "fail_count"])
|
||||
else:
|
||||
self.status = "passing"
|
||||
if self.fail_count != 0:
|
||||
self.fail_count = 0
|
||||
self.save(update_fields=["status", "fail_count"])
|
||||
else:
|
||||
self.save(update_fields=["status"])
|
||||
|
||||
if self.email_alert and self.fail_count >= self.fails_b4_alert:
|
||||
handle_check_email_alert_task.delay(self.pk)
|
||||
|
||||
@staticmethod
|
||||
def serialize(check):
|
||||
# serializes the check and returns json
|
||||
@@ -518,7 +532,7 @@ class Check(BaseAuditModel):
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client}, {self.agent.site}, {self} Failed"
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
@@ -594,7 +608,7 @@ class Check(BaseAuditModel):
|
||||
CORE = CoreSettings.objects.first()
|
||||
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client}, {self.agent.site}, {self} Failed"
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
@@ -629,3 +643,17 @@ class Check(BaseAuditModel):
|
||||
body = subject
|
||||
|
||||
CORE.send_sms(body)
|
||||
|
||||
|
||||
class CheckHistory(models.Model):
|
||||
check_history = models.ForeignKey(
|
||||
Check,
|
||||
related_name="check_history",
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
x = models.DateTimeField(auto_now_add=True)
|
||||
y = models.PositiveIntegerField(null=True, blank=True, default=None)
|
||||
results = models.JSONField(null=True, blank=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.check_history.readable_desc
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import validators as _v
|
||||
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from .models import Check
|
||||
from .models import Check, CheckHistory
|
||||
from autotasks.models import AutomatedTask
|
||||
from scripts.serializers import ScriptSerializer, ScriptCheckSerializer
|
||||
|
||||
@@ -18,6 +18,7 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
readable_desc = serializers.ReadOnlyField()
|
||||
script = ScriptSerializer(read_only=True)
|
||||
assigned_task = serializers.SerializerMethodField()
|
||||
last_run = serializers.ReadOnlyField(source="last_run_as_timezone")
|
||||
history_info = serializers.ReadOnlyField()
|
||||
|
||||
## Change to return only array of tasks after 9/25/2020
|
||||
@@ -47,12 +48,11 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
.filter(check_type="diskspace")
|
||||
.exclude(managed_by_policy=True)
|
||||
)
|
||||
if checks:
|
||||
for check in checks:
|
||||
if val["disk"] in check.disk:
|
||||
raise serializers.ValidationError(
|
||||
f"A disk check for Drive {val['disk']} already exists!"
|
||||
)
|
||||
for check in checks:
|
||||
if val["disk"] in check.disk:
|
||||
raise serializers.ValidationError(
|
||||
f"A disk check for Drive {val['disk']} already exists!"
|
||||
)
|
||||
|
||||
# ping checks
|
||||
if check_type == "ping":
|
||||
@@ -65,6 +65,26 @@ class CheckSerializer(serializers.ModelSerializer):
|
||||
"Please enter a valid IP address or domain name"
|
||||
)
|
||||
|
||||
if check_type == "cpuload" and not self.instance:
|
||||
if (
|
||||
Check.objects.filter(**self.context, check_type="cpuload")
|
||||
.exclude(managed_by_policy=True)
|
||||
.exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"A cpuload check for this agent already exists"
|
||||
)
|
||||
|
||||
if check_type == "memory" and not self.instance:
|
||||
if (
|
||||
Check.objects.filter(**self.context, check_type="memory")
|
||||
.exclude(managed_by_policy=True)
|
||||
.exists()
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"A memory check for this agent already exists"
|
||||
)
|
||||
|
||||
return val
|
||||
|
||||
|
||||
@@ -75,101 +95,7 @@ class AssignedTaskCheckRunnerField(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class CheckRunnerGetSerializer(serializers.ModelSerializer):
|
||||
# for the windows agent
|
||||
# only send data needed for agent to run a check
|
||||
|
||||
assigned_task = serializers.SerializerMethodField()
|
||||
script = ScriptSerializer(read_only=True)
|
||||
|
||||
def get_assigned_task(self, obj):
|
||||
if obj.assignedtask.exists():
|
||||
# this will not break agents on version 0.10.2 or lower
|
||||
# newer agents once released will properly handle multiple tasks assigned to a check
|
||||
task = obj.assignedtask.first()
|
||||
return AssignedTaskCheckRunnerField(task).data
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
exclude = [
|
||||
"policy",
|
||||
"managed_by_policy",
|
||||
"overriden_by_policy",
|
||||
"parent_check",
|
||||
"name",
|
||||
"more_info",
|
||||
"last_run",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"fails_b4_alert",
|
||||
"fail_count",
|
||||
"email_sent",
|
||||
"text_sent",
|
||||
"outage_history",
|
||||
"extra_details",
|
||||
"stdout",
|
||||
"stderr",
|
||||
"retcode",
|
||||
"execution_time",
|
||||
"svc_display_name",
|
||||
"svc_policy_mode",
|
||||
"created_by",
|
||||
"created_time",
|
||||
"modified_by",
|
||||
"modified_time",
|
||||
"history",
|
||||
]
|
||||
|
||||
|
||||
class CheckRunnerGetSerializerV2(serializers.ModelSerializer):
|
||||
# for the windows __python__ agent
|
||||
# only send data needed for agent to run a check
|
||||
|
||||
assigned_tasks = serializers.SerializerMethodField()
|
||||
script = ScriptSerializer(read_only=True)
|
||||
|
||||
def get_assigned_tasks(self, obj):
|
||||
if obj.assignedtask.exists():
|
||||
tasks = obj.assignedtask.all()
|
||||
return AssignedTaskCheckRunnerField(tasks, many=True).data
|
||||
|
||||
class Meta:
|
||||
model = Check
|
||||
exclude = [
|
||||
"policy",
|
||||
"managed_by_policy",
|
||||
"overriden_by_policy",
|
||||
"parent_check",
|
||||
"name",
|
||||
"more_info",
|
||||
"last_run",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"fails_b4_alert",
|
||||
"fail_count",
|
||||
"email_sent",
|
||||
"text_sent",
|
||||
"outage_history",
|
||||
"extra_details",
|
||||
"stdout",
|
||||
"stderr",
|
||||
"retcode",
|
||||
"execution_time",
|
||||
"svc_display_name",
|
||||
"svc_policy_mode",
|
||||
"created_by",
|
||||
"created_time",
|
||||
"modified_by",
|
||||
"modified_time",
|
||||
"history",
|
||||
]
|
||||
|
||||
|
||||
class CheckRunnerGetSerializerV3(serializers.ModelSerializer):
|
||||
# for the windows __golang__ agent
|
||||
# only send data needed for agent to run a check
|
||||
# the difference here is in the script serializer
|
||||
# script checks no longer rely on salt and are executed directly by the go agent
|
||||
|
||||
assigned_tasks = serializers.SerializerMethodField()
|
||||
script = ScriptCheckSerializer(read_only=True)
|
||||
|
||||
@@ -217,3 +143,15 @@ class CheckResultsSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Check
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class CheckHistorySerializer(serializers.ModelSerializer):
|
||||
x = serializers.SerializerMethodField()
|
||||
|
||||
def get_x(self, obj):
|
||||
return obj.x.astimezone(pytz.timezone(self.context["timezone"])).isoformat()
|
||||
|
||||
# used for return large amounts of graph data
|
||||
class Meta:
|
||||
model = CheckHistory
|
||||
fields = ("x", "y", "results")
|
||||
|
||||
@@ -5,8 +5,6 @@ from time import sleep
|
||||
from tacticalrmm.celery import app
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from agents.models import Agent
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_check_email_alert_task(pk):
|
||||
@@ -59,7 +57,12 @@ def handle_check_sms_alert_task(pk):
|
||||
|
||||
|
||||
@app.task
|
||||
def run_checks_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
agent.salt_api_async(func="win_agent.run_manual_checks")
|
||||
def prune_check_history(older_than_days: int) -> str:
|
||||
from .models import CheckHistory
|
||||
|
||||
CheckHistory.objects.filter(
|
||||
x__lt=djangotime.make_aware(dt.datetime.today())
|
||||
- djangotime.timedelta(days=older_than_days)
|
||||
).delete()
|
||||
|
||||
return "ok"
|
||||
|
||||
@@ -1,26 +1,41 @@
|
||||
from tacticalrmm.test import BaseTestCase
|
||||
from checks.models import CheckHistory
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import CheckSerializer
|
||||
from django.utils import timezone as djangotime
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker
|
||||
|
||||
|
||||
class TestCheckViews(BaseTestCase):
|
||||
class TestCheckViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_get_disk_check(self):
|
||||
url = f"/checks/{self.agentDiskCheck.pk}/check/"
|
||||
# setup data
|
||||
disk_check = baker.make_recipe("checks.diskspace_check")
|
||||
|
||||
url = f"/checks/{disk_check.pk}/check/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = CheckSerializer(self.agentDiskCheck)
|
||||
serializer = CheckSerializer(disk_check)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_add_disk_check(self):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
url = "/checks/checks/"
|
||||
|
||||
valid_payload = {
|
||||
"pk": self.agent.pk,
|
||||
"pk": agent.pk,
|
||||
"check": {
|
||||
"check_type": "diskspace",
|
||||
"disk": "D:",
|
||||
"disk": "C:",
|
||||
"threshold": 55,
|
||||
"fails_b4_alert": 3,
|
||||
},
|
||||
@@ -31,7 +46,7 @@ class TestCheckViews(BaseTestCase):
|
||||
|
||||
# this should fail because we already have a check for drive C: in setup
|
||||
invalid_payload = {
|
||||
"pk": self.agent.pk,
|
||||
"pk": agent.pk,
|
||||
"check": {
|
||||
"check_type": "diskspace",
|
||||
"disk": "C:",
|
||||
@@ -43,24 +58,77 @@ class TestCheckViews(BaseTestCase):
|
||||
resp = self.client.post(url, invalid_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
def test_add_cpuload_check(self):
|
||||
url = "/checks/checks/"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
payload = {
|
||||
"pk": agent.pk,
|
||||
"check": {
|
||||
"check_type": "cpuload",
|
||||
"threshold": 66,
|
||||
"fails_b4_alert": 9,
|
||||
},
|
||||
}
|
||||
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
payload["threshold"] = 87
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
self.assertEqual(
|
||||
resp.json()["non_field_errors"][0],
|
||||
"A cpuload check for this agent already exists",
|
||||
)
|
||||
|
||||
def test_add_memory_check(self):
|
||||
url = "/checks/checks/"
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
payload = {
|
||||
"pk": agent.pk,
|
||||
"check": {
|
||||
"check_type": "memory",
|
||||
"threshold": 78,
|
||||
"fails_b4_alert": 1,
|
||||
},
|
||||
}
|
||||
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
payload["threshold"] = 55
|
||||
resp = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
self.assertEqual(
|
||||
resp.json()["non_field_errors"][0],
|
||||
"A memory check for this agent already exists",
|
||||
)
|
||||
|
||||
def test_get_policy_disk_check(self):
|
||||
url = f"/checks/{self.policyDiskCheck.pk}/check/"
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy")
|
||||
disk_check = baker.make_recipe("checks.diskspace_check", policy=policy)
|
||||
|
||||
url = f"/checks/{disk_check.pk}/check/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = CheckSerializer(self.policyDiskCheck)
|
||||
serializer = CheckSerializer(disk_check)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_add_policy_disk_check(self):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy")
|
||||
|
||||
url = "/checks/checks/"
|
||||
|
||||
valid_payload = {
|
||||
"policy": self.policy.pk,
|
||||
"policy": policy.pk,
|
||||
"check": {
|
||||
"check_type": "diskspace",
|
||||
"disk": "D:",
|
||||
"disk": "M:",
|
||||
"threshold": 86,
|
||||
"fails_b4_alert": 2,
|
||||
},
|
||||
@@ -71,7 +139,7 @@ class TestCheckViews(BaseTestCase):
|
||||
|
||||
# this should fail because we already have a check for drive M: in setup
|
||||
invalid_payload = {
|
||||
"policy": self.policy.pk,
|
||||
"policy": policy.pk,
|
||||
"check": {
|
||||
"check_type": "diskspace",
|
||||
"disk": "M:",
|
||||
@@ -90,8 +158,14 @@ class TestCheckViews(BaseTestCase):
|
||||
self.assertEqual(26, len(r.data))
|
||||
|
||||
def test_edit_check_alert(self):
|
||||
url_a = f"/checks/{self.agentDiskCheck.pk}/check/"
|
||||
url_p = f"/checks/{self.policyDiskCheck.pk}/check/"
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy")
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
|
||||
policy_disk_check = baker.make_recipe("checks.diskspace_check", policy=policy)
|
||||
agent_disk_check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
url_a = f"/checks/{agent_disk_check.pk}/check/"
|
||||
url_p = f"/checks/{policy_disk_check.pk}/check/"
|
||||
|
||||
valid_payload = {"email_alert": False, "check_alert": True}
|
||||
invalid_payload = {"email_alert": False}
|
||||
@@ -109,3 +183,111 @@ class TestCheckViews(BaseTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url_a)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_checks(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.agent", version="1.4.1")
|
||||
agent_old = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")
|
||||
|
||||
url = f"/checks/runchecks/{agent_old.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")
|
||||
|
||||
url = f"/checks/runchecks/{agent_b4_141.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, wait=False)
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "busy"
|
||||
url = f"/checks/runchecks/{agent.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), f"Checks are already running on {agent.hostname}")
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "ok"
|
||||
url = f"/checks/runchecks/{agent.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), f"Checks will now be re-run on {agent.hostname}")
|
||||
|
||||
nats_cmd.reset_mock()
|
||||
nats_cmd.return_value = "timeout"
|
||||
url = f"/checks/runchecks/{agent.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_cmd.assert_called_with({"func": "runchecks"}, timeout=15)
|
||||
self.assertEqual(r.json(), "Unable to contact the agent")
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_check_history(self):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
|
||||
check_history_data = baker.make(
|
||||
"checks.CheckHistory",
|
||||
check_history=check,
|
||||
_quantity=30,
|
||||
)
|
||||
|
||||
# need to manually set the date back 35 days
|
||||
for check_history in check_history_data:
|
||||
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
|
||||
check_history.save()
|
||||
|
||||
# test invalid check pk
|
||||
resp = self.client.patch("/checks/history/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
url = f"/checks/history/{check.id}/"
|
||||
|
||||
# test with timeFilter last 30 days
|
||||
data = {"timeFilter": 30}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 30)
|
||||
|
||||
# test with timeFilter equal to 0
|
||||
data = {"timeFilter": 0}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), 60)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
class TestCheckTasks(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_prune_check_history(self):
|
||||
from .tasks import prune_check_history
|
||||
|
||||
# setup data
|
||||
check = baker.make_recipe("checks.diskspace_check")
|
||||
baker.make("checks.CheckHistory", check_history=check, _quantity=30)
|
||||
check_history_data = baker.make(
|
||||
"checks.CheckHistory",
|
||||
check_history=check,
|
||||
_quantity=30,
|
||||
)
|
||||
|
||||
# need to manually set the date back 35 days
|
||||
for check_history in check_history_data:
|
||||
check_history.x = djangotime.now() - djangotime.timedelta(days=35)
|
||||
check_history.save()
|
||||
|
||||
# prune data 30 days old
|
||||
prune_check_history(30)
|
||||
self.assertEqual(CheckHistory.objects.count(), 30)
|
||||
|
||||
# prune all Check history Data
|
||||
prune_check_history(0)
|
||||
self.assertEqual(CheckHistory.objects.count(), 0)
|
||||
|
||||
@@ -2,9 +2,10 @@ from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("checks/", views.GetAddCheck.as_view()),
|
||||
path("checks/", views.AddCheck.as_view()),
|
||||
path("<int:pk>/check/", views.GetUpdateDeleteCheck.as_view()),
|
||||
path("<pk>/loadchecks/", views.load_checks),
|
||||
path("getalldisks/", views.get_disks_for_policies),
|
||||
path("runchecks/<pk>/", views.run_checks),
|
||||
path("history/<int:checkpk>/", views.CheckHistory.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
import asyncio
|
||||
from packaging import version as pyver
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from datetime import datetime as dt
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
@@ -11,9 +18,8 @@ from automation.models import Policy
|
||||
from .models import Check
|
||||
from scripts.models import Script
|
||||
|
||||
from .serializers import CheckSerializer
|
||||
from .serializers import CheckSerializer, CheckHistorySerializer
|
||||
|
||||
from .tasks import run_checks_task
|
||||
|
||||
from automation.tasks import (
|
||||
generate_agent_checks_from_policies_task,
|
||||
@@ -22,11 +28,7 @@ from automation.tasks import (
|
||||
)
|
||||
|
||||
|
||||
class GetAddCheck(APIView):
|
||||
def get(self, request):
|
||||
checks = Check.objects.all()
|
||||
return Response(CheckSerializer(checks, many=True).data)
|
||||
|
||||
class AddCheck(APIView):
|
||||
def post(self, request):
|
||||
policy = None
|
||||
agent = None
|
||||
@@ -39,17 +41,6 @@ class GetAddCheck(APIView):
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
parent = {"agent": agent}
|
||||
added = "0.11.0"
|
||||
if (
|
||||
request.data["check"]["check_type"] == "script"
|
||||
and request.data["check"]["script_args"]
|
||||
and agent.not_supported(version_added=added)
|
||||
):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": f"Script arguments only available in agent {added} or greater"
|
||||
}
|
||||
)
|
||||
|
||||
script = None
|
||||
if "script" in request.data["check"]:
|
||||
@@ -61,13 +52,6 @@ class GetAddCheck(APIView):
|
||||
request.data["check"]["check_type"] == "eventlog"
|
||||
and request.data["check"]["event_id_is_wildcard"]
|
||||
):
|
||||
if agent and agent.not_supported(version_added="0.10.2"):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
|
||||
}
|
||||
)
|
||||
|
||||
request.data["check"]["event_id"] = 0
|
||||
|
||||
serializer = CheckSerializer(
|
||||
@@ -119,31 +103,8 @@ class GetUpdateDeleteCheck(APIView):
|
||||
pass
|
||||
else:
|
||||
if request.data["event_id_is_wildcard"]:
|
||||
if check.agent.not_supported(version_added="0.10.2"):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": "Wildcard is only available in agent 0.10.2 or greater"
|
||||
}
|
||||
)
|
||||
|
||||
request.data["event_id"] = 0
|
||||
|
||||
elif check.check_type == "script":
|
||||
added = "0.11.0"
|
||||
try:
|
||||
request.data["script_args"]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if request.data["script_args"] and check.agent.not_supported(
|
||||
version_added=added
|
||||
):
|
||||
return notify_error(
|
||||
{
|
||||
"non_field_errors": f"Script arguments only available in agent {added} or greater"
|
||||
}
|
||||
)
|
||||
|
||||
serializer = CheckSerializer(instance=check, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
@@ -179,11 +140,46 @@ class GetUpdateDeleteCheck(APIView):
|
||||
return Response(f"{check.readable_desc} was deleted!")
|
||||
|
||||
|
||||
class CheckHistory(APIView):
|
||||
def patch(self, request, checkpk):
|
||||
check = get_object_or_404(Check, pk=checkpk)
|
||||
|
||||
timeFilter = Q()
|
||||
|
||||
if "timeFilter" in request.data:
|
||||
if request.data["timeFilter"] != 0:
|
||||
timeFilter = Q(
|
||||
x__lte=djangotime.make_aware(dt.today()),
|
||||
x__gt=djangotime.make_aware(dt.today())
|
||||
- djangotime.timedelta(days=request.data["timeFilter"]),
|
||||
)
|
||||
|
||||
check_history = check.check_history.filter(timeFilter).order_by("-x")
|
||||
|
||||
return Response(
|
||||
CheckHistorySerializer(
|
||||
check_history, context={"timezone": check.agent.timezone}, many=True
|
||||
).data
|
||||
)
|
||||
|
||||
|
||||
@api_view()
|
||||
def run_checks(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
run_checks_task.delay(agent.pk)
|
||||
return Response(agent.hostname)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
|
||||
r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
|
||||
if r == "busy":
|
||||
return notify_error(f"Checks are already running on {agent.hostname}")
|
||||
elif r == "ok":
|
||||
return Response(f"Checks will now be re-run on {agent.hostname}")
|
||||
else:
|
||||
return notify_error("Unable to contact the agent")
|
||||
else:
|
||||
asyncio.run(agent.nats_cmd({"func": "runchecks"}, wait=False))
|
||||
return Response(f"Checks will now be re-run on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
|
||||
@@ -6,48 +6,48 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0004_auto_20200821_2115'),
|
||||
("clients", "0004_auto_20200821_2115"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='client',
|
||||
name='created_by',
|
||||
model_name="client",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='client',
|
||||
name='created_time',
|
||||
model_name="client",
|
||||
name="created_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='client',
|
||||
name='modified_by',
|
||||
model_name="client",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='client',
|
||||
name='modified_time',
|
||||
model_name="client",
|
||||
name="modified_time",
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='site',
|
||||
name='created_by',
|
||||
model_name="site",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='site',
|
||||
name='created_time',
|
||||
model_name="site",
|
||||
name="created_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='site',
|
||||
name='modified_by',
|
||||
model_name="site",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='site',
|
||||
name='modified_time',
|
||||
model_name="site",
|
||||
name="modified_time",
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -8,24 +8,67 @@ import uuid
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('knox', '0007_auto_20190111_0542'),
|
||||
('clients', '0005_auto_20200922_1344'),
|
||||
("knox", "0007_auto_20190111_0542"),
|
||||
("clients", "0005_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Deployment',
|
||||
name="Deployment",
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('uid', models.UUIDField(default=uuid.uuid4, editable=False)),
|
||||
('mon_type', models.CharField(choices=[('server', 'Server'), ('workstation', 'Workstation')], default='server', max_length=255)),
|
||||
('arch', models.CharField(choices=[('64', '64 bit'), ('32', '32 bit')], default='64', max_length=255)),
|
||||
('expiry', models.DateTimeField(blank=True, null=True)),
|
||||
('token_key', models.CharField(max_length=255)),
|
||||
('install_flags', models.JSONField(blank=True, null=True)),
|
||||
('auth_token', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploytokens', to='knox.authtoken')),
|
||||
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deployclients', to='clients.client')),
|
||||
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='deploysites', to='clients.site')),
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("uid", models.UUIDField(default=uuid.uuid4, editable=False)),
|
||||
(
|
||||
"mon_type",
|
||||
models.CharField(
|
||||
choices=[("server", "Server"), ("workstation", "Workstation")],
|
||||
default="server",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"arch",
|
||||
models.CharField(
|
||||
choices=[("64", "64 bit"), ("32", "32 bit")],
|
||||
default="64",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
("expiry", models.DateTimeField(blank=True, null=True)),
|
||||
("token_key", models.CharField(max_length=255)),
|
||||
("install_flags", models.JSONField(blank=True, null=True)),
|
||||
(
|
||||
"auth_token",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="deploytokens",
|
||||
to="knox.authtoken",
|
||||
),
|
||||
),
|
||||
(
|
||||
"client",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="deployclients",
|
||||
to="clients.client",
|
||||
),
|
||||
),
|
||||
(
|
||||
"site",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="deploysites",
|
||||
to="clients.site",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-02 19:20
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("clients", "0006_deployment"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name="client",
|
||||
old_name="client",
|
||||
new_name="name",
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="site",
|
||||
old_name="site",
|
||||
new_name="name",
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-03 14:30
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("clients", "0007_auto_20201102_1920"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name="client",
|
||||
options={"ordering": ("name",)},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="site",
|
||||
options={"ordering": ("name",)},
|
||||
),
|
||||
]
|
||||
@@ -7,7 +7,7 @@ from logs.models import BaseAuditModel
|
||||
|
||||
|
||||
class Client(BaseAuditModel):
|
||||
client = models.CharField(max_length=255, unique=True)
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="workstation_clients",
|
||||
@@ -24,18 +24,20 @@ class Client(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
|
||||
def __str__(self):
|
||||
return self.client
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def has_maintenanace_mode_agents(self):
|
||||
return (
|
||||
Agent.objects.filter(client=self.client, maintenance_mode=True).count() > 0
|
||||
Agent.objects.filter(site__client=self, maintenance_mode=True).count() > 0
|
||||
)
|
||||
|
||||
@property
|
||||
def has_failing_checks(self):
|
||||
|
||||
agents = (
|
||||
Agent.objects.only(
|
||||
"pk",
|
||||
@@ -44,18 +46,20 @@ class Client(BaseAuditModel):
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
)
|
||||
.filter(client=self.client)
|
||||
.filter(site__client=self)
|
||||
.prefetch_related("agentchecks")
|
||||
)
|
||||
|
||||
failing = 0
|
||||
for agent in agents:
|
||||
if agent.checks["has_failing_checks"]:
|
||||
return True
|
||||
failing += 1
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
return True
|
||||
failing += 1
|
||||
|
||||
return False
|
||||
return failing > 0
|
||||
|
||||
@staticmethod
|
||||
def serialize(client):
|
||||
@@ -67,7 +71,7 @@ class Client(BaseAuditModel):
|
||||
|
||||
class Site(BaseAuditModel):
|
||||
client = models.ForeignKey(Client, related_name="sites", on_delete=models.CASCADE)
|
||||
site = models.CharField(max_length=255)
|
||||
name = models.CharField(max_length=255)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="workstation_sites",
|
||||
@@ -84,21 +88,18 @@ class Site(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
|
||||
def __str__(self):
|
||||
return self.site
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def has_maintenanace_mode_agents(self):
|
||||
return (
|
||||
Agent.objects.filter(
|
||||
client=self.client.client, site=self.site, maintenance_mode=True
|
||||
).count()
|
||||
> 0
|
||||
)
|
||||
return Agent.objects.filter(site=self, maintenance_mode=True).count() > 0
|
||||
|
||||
@property
|
||||
def has_failing_checks(self):
|
||||
|
||||
agents = (
|
||||
Agent.objects.only(
|
||||
"pk",
|
||||
@@ -107,18 +108,20 @@ class Site(BaseAuditModel):
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
)
|
||||
.filter(client=self.client.client, site=self.site)
|
||||
.filter(site=self)
|
||||
.prefetch_related("agentchecks")
|
||||
)
|
||||
|
||||
failing = 0
|
||||
for agent in agents:
|
||||
if agent.checks["has_failing_checks"]:
|
||||
return True
|
||||
failing += 1
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
return True
|
||||
failing += 1
|
||||
|
||||
return False
|
||||
return failing > 0
|
||||
|
||||
@staticmethod
|
||||
def serialize(site):
|
||||
@@ -128,13 +131,6 @@ class Site(BaseAuditModel):
|
||||
return SiteSerializer(site).data
|
||||
|
||||
|
||||
def validate_name(name):
|
||||
if "|" in name:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
MON_TYPE_CHOICES = [
|
||||
("server", "Server"),
|
||||
("workstation", "Workstation"),
|
||||
|
||||
@@ -3,19 +3,25 @@ from .models import Client, Site, Deployment
|
||||
|
||||
|
||||
class SiteSerializer(ModelSerializer):
|
||||
client_name = ReadOnlyField(source="client.name")
|
||||
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
|
||||
def validate(self, val):
|
||||
if "|" in val["site"]:
|
||||
if "|" in val["name"]:
|
||||
raise ValidationError("Site name cannot contain the | character")
|
||||
|
||||
if self.context:
|
||||
client = Client.objects.get(pk=self.context["clientpk"])
|
||||
if Site.objects.filter(client=client, name=val["name"]).exists():
|
||||
raise ValidationError(f"Site {val['name']} already exists")
|
||||
|
||||
return val
|
||||
|
||||
|
||||
class ClientSerializer(ModelSerializer):
|
||||
|
||||
sites = SiteSerializer(many=True, read_only=True)
|
||||
|
||||
class Meta:
|
||||
@@ -30,29 +36,38 @@ class ClientSerializer(ModelSerializer):
|
||||
if len(self.context["site"]) > 255:
|
||||
raise ValidationError("Site name too long")
|
||||
|
||||
if "|" in val["client"]:
|
||||
if "|" in val["name"]:
|
||||
raise ValidationError("Client name cannot contain the | character")
|
||||
|
||||
return val
|
||||
|
||||
|
||||
class TreeSerializer(ModelSerializer):
|
||||
client_name = ReadOnlyField(source="client.client")
|
||||
class SiteTreeSerializer(ModelSerializer):
|
||||
maintenance_mode = ReadOnlyField(source="has_maintenanace_mode_agents")
|
||||
failing_checks = ReadOnlyField(source="has_failing_checks")
|
||||
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = (
|
||||
"id",
|
||||
"site",
|
||||
"client_name",
|
||||
)
|
||||
fields = "__all__"
|
||||
ordering = ("failing_checks",)
|
||||
|
||||
|
||||
class ClientTreeSerializer(ModelSerializer):
|
||||
sites = SiteTreeSerializer(many=True, read_only=True)
|
||||
maintenance_mode = ReadOnlyField(source="has_maintenanace_mode_agents")
|
||||
failing_checks = ReadOnlyField(source="has_failing_checks")
|
||||
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
ordering = ("failing_checks",)
|
||||
|
||||
|
||||
class DeploymentSerializer(ModelSerializer):
|
||||
client_id = ReadOnlyField(source="client.id")
|
||||
site_id = ReadOnlyField(source="site.id")
|
||||
client_name = ReadOnlyField(source="client.client")
|
||||
site_name = ReadOnlyField(source="site.site")
|
||||
client_name = ReadOnlyField(source="client.name")
|
||||
site_name = ReadOnlyField(source="site.name")
|
||||
|
||||
class Meta:
|
||||
model = Deployment
|
||||
|
||||
@@ -1,16 +1,32 @@
|
||||
import uuid
|
||||
from unittest import mock
|
||||
|
||||
from tacticalrmm.test import BaseTestCase
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from model_bakery import baker
|
||||
from .models import Client, Site, Deployment
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from .serializers import (
|
||||
ClientSerializer,
|
||||
SiteSerializer,
|
||||
ClientTreeSerializer,
|
||||
DeploymentSerializer,
|
||||
)
|
||||
|
||||
|
||||
class TestClientViews(BaseTestCase):
|
||||
class TestClientViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_get_clients(self):
|
||||
# setup data
|
||||
baker.make("clients.Client", _quantity=5)
|
||||
clients = Client.objects.all()
|
||||
|
||||
url = "/clients/clients/"
|
||||
r = self.client.get(url)
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientSerializer(clients, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -21,15 +37,42 @@ class TestClientViews(BaseTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload["client"] = "Company1|askd"
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Client name cannot contain the | character"
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload = {"client": "Company 1", "site": "Site2|a34"}
|
||||
payload = {"client": "Company 156", "site": "Site2|a34"}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Site name cannot contain the | character"
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test unique
|
||||
payload = {"client": "Company 1", "site": "Site 1"}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "client with this name already exists."
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test long site name
|
||||
payload = {"client": "Company 2394", "site": "Site123" * 100}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(ValidationError, "Site name too long"):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -41,88 +84,177 @@ class TestClientViews(BaseTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
def test_get_sites(self):
|
||||
url = "/clients/sites/"
|
||||
r = self.client.get(url)
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_edit_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
|
||||
# test invalid id
|
||||
r = self.client.put("/clients/500/client/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
data = {"id": client.id, "name": "New Name"}
|
||||
|
||||
url = f"/clients/{client.id}/client/"
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(Client.objects.filter(name="New Name").exists())
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/500/client/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/clients/{client.id}/client/"
|
||||
|
||||
# test deleting with agents under client
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test successful deletion
|
||||
agent.delete()
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Client.objects.filter(pk=client.id).exists())
|
||||
self.assertFalse(Site.objects.filter(pk=site.id).exists())
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_get_sites(self):
|
||||
# setup data
|
||||
baker.make("clients.Site", _quantity=5)
|
||||
sites = Site.objects.all()
|
||||
|
||||
url = "/clients/sites/"
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = SiteSerializer(sites, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_site(self):
|
||||
url = "/clients/addsite/"
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
|
||||
payload = {"client": "Google", "site": "LA Office"}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
url = "/clients/sites/"
|
||||
|
||||
payload = {"client": "Google", "site": "LA Off|ice |*&@#$"}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload = {"client": "Google", "site": "KN Office"}
|
||||
# test success add
|
||||
payload = {"client": site.client.id, "name": "LA Office"}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(
|
||||
Site.objects.filter(
|
||||
name="LA Office", client__name=site.client.name
|
||||
).exists()
|
||||
)
|
||||
|
||||
# test with | symbol
|
||||
payload = {"client": site.client.id, "name": "LA Off|ice |*&@#$"}
|
||||
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Site name cannot contain the | character"
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test site already exists
|
||||
payload = {"client": site.client.id, "name": "LA Office"}
|
||||
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
|
||||
with self.assertRaisesMessage(ValidationError, "Site LA Office already exists"):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_list_clients(self):
|
||||
url = "/clients/listclients/"
|
||||
def test_edit_site(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
|
||||
r = self.client.get(url)
|
||||
# test invalid id
|
||||
r = self.client.put("/clients/500/site/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
data = {"id": site.id, "name": "New Name", "client": site.client.id}
|
||||
|
||||
url = f"/clients/{site.id}/site/"
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(Site.objects.filter(name="New Name").exists())
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_load_tree(self):
|
||||
def test_delete_site(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
with mock.patch(
|
||||
"clients.models.Client.has_failing_checks",
|
||||
new_callable=mock.PropertyMock,
|
||||
return_value=True,
|
||||
):
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/500/site/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = "/clients/loadtree/"
|
||||
url = f"/clients/{site.id}/site/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
# test deleting with last site under client
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
client = Client.objects.get(client="Facebook")
|
||||
self.assertTrue(f"Facebook|{client.pk}|negative" in r.data.keys())
|
||||
# test deletion when agents exist under site
|
||||
baker.make("clients.Site", client=site.client)
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
with mock.patch(
|
||||
"clients.models.Site.has_failing_checks",
|
||||
new_callable=mock.PropertyMock,
|
||||
return_value=False,
|
||||
):
|
||||
|
||||
client = Client.objects.get(client="Google")
|
||||
site = Site.objects.get(client=client, site="LA Office")
|
||||
self.assertTrue(
|
||||
f"LA Office|{site.pk}|black" in [i for i in r.data.values()][0]
|
||||
)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_load_clients(self):
|
||||
url = "/clients/loadclients/"
|
||||
|
||||
r = self.client.get(url)
|
||||
# test successful deletion
|
||||
agent.delete()
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Site.objects.filter(pk=site.id).exists())
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_tree(self):
|
||||
# setup data
|
||||
baker.make("clients.Site", _quantity=10)
|
||||
clients = Client.objects.all()
|
||||
|
||||
url = "/clients/tree/"
|
||||
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientTreeSerializer(clients, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_deployments(self):
|
||||
# setup data
|
||||
deployments = baker.make("clients.Deployment", _quantity=5)
|
||||
|
||||
url = "/clients/deployments/"
|
||||
r = self.client.get(url)
|
||||
serializer = DeploymentSerializer(deployments, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_deployment(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
|
||||
url = "/clients/deployments/"
|
||||
payload = {
|
||||
"client": "Google",
|
||||
"site": "Main Office",
|
||||
"client": site.client.id,
|
||||
"site": site.id,
|
||||
"expires": "2037-11-23 18:53",
|
||||
"power": 1,
|
||||
"ping": 0,
|
||||
@@ -134,36 +266,26 @@ class TestClientViews(BaseTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload["site"] = "ASDkjh23k4jh"
|
||||
payload["site"] = "500"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
payload["client"] = "324234ASDqwe"
|
||||
payload["client"] = "500"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_delete_deployment(self):
|
||||
# setup data
|
||||
deployment = baker.make("clients.Deployment")
|
||||
|
||||
url = "/clients/deployments/"
|
||||
payload = {
|
||||
"client": "Google",
|
||||
"site": "Main Office",
|
||||
"expires": "2037-11-23 18:53",
|
||||
"power": 1,
|
||||
"ping": 0,
|
||||
"rdp": 1,
|
||||
"agenttype": "server",
|
||||
"arch": "64",
|
||||
}
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
dep = Deployment.objects.last()
|
||||
url = f"/clients/{dep.pk}/deployment/"
|
||||
url = f"/clients/{deployment.id}/deployment/"
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())
|
||||
|
||||
url = "/clients/32348/deployment/"
|
||||
r = self.client.delete(url)
|
||||
|
||||
@@ -4,14 +4,9 @@ from . import views
|
||||
urlpatterns = [
|
||||
path("clients/", views.GetAddClients.as_view()),
|
||||
path("<int:pk>/client/", views.GetUpdateDeleteClient.as_view()),
|
||||
path("tree/", views.GetClientTree.as_view()),
|
||||
path("sites/", views.GetAddSites.as_view()),
|
||||
path("listclients/", views.list_clients),
|
||||
path("listsites/", views.list_sites),
|
||||
path("addsite/", views.add_site),
|
||||
path("editsite/", views.edit_site),
|
||||
path("deletesite/", views.delete_site),
|
||||
path("loadtree/", views.load_tree),
|
||||
path("loadclients/", views.load_clients),
|
||||
path("<int:pk>/site/", views.GetUpdateDeleteSite.as_view()),
|
||||
path("deployments/", views.AgentDeployment.as_view()),
|
||||
path("<int:pk>/deployment/", views.AgentDeployment.as_view()),
|
||||
path("<str:uid>/deploy/", views.GenerateAgent.as_view()),
|
||||
|
||||
@@ -22,10 +22,10 @@ from rest_framework.decorators import api_view
|
||||
from .serializers import (
|
||||
ClientSerializer,
|
||||
SiteSerializer,
|
||||
TreeSerializer,
|
||||
ClientTreeSerializer,
|
||||
DeploymentSerializer,
|
||||
)
|
||||
from .models import Client, Site, Deployment, validate_name
|
||||
from .models import Client, Site, Deployment
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.utils import notify_error
|
||||
@@ -39,51 +39,50 @@ class GetAddClients(APIView):
|
||||
def post(self, request):
|
||||
|
||||
if "initialsetup" in request.data:
|
||||
client = {"client": request.data["client"]["client"].strip()}
|
||||
site = {"site": request.data["client"]["site"].strip()}
|
||||
client = {"name": request.data["client"]["client"].strip()}
|
||||
site = {"name": request.data["client"]["site"].strip()}
|
||||
serializer = ClientSerializer(data=client, context=request.data["client"])
|
||||
serializer.is_valid(raise_exception=True)
|
||||
core = CoreSettings.objects.first()
|
||||
core.default_time_zone = request.data["timezone"]
|
||||
core.save(update_fields=["default_time_zone"])
|
||||
else:
|
||||
client = {"client": request.data["client"].strip()}
|
||||
site = {"site": request.data["site"].strip()}
|
||||
client = {"name": request.data["client"].strip()}
|
||||
site = {"name": request.data["site"].strip()}
|
||||
serializer = ClientSerializer(data=client, context=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
obj = serializer.save()
|
||||
Site(client=obj, site=site["site"]).save()
|
||||
Site(client=obj, name=site["name"]).save()
|
||||
|
||||
return Response(f"{obj} was added!")
|
||||
|
||||
|
||||
class GetUpdateDeleteClient(APIView):
|
||||
def patch(self, request, pk):
|
||||
def put(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
orig = client.client
|
||||
|
||||
serializer = ClientSerializer(data=request.data, instance=client)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
serializer.save()
|
||||
|
||||
agents = Agent.objects.filter(client=orig)
|
||||
for agent in agents:
|
||||
agent.client = obj.client
|
||||
agent.save(update_fields=["client"])
|
||||
|
||||
return Response(f"{orig} renamed to {obj}")
|
||||
return Response("The Client was renamed")
|
||||
|
||||
def delete(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
agents = Agent.objects.filter(client=client.client)
|
||||
if agents.exists():
|
||||
agent_count = Agent.objects.filter(site__client=client).count()
|
||||
if agent_count > 0:
|
||||
return notify_error(
|
||||
f"Cannot delete {client} while {agents.count()} agents exist in it. Move the agents to another client first."
|
||||
f"Cannot delete {client} while {agent_count} agents exist in it. Move the agents to another client first."
|
||||
)
|
||||
|
||||
client.delete()
|
||||
return Response(f"{client.client} was deleted!")
|
||||
return Response(f"{client.name} was deleted!")
|
||||
|
||||
|
||||
class GetClientTree(APIView):
|
||||
def get(self, request):
|
||||
clients = Client.objects.all()
|
||||
return Response(ClientTreeSerializer(clients, many=True).data)
|
||||
|
||||
|
||||
class GetAddSites(APIView):
|
||||
@@ -91,126 +90,42 @@ class GetAddSites(APIView):
|
||||
sites = Site.objects.all()
|
||||
return Response(SiteSerializer(sites, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
name = request.data["name"].strip()
|
||||
serializer = SiteSerializer(
|
||||
data={"name": name, "client": request.data["client"]},
|
||||
context={"clientpk": request.data["client"]},
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
@api_view(["POST"])
|
||||
def add_site(request):
|
||||
client = Client.objects.get(client=request.data["client"].strip())
|
||||
site = request.data["site"].strip()
|
||||
|
||||
if not validate_name(site):
|
||||
content = {"error": "Site name cannot contain the | character"}
|
||||
return Response(content, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if Site.objects.filter(client=client).filter(site=site):
|
||||
content = {"error": f"Site {site} already exists"}
|
||||
return Response(content, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
try:
|
||||
Site(client=client, site=site).save()
|
||||
except DataError:
|
||||
content = {"error": "Site name too long (max 255 chars)"}
|
||||
return Response(content, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
def edit_site(request):
|
||||
new_name = request.data["name"].strip()
|
||||
class GetUpdateDeleteSite(APIView):
|
||||
def put(self, request, pk):
|
||||
|
||||
if not validate_name(new_name):
|
||||
err = "Site name cannot contain the | character"
|
||||
return Response(err, status=status.HTTP_400_BAD_REQUEST)
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
serializer = SiteSerializer(instance=site, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
client = get_object_or_404(Client, client=request.data["client"])
|
||||
site = Site.objects.filter(client=client).filter(site=request.data["site"]).get()
|
||||
return Response("ok")
|
||||
|
||||
agents = Agent.objects.filter(client=client.client).filter(site=site.site)
|
||||
def delete(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
if site.client.sites.count() == 1:
|
||||
return notify_error(f"A client must have at least 1 site.")
|
||||
|
||||
site.site = new_name
|
||||
site.save(update_fields=["site"])
|
||||
agent_count = Agent.objects.filter(site=site).count()
|
||||
|
||||
for agent in agents:
|
||||
agent.site = new_name
|
||||
agent.save(update_fields=["site"])
|
||||
if agent_count > 0:
|
||||
return notify_error(
|
||||
f"Cannot delete {site.name} while {agent_count} agents exist in it. Move the agents to another site first."
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@api_view(["DELETE"])
|
||||
def delete_site(request):
|
||||
client = get_object_or_404(Client, client=request.data["client"])
|
||||
if client.sites.count() == 1:
|
||||
return notify_error(f"A client must have at least 1 site.")
|
||||
|
||||
site = Site.objects.filter(client=client).filter(site=request.data["site"]).get()
|
||||
agents = Agent.objects.filter(client=client.client).filter(site=site.site)
|
||||
|
||||
if agents.exists():
|
||||
return notify_error(
|
||||
f"Cannot delete {site} while {agents.count()} agents exist in it. Move the agents to another site first."
|
||||
)
|
||||
|
||||
site.delete()
|
||||
return Response(f"{site} was deleted!")
|
||||
|
||||
|
||||
@api_view()
|
||||
# for vue
|
||||
def list_clients(request):
|
||||
clients = Client.objects.all()
|
||||
return Response(ClientSerializer(clients, many=True).data)
|
||||
|
||||
|
||||
@api_view()
|
||||
# for vue
|
||||
def list_sites(request):
|
||||
sites = Site.objects.all()
|
||||
return Response(TreeSerializer(sites, many=True).data)
|
||||
|
||||
|
||||
@api_view()
|
||||
def load_tree(request):
|
||||
clients = Client.objects.all()
|
||||
new = {}
|
||||
|
||||
for x in clients:
|
||||
b = []
|
||||
|
||||
sites = Site.objects.filter(client=x)
|
||||
for i in sites:
|
||||
|
||||
if i.has_maintenanace_mode_agents:
|
||||
b.append(f"{i.site}|{i.pk}|warning")
|
||||
elif i.has_failing_checks:
|
||||
b.append(f"{i.site}|{i.pk}|negative")
|
||||
else:
|
||||
b.append(f"{i.site}|{i.pk}|black")
|
||||
|
||||
if x.has_maintenanace_mode_agents:
|
||||
new[f"{x.client}|{x.pk}|warning"] = b
|
||||
elif x.has_failing_checks:
|
||||
new[f"{x.client}|{x.pk}|negative"] = b
|
||||
else:
|
||||
new[f"{x.client}|{x.pk}|black"] = b
|
||||
|
||||
return Response(new)
|
||||
|
||||
|
||||
@api_view()
|
||||
def load_clients(request):
|
||||
clients = Client.objects.all()
|
||||
new = {}
|
||||
|
||||
for x in clients:
|
||||
b = []
|
||||
|
||||
sites = Site.objects.filter(client=x)
|
||||
for i in sites:
|
||||
b.append(i.site)
|
||||
new[x.client] = b
|
||||
|
||||
return Response(new)
|
||||
site.delete()
|
||||
return Response(f"{site.name} was deleted!")
|
||||
|
||||
|
||||
class AgentDeployment(APIView):
|
||||
@@ -221,8 +136,8 @@ class AgentDeployment(APIView):
|
||||
def post(self, request):
|
||||
from knox.models import AuthToken
|
||||
|
||||
client = get_object_or_404(Client, client=request.data["client"])
|
||||
site = get_object_or_404(Site, client=client, site=request.data["site"])
|
||||
client = get_object_or_404(Client, pk=request.data["client"])
|
||||
site = get_object_or_404(Site, pk=request.data["site"])
|
||||
|
||||
expires = dt.datetime.strptime(
|
||||
request.data["expires"], "%Y-%m-%d %H:%M"
|
||||
@@ -277,7 +192,7 @@ class GenerateAgent(APIView):
|
||||
if not os.path.exists(go_bin):
|
||||
return notify_error("Missing golang")
|
||||
|
||||
api = f"{request.scheme}://{request.get_host()}"
|
||||
api = f"https://{request.get_host()}"
|
||||
inno = (
|
||||
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||
if d.arch == "64"
|
||||
@@ -285,8 +200,8 @@ class GenerateAgent(APIView):
|
||||
)
|
||||
download_url = settings.DL_64 if d.arch == "64" else settings.DL_32
|
||||
|
||||
client = d.client.client.replace(" ", "").lower()
|
||||
site = d.site.site.replace(" ", "").lower()
|
||||
client = d.client.name.replace(" ", "").lower()
|
||||
site = d.site.name.replace(" ", "").lower()
|
||||
client = re.sub(r"([^a-zA-Z0-9]+)", "", client)
|
||||
site = re.sub(r"([^a-zA-Z0-9]+)", "", site)
|
||||
|
||||
@@ -308,7 +223,7 @@ class GenerateAgent(APIView):
|
||||
f"GOARCH={goarch}",
|
||||
go_bin,
|
||||
"build",
|
||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
||||
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
|
||||
f"-X 'main.Api={api}'",
|
||||
f"-X 'main.Client={d.client.pk}'",
|
||||
f"-X 'main.Site={d.site.pk}'",
|
||||
@@ -367,4 +282,4 @@ class GenerateAgent(APIView):
|
||||
response = HttpResponse()
|
||||
response["Content-Disposition"] = f"attachment; filename={file_name}"
|
||||
response["X-Accel-Redirect"] = f"/private/exe/{file_name}"
|
||||
return response
|
||||
return response
|
||||
|
||||
@@ -56,8 +56,8 @@ func downloadAgent(filepath string) (err error) {
|
||||
func main() {
|
||||
|
||||
debugLog := flag.String("log", "", "Verbose output")
|
||||
localSalt := flag.String("local-salt", "", "Use local salt minion")
|
||||
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
||||
silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
|
||||
cert := flag.String("cert", "", "Path to ca.pem")
|
||||
timeout := flag.String("timeout", "", "Timeout for subprocess calls")
|
||||
flag.Parse()
|
||||
@@ -78,35 +78,35 @@ func main() {
|
||||
}
|
||||
|
||||
if debug {
|
||||
cmdArgs = append(cmdArgs, "--log", "DEBUG")
|
||||
cmdArgs = append(cmdArgs, "-log", "debug")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localSalt)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "--local-salt", *localSalt)
|
||||
if *silent {
|
||||
cmdArgs = append(cmdArgs, "-silent")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localMesh)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "--local-mesh", *localMesh)
|
||||
cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*cert)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "--cert", *cert)
|
||||
cmdArgs = append(cmdArgs, "-cert", *cert)
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*timeout)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "--timeout", *timeout)
|
||||
cmdArgs = append(cmdArgs, "-timeout", *timeout)
|
||||
}
|
||||
|
||||
if Rdp == "1" {
|
||||
cmdArgs = append(cmdArgs, "--rdp")
|
||||
cmdArgs = append(cmdArgs, "-rdp")
|
||||
}
|
||||
|
||||
if Ping == "1" {
|
||||
cmdArgs = append(cmdArgs, "--ping")
|
||||
cmdArgs = append(cmdArgs, "-ping")
|
||||
}
|
||||
|
||||
if Power == "1" {
|
||||
cmdArgs = append(cmdArgs, "--power")
|
||||
cmdArgs = append(cmdArgs, "-power")
|
||||
}
|
||||
|
||||
if debug {
|
||||
@@ -133,7 +133,7 @@ func main() {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
time.Sleep(20 * time.Second)
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Println("Installation starting.")
|
||||
cmd := exec.Command(tacrmm, cmdArgs...)
|
||||
|
||||
@@ -36,7 +36,7 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
|
||||
write-host ('Extracting...')
|
||||
Start-Sleep -s 20
|
||||
Start-Sleep -s 10
|
||||
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
|
||||
exit 0
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
from .helpers import get_auth_token
|
||||
import asyncio
|
||||
import ssl
|
||||
import websockets
|
||||
import json
|
||||
|
||||
@@ -11,15 +10,14 @@ import json
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self):
|
||||
token = get_auth_token(
|
||||
self.mesh_settings.mesh_username, self.mesh_settings.mesh_token
|
||||
)
|
||||
async def websocket_call(self, mesh_settings):
|
||||
token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)
|
||||
|
||||
if settings.MESH_WS_URL:
|
||||
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
|
||||
if settings.DOCKER_BUILD:
|
||||
site = mesh_settings.mesh_site.replace("https", "ws")
|
||||
uri = f"{site}:443/control.ashx?auth={token}"
|
||||
else:
|
||||
site = self.mesh_settings.mesh_site.replace("https", "wss")
|
||||
site = mesh_settings.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
@@ -45,5 +43,5 @@ class Command(BaseCommand):
|
||||
break
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
self.mesh_settings = CoreSettings.objects.first()
|
||||
asyncio.get_event_loop().run_until_complete(self.websocket_call())
|
||||
mesh_settings = CoreSettings.objects.first()
|
||||
asyncio.get_event_loop().run_until_complete(self.websocket_call(mesh_settings))
|
||||
|
||||
@@ -1,53 +1,90 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
from .helpers import get_auth_token
|
||||
import asyncio
|
||||
import websockets
|
||||
import json
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self):
|
||||
|
||||
token = get_auth_token(
|
||||
self.mesh_settings.mesh_username, self.mesh_settings.mesh_token
|
||||
)
|
||||
|
||||
if settings.MESH_WS_URL:
|
||||
uri = f"{settings.MESH_WS_URL}/control.ashx?auth={token}"
|
||||
else:
|
||||
site = self.mesh_settings.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
|
||||
# Get Device groups to see if it exists
|
||||
await websocket.send(json.dumps({"action": "meshes"}))
|
||||
|
||||
async for message in websocket:
|
||||
response = json.loads(message)
|
||||
if response["action"] == "meshes":
|
||||
|
||||
# If no meshes are present
|
||||
if not response["meshes"]:
|
||||
await websocket.send(
|
||||
json.dumps(
|
||||
{
|
||||
"action": "createmesh",
|
||||
"meshname": "TacticalRMM",
|
||||
"meshtype": 2,
|
||||
"responseid": "python",
|
||||
}
|
||||
)
|
||||
)
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
self.mesh_settings = CoreSettings.objects.first()
|
||||
asyncio.get_event_loop().run_until_complete(self.websocket_call())
|
||||
self.stdout.write("Initial Mesh Central setup complete")
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
from core.models import CoreSettings
|
||||
from .helpers import get_auth_token
|
||||
import asyncio
|
||||
import websockets
|
||||
import json
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Sets up initial mesh central configuration"
|
||||
|
||||
async def websocket_call(self, mesh_settings):
|
||||
|
||||
token = get_auth_token(mesh_settings.mesh_username, mesh_settings.mesh_token)
|
||||
|
||||
if settings.DOCKER_BUILD:
|
||||
site = mesh_settings.mesh_site.replace("https", "ws")
|
||||
uri = f"{site}:443/control.ashx?auth={token}"
|
||||
else:
|
||||
site = mesh_settings.mesh_site.replace("https", "wss")
|
||||
uri = f"{site}/control.ashx?auth={token}"
|
||||
|
||||
async with websockets.connect(uri) as websocket:
|
||||
|
||||
# Get Device groups to see if it exists
|
||||
await websocket.send(json.dumps({"action": "meshes"}))
|
||||
|
||||
async for message in websocket:
|
||||
response = json.loads(message)
|
||||
if response["action"] == "meshes":
|
||||
|
||||
# If no meshes are present
|
||||
if not response["meshes"]:
|
||||
await websocket.send(
|
||||
json.dumps(
|
||||
{
|
||||
"action": "createmesh",
|
||||
"meshname": "TacticalRMM",
|
||||
"meshtype": 2,
|
||||
"responseid": "python",
|
||||
}
|
||||
)
|
||||
)
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
mesh_settings = CoreSettings.objects.first()
|
||||
|
||||
try:
|
||||
# Check for Mesh Username
|
||||
if (
|
||||
not mesh_settings.mesh_username
|
||||
or settings.MESH_USERNAME != mesh_settings.mesh_username
|
||||
):
|
||||
mesh_settings.mesh_username = settings.MESH_USERNAME
|
||||
|
||||
# Check for Mesh Site
|
||||
if (
|
||||
not mesh_settings.mesh_site
|
||||
or settings.MESH_SITE != mesh_settings.mesh_site
|
||||
):
|
||||
mesh_settings.mesh_site = settings.MESH_SITE
|
||||
|
||||
# Check for Mesh Token
|
||||
if (
|
||||
not mesh_settings.mesh_token
|
||||
or settings.MESH_TOKEN_KEY != mesh_settings.mesh_token
|
||||
):
|
||||
mesh_settings.mesh_token = settings.MESH_TOKEN_KEY
|
||||
|
||||
mesh_settings.save()
|
||||
|
||||
except AttributeError:
|
||||
self.stdout.write(
|
||||
"Mesh Setup was skipped because the configuration wasn't available. Needs to be setup manually."
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
asyncio.get_event_loop().run_until_complete(
|
||||
self.websocket_call(mesh_settings)
|
||||
)
|
||||
self.stdout.write("Initial Mesh Central setup complete")
|
||||
except websockets.exceptions.ConnectionClosedError:
|
||||
self.stdout.write(
|
||||
"Unable to connect to MeshCentral. Please verify it is online and the configuration is correct in the settings."
|
||||
)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from time import sleep
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
@@ -15,22 +13,10 @@ class Command(BaseCommand):
|
||||
help = "Collection of tasks to run after updating the rmm, after migrations"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
|
||||
if not os.path.exists("/usr/local/bin/goversioninfo"):
|
||||
self.stdout.write(self.style.ERROR("*" * 100))
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"ERROR: New update script available. Delete this one and re-download."
|
||||
)
|
||||
)
|
||||
self.stdout.write("\n")
|
||||
sys.exit(1)
|
||||
|
||||
# 10-16-2020 changed the type of the agent's 'disks' model field
|
||||
# from a dict of dicts, to a list of disks in the golang agent
|
||||
# the following will convert dicts to lists for agent's still on the python agent
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "disks")
|
||||
for agent in agents:
|
||||
if agent.disks is not None and isinstance(agent.disks, dict):
|
||||
new = []
|
||||
@@ -43,88 +29,17 @@ class Command(BaseCommand):
|
||||
self.style.SUCCESS(f"Migrated disks on {agent.hostname}")
|
||||
)
|
||||
|
||||
# sync modules. split into chunks of 60 agents to not overload the salt master
|
||||
agents = Agent.objects.all()
|
||||
online = [i.salt_id for i in agents if i.status == "online"]
|
||||
|
||||
chunks = (online[i : i + 60] for i in range(0, len(online), 60))
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Syncing agent modules..."))
|
||||
for chunk in chunks:
|
||||
r = Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
||||
sleep(5)
|
||||
|
||||
has_old_config = True
|
||||
rmm_conf = "/etc/nginx/sites-available/rmm.conf"
|
||||
if os.path.exists(rmm_conf):
|
||||
with open(rmm_conf) as f:
|
||||
for line in f:
|
||||
if "location" and "builtin" in line:
|
||||
has_old_config = False
|
||||
break
|
||||
|
||||
if has_old_config:
|
||||
new_conf = """
|
||||
location /builtin/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
|
||||
alias /srv/salt/scripts/;
|
||||
}
|
||||
"""
|
||||
|
||||
after_this = """
|
||||
location /saltscripts/ {
|
||||
internal;
|
||||
add_header "Access-Control-Allow-Origin" "https://rmm.yourwebsite.com";
|
||||
alias /srv/salt/scripts/userdefined/;
|
||||
}
|
||||
"""
|
||||
|
||||
self.stdout.write(self.style.ERROR("*" * 100))
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"WARNING: A recent update requires you to manually edit your nginx config"
|
||||
)
|
||||
)
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR("Please add the following location block to ")
|
||||
+ self.style.WARNING(rmm_conf)
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(new_conf))
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"You can paste the above right after the following block that's already in your nginx config:"
|
||||
)
|
||||
)
|
||||
self.stdout.write(after_this)
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Make sure to replace rmm.yourwebsite.com with your domain"
|
||||
)
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.ERROR("After editing, restart nginx with the command ")
|
||||
+ self.style.WARNING("sudo systemctl restart nginx")
|
||||
)
|
||||
self.stdout.write("\n")
|
||||
self.stdout.write(self.style.ERROR("*" * 100))
|
||||
input("Press Enter to continue...")
|
||||
|
||||
# install go
|
||||
if not os.path.exists("/usr/local/rmmgo/"):
|
||||
self.stdout.write(self.style.SUCCESS("Installing golang"))
|
||||
subprocess.run("sudo mkdir -p /usr/local/rmmgo", shell=True)
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
r = subprocess.run(
|
||||
f"wget https://golang.org/dl/go1.15.linux-amd64.tar.gz -P {tmpdir}",
|
||||
f"wget https://golang.org/dl/go1.15.5.linux-amd64.tar.gz -P {tmpdir}",
|
||||
shell=True,
|
||||
)
|
||||
|
||||
gotar = os.path.join(tmpdir, "go1.15.linux-amd64.tar.gz")
|
||||
gotar = os.path.join(tmpdir, "go1.15.5.linux-amd64.tar.gz")
|
||||
|
||||
subprocess.run(f"tar -xzf {gotar} -C {tmpdir}", shell=True)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user