Compare commits
538 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a11616aace | ||
|
|
883acadbc4 | ||
|
|
f51e6a3fcf | ||
|
|
371e081c0d | ||
|
|
6f41b3bf1c | ||
|
|
c1d74a6c9e | ||
|
|
24eaa6796e | ||
|
|
1521e3b620 | ||
|
|
b6ff38dd62 | ||
|
|
44ea9ac03c | ||
|
|
4c2701505b | ||
|
|
9022fe18da | ||
|
|
63be349f8b | ||
|
|
c40256a290 | ||
|
|
33ecb8ec52 | ||
|
|
82d62a0015 | ||
|
|
6278240526 | ||
|
|
8c2dc5f57d | ||
|
|
2e5868778a | ||
|
|
a10b8dab9b | ||
|
|
92f4f7ef59 | ||
|
|
31257bd5cb | ||
|
|
bb6510862f | ||
|
|
797ecf0780 | ||
|
|
f9536dc67f | ||
|
|
e8b95362af | ||
|
|
bdc39ad4ec | ||
|
|
4a202c5585 | ||
|
|
3c6b321f73 | ||
|
|
cb29b52799 | ||
|
|
7e48015a54 | ||
|
|
9ed3abf932 | ||
|
|
61762828a3 | ||
|
|
59beabe5ac | ||
|
|
0b30faa28c | ||
|
|
d12d49b93f | ||
|
|
f1d64d275a | ||
|
|
d094eeeb03 | ||
|
|
be25af658e | ||
|
|
794f52c229 | ||
|
|
5d4dc4ed4c | ||
|
|
e49d97b898 | ||
|
|
b6b4f1ba62 | ||
|
|
653d476716 | ||
|
|
48b855258c | ||
|
|
c7efdaf5f9 | ||
|
|
22523ed3d3 | ||
|
|
33c602dd61 | ||
|
|
e2a5509b76 | ||
|
|
61a0fa1a89 | ||
|
|
a35bd8292b | ||
|
|
06c8ae60e3 | ||
|
|
deeab1f845 | ||
|
|
da81c4c987 | ||
|
|
d180f1b2d5 | ||
|
|
526135629c | ||
|
|
6b9493e057 | ||
|
|
9bb33d2afc | ||
|
|
7421138533 | ||
|
|
d0800c52bb | ||
|
|
913fcd4df2 | ||
|
|
83322cc725 | ||
|
|
5944501feb | ||
|
|
17e3603d3d | ||
|
|
95be43ae47 | ||
|
|
feb91cbbaa | ||
|
|
79409af168 | ||
|
|
5dbfb64822 | ||
|
|
5e7ebf5e69 | ||
|
|
e73215ca74 | ||
|
|
a5f123b9ce | ||
|
|
ac058e9675 | ||
|
|
371b764d1d | ||
|
|
66d7172e09 | ||
|
|
99d3a8a749 | ||
|
|
db5ff372a4 | ||
|
|
3fe83f81be | ||
|
|
669e638fd6 | ||
|
|
f1f999f3b6 | ||
|
|
6f3b6fa9ce | ||
|
|
938f945301 | ||
|
|
e3efb2aad6 | ||
|
|
1e678c0d78 | ||
|
|
a59c111140 | ||
|
|
a8b2a31bed | ||
|
|
37402f9ee8 | ||
|
|
e7b5ecb40f | ||
|
|
c817ef04b9 | ||
|
|
f52b18439c | ||
|
|
1e03c628d5 | ||
|
|
71fb39db1f | ||
|
|
bcfb3726b0 | ||
|
|
c6e9e29671 | ||
|
|
1bfefcce39 | ||
|
|
22488e93e1 | ||
|
|
244b89f035 | ||
|
|
1f9a241b94 | ||
|
|
03641aae42 | ||
|
|
a2bdd113cc | ||
|
|
a92e2f3c7b | ||
|
|
97766b3a57 | ||
|
|
9ef4c3bb06 | ||
|
|
d82f0cd757 | ||
|
|
5f529e2af4 | ||
|
|
beadd9e02b | ||
|
|
72543789cb | ||
|
|
5789439fa9 | ||
|
|
f549126bcf | ||
|
|
7197548bad | ||
|
|
241fde783c | ||
|
|
2b872cd1f4 | ||
|
|
a606fb4d1d | ||
|
|
9f9c6be38e | ||
|
|
01ee524049 | ||
|
|
af9cb65338 | ||
|
|
8aa11c580b | ||
|
|
ada627f444 | ||
|
|
a7b6d338c3 | ||
|
|
9f00538b97 | ||
|
|
a085015282 | ||
|
|
0b9c220fbb | ||
|
|
0e3d04873d | ||
|
|
b7578d939f | ||
|
|
b5c28de03f | ||
|
|
e17d25c156 | ||
|
|
c25dc1b99c | ||
|
|
a493a574bd | ||
|
|
4284493dce | ||
|
|
25059de8e1 | ||
|
|
1731b05ad0 | ||
|
|
e80dc663ac | ||
|
|
39988a4c2f | ||
|
|
415bff303a | ||
|
|
a65eb62a54 | ||
|
|
03b2982128 | ||
|
|
bff0527857 | ||
|
|
f3b7634254 | ||
|
|
6a9593c0b9 | ||
|
|
edb785b8e5 | ||
|
|
26d757b50a | ||
|
|
535079ee87 | ||
|
|
ac380c29c1 | ||
|
|
3fd212f26c | ||
|
|
04a3abc651 | ||
|
|
6caf85ddd1 | ||
|
|
16e4071508 | ||
|
|
69e7c4324b | ||
|
|
a1c4a8cbe5 | ||
|
|
e37f6cfda7 | ||
|
|
989c804409 | ||
|
|
7345bc3c82 | ||
|
|
69bee35700 | ||
|
|
598e24df7c | ||
|
|
0ae669201e | ||
|
|
f52a8a4642 | ||
|
|
9c40b61ef2 | ||
|
|
72dabcda83 | ||
|
|
161a06dbcc | ||
|
|
8ed3d4e70c | ||
|
|
a4223ccc8a | ||
|
|
ca85923855 | ||
|
|
52bfe7c493 | ||
|
|
4786bd0cbe | ||
|
|
cadab160ff | ||
|
|
6a7f17b2b0 | ||
|
|
4986a4d775 | ||
|
|
903af0c2cf | ||
|
|
3282fa803c | ||
|
|
67cc47608d | ||
|
|
0411704b8b | ||
|
|
1de85b2c69 | ||
|
|
33b012f29d | ||
|
|
1357584df3 | ||
|
|
e15809e271 | ||
|
|
0da1950427 | ||
|
|
e590b921be | ||
|
|
09462692f5 | ||
|
|
c1d1b5f762 | ||
|
|
6b9c87b858 | ||
|
|
485b6eb904 | ||
|
|
057630bdb5 | ||
|
|
6b02873b30 | ||
|
|
0fa0fc6d6b | ||
|
|
339ec07465 | ||
|
|
cd2e798fea | ||
|
|
d5cadbeae2 | ||
|
|
8046a3ccae | ||
|
|
bf91d60b31 | ||
|
|
539c047ec8 | ||
|
|
290c18fa87 | ||
|
|
98c46f5e57 | ||
|
|
f8bd5b5b4e | ||
|
|
816d32edad | ||
|
|
8453835c05 | ||
|
|
9328c356c8 | ||
|
|
89e3c1fc94 | ||
|
|
67e54cd15d | ||
|
|
278ea24786 | ||
|
|
aba1662631 | ||
|
|
61eeb60c19 | ||
|
|
5e9a8f4806 | ||
|
|
4cb274e9bc | ||
|
|
8b9b1a6a35 | ||
|
|
2655964113 | ||
|
|
188bad061b | ||
|
|
3af4c329aa | ||
|
|
6c13395f7d | ||
|
|
77b32ba360 | ||
|
|
91dba291ac | ||
|
|
a6bc293640 | ||
|
|
53882d6e5f | ||
|
|
d68adfbf10 | ||
|
|
498a392d7f | ||
|
|
740f6c05db | ||
|
|
d810ce301f | ||
|
|
5ef6a14d24 | ||
|
|
a13f6f1e68 | ||
|
|
d2d0f1aaee | ||
|
|
e64c72cc89 | ||
|
|
9ab915a08b | ||
|
|
e26fbf0328 | ||
|
|
d9a52c4a2a | ||
|
|
7b2ec90de9 | ||
|
|
d310bf8bbf | ||
|
|
2abc6cc939 | ||
|
|
56d4e694a2 | ||
|
|
5f002c9cdc | ||
|
|
759daf4b4a | ||
|
|
3a8d9568e3 | ||
|
|
ff22a9d94a | ||
|
|
a6e42d5374 | ||
|
|
a2f74e0488 | ||
|
|
ee44240569 | ||
|
|
d0828744a2 | ||
|
|
6e2e576b29 | ||
|
|
bf61e27f8a | ||
|
|
c441c30b46 | ||
|
|
0e741230ea | ||
|
|
1bfe9ac2db | ||
|
|
6812e72348 | ||
|
|
b6449d2f5b | ||
|
|
7e3ea20dce | ||
|
|
c9d6fe9dcd | ||
|
|
4a649a6b8b | ||
|
|
8fef184963 | ||
|
|
69583ca3c0 | ||
|
|
6038a68e91 | ||
|
|
fa8bd8db87 | ||
|
|
18b4f0ed0f | ||
|
|
461f9d66c9 | ||
|
|
2155103c7a | ||
|
|
c9a6839c45 | ||
|
|
9fbe331a80 | ||
|
|
a56389c4ce | ||
|
|
64656784cb | ||
|
|
6eff2c181e | ||
|
|
1aa48c6d62 | ||
|
|
c7ca1a346d | ||
|
|
fa0ec7b502 | ||
|
|
768438c136 | ||
|
|
9badea0b3c | ||
|
|
43263a1650 | ||
|
|
821e02dc75 | ||
|
|
ed011ecf28 | ||
|
|
d861de4c2f | ||
|
|
3a3b2449dc | ||
|
|
d2614406ca | ||
|
|
0798d098ae | ||
|
|
dab7ddc2bb | ||
|
|
081a96e281 | ||
|
|
a7dd881d79 | ||
|
|
8134d5e24d | ||
|
|
ba6756cd45 | ||
|
|
5d8fce21ac | ||
|
|
e7e4a5bcd4 | ||
|
|
55f33357ea | ||
|
|
90568bba31 | ||
|
|
5d6e2dc2e4 | ||
|
|
6bb33f2559 | ||
|
|
ced92554ed | ||
|
|
dff3383158 | ||
|
|
bf03c89cb2 | ||
|
|
9f1484bbef | ||
|
|
3899680e26 | ||
|
|
6bb2eb25a1 | ||
|
|
f8dfd8edb3 | ||
|
|
042be624a3 | ||
|
|
6bafa4c79a | ||
|
|
58b42fac5c | ||
|
|
3b47b9558a | ||
|
|
ccf9636296 | ||
|
|
96942719f2 | ||
|
|
69cf1c1adc | ||
|
|
d77cba40b8 | ||
|
|
968735b555 | ||
|
|
ceed9d29eb | ||
|
|
41329039ee | ||
|
|
f68b102ca8 | ||
|
|
fa36e54298 | ||
|
|
b689f57435 | ||
|
|
885fa0ff56 | ||
|
|
303acb72a3 | ||
|
|
b2a46cd0cd | ||
|
|
5a5ecb3ee3 | ||
|
|
60b4ab6a63 | ||
|
|
e4b096a08f | ||
|
|
343f55049b | ||
|
|
6b46025261 | ||
|
|
5ea503f23e | ||
|
|
ce95f9ac23 | ||
|
|
c3fb87501b | ||
|
|
dc6a343612 | ||
|
|
3a61053957 | ||
|
|
570129e4d4 | ||
|
|
3315c7045f | ||
|
|
5ae50e242c | ||
|
|
bbcf449719 | ||
|
|
aab10f7184 | ||
|
|
8d43488cb8 | ||
|
|
0a9c647e19 | ||
|
|
40db5d4aa8 | ||
|
|
9254532baa | ||
|
|
7abed47cf0 | ||
|
|
5c6ac758f7 | ||
|
|
007677962c | ||
|
|
9c4aeab64a | ||
|
|
48e6fc0efe | ||
|
|
c8be713d11 | ||
|
|
ae887c8648 | ||
|
|
5daac2531b | ||
|
|
68def00327 | ||
|
|
67e7976710 | ||
|
|
35747e937e | ||
|
|
fb439787a4 | ||
|
|
8fa368f473 | ||
|
|
c84a9d07b1 | ||
|
|
7fb46cdfc4 | ||
|
|
52985e5ddc | ||
|
|
e880935dc3 | ||
|
|
cc22b1bca5 | ||
|
|
49a5128918 | ||
|
|
fedc7dcb44 | ||
|
|
cd32b20215 | ||
|
|
15cd9832c4 | ||
|
|
f25d4e4553 | ||
|
|
12d1c82b63 | ||
|
|
aebe855078 | ||
|
|
3416a71ebd | ||
|
|
94b3fea528 | ||
|
|
ad1a9ecca1 | ||
|
|
715accfb8a | ||
|
|
a8e03c6138 | ||
|
|
f69446b648 | ||
|
|
eedfbe5846 | ||
|
|
153351cc9f | ||
|
|
1b1eec40a7 | ||
|
|
763877541a | ||
|
|
1fad7d72a2 | ||
|
|
51ea2ea879 | ||
|
|
d77a478bf0 | ||
|
|
e413c0264a | ||
|
|
f88e7f898c | ||
|
|
d07bd4a6db | ||
|
|
fb34c099d5 | ||
|
|
1d2ee56a15 | ||
|
|
86665f7f09 | ||
|
|
0d2b4af986 | ||
|
|
dc2b2eeb9f | ||
|
|
e5dbb66d53 | ||
|
|
3474b1c471 | ||
|
|
3886de5b7c | ||
|
|
2b3cec06b3 | ||
|
|
8536754d14 | ||
|
|
1f36235801 | ||
|
|
a4194b14f9 | ||
|
|
2dcc629d9d | ||
|
|
98ddadc6bc | ||
|
|
f6e47b7383 | ||
|
|
f073ddc906 | ||
|
|
3e00631925 | ||
|
|
9b7ac58562 | ||
|
|
f242ddd801 | ||
|
|
c129886fe2 | ||
|
|
f577e814cf | ||
|
|
c860a0cedd | ||
|
|
ae7e28e492 | ||
|
|
90a63234ad | ||
|
|
14bca52e8f | ||
|
|
2f3c3361cf | ||
|
|
4034134055 | ||
|
|
c04f94cb7b | ||
|
|
fd1bbc7925 | ||
|
|
ff69bed394 | ||
|
|
d6e8c5146f | ||
|
|
9a04cf99d7 | ||
|
|
86e7c11e71 | ||
|
|
361cc08faa | ||
|
|
70dc771052 | ||
|
|
c14873a799 | ||
|
|
bba5abd74b | ||
|
|
a224e79c1f | ||
|
|
c305d98186 | ||
|
|
7c5a473e71 | ||
|
|
5e0f5d1eed | ||
|
|
238b269bc4 | ||
|
|
0ad121b9d2 | ||
|
|
7088acd9fd | ||
|
|
e0a900d4b6 | ||
|
|
a0fe2f0c7d | ||
|
|
d5b9bc2f26 | ||
|
|
584254e6ca | ||
|
|
a2963ed7bb | ||
|
|
2a3c2e133d | ||
|
|
3e7dcb2755 | ||
|
|
faeec00b39 | ||
|
|
eeed81392f | ||
|
|
95dce9e992 | ||
|
|
502bd2a191 | ||
|
|
17ac92a9d0 | ||
|
|
ba028cde0c | ||
|
|
6e751e7a9b | ||
|
|
948b56d0e6 | ||
|
|
4bf2dc9ece | ||
|
|
125823f8ab | ||
|
|
24d33397e9 | ||
|
|
2c553825f4 | ||
|
|
198c485e9a | ||
|
|
0138505507 | ||
|
|
5d50dcc600 | ||
|
|
7bdd8c4626 | ||
|
|
fc82c35f0c | ||
|
|
426ebad300 | ||
|
|
1afe61c593 | ||
|
|
c20751829b | ||
|
|
a3b8ee8392 | ||
|
|
156c0fe7f6 | ||
|
|
216f7a38cf | ||
|
|
fd04dc10d4 | ||
|
|
d39bdce926 | ||
|
|
c6e01245b0 | ||
|
|
c168ee7ba4 | ||
|
|
7575253000 | ||
|
|
c28c1efbb1 | ||
|
|
e6aa2c3b78 | ||
|
|
ab7c481f83 | ||
|
|
84ad1c352d | ||
|
|
e9aad39ac9 | ||
|
|
c3444a87bc | ||
|
|
67b224b340 | ||
|
|
bded14d36b | ||
|
|
73fa0b6631 | ||
|
|
2f07337588 | ||
|
|
da163d44e7 | ||
|
|
56fbf8ae0c | ||
|
|
327eb4b39b | ||
|
|
ae7873a7e3 | ||
|
|
9a5f01813b | ||
|
|
0605a3b725 | ||
|
|
09c535f159 | ||
|
|
7fb11da5df | ||
|
|
9c9a46499a | ||
|
|
6fca60261e | ||
|
|
00537b32ef | ||
|
|
8636758a90 | ||
|
|
e39dfbd624 | ||
|
|
6e048b2a12 | ||
|
|
f9657599c2 | ||
|
|
42ae3bba9b | ||
|
|
2fd56a4bfe | ||
|
|
824bcc5603 | ||
|
|
4fbb613aaa | ||
|
|
9eb45270f2 | ||
|
|
75c61c53e8 | ||
|
|
2688a47436 | ||
|
|
fe3bf4b189 | ||
|
|
456cb5ebb2 | ||
|
|
3d91d574b4 | ||
|
|
54876c5499 | ||
|
|
d256585284 | ||
|
|
bd8f100b43 | ||
|
|
44f05f2dcc | ||
|
|
43f7f82bdc | ||
|
|
e902f63211 | ||
|
|
129f68e194 | ||
|
|
4b37fe12d7 | ||
|
|
6de79922c5 | ||
|
|
e1a9791f44 | ||
|
|
81795f51c6 | ||
|
|
68dfb11155 | ||
|
|
39fc1beb89 | ||
|
|
fe0ddec0f9 | ||
|
|
9b52b4efd9 | ||
|
|
e90e527603 | ||
|
|
a510854741 | ||
|
|
8935ce4ccf | ||
|
|
f9edc9059a | ||
|
|
db8917a769 | ||
|
|
c2d70cc1c2 | ||
|
|
3b13c7f9ce | ||
|
|
b7150d8026 | ||
|
|
041830a7f8 | ||
|
|
a18daf0195 | ||
|
|
5d3dfceb22 | ||
|
|
c82855e732 | ||
|
|
956f156018 | ||
|
|
9b13c35e7f | ||
|
|
bc8e637bba | ||
|
|
f03c28c906 | ||
|
|
e4b1f39fdc | ||
|
|
4780af910c | ||
|
|
d61ce5c524 | ||
|
|
20ab151f4d | ||
|
|
8a7be7543a | ||
|
|
3f806aec9c | ||
|
|
6c273b32bb | ||
|
|
b986f9d6ee | ||
|
|
c98cca6b7b | ||
|
|
fbec78ede5 | ||
|
|
c1d9a2d1f1 | ||
|
|
8a10036f32 | ||
|
|
924a3aec0e | ||
|
|
3b3ac31541 | ||
|
|
e0cb2f9d0f | ||
|
|
549b4edb59 | ||
|
|
67c912aca2 | ||
|
|
a74dde5d9e | ||
|
|
f7bcd24726 | ||
|
|
337c900770 | ||
|
|
e83e73ead4 | ||
|
|
9ec2f6b64d | ||
|
|
f970592efe | ||
|
|
7592c11e99 | ||
|
|
759b05e137 | ||
|
|
42ebd9ffce | ||
|
|
bc0fc33966 | ||
|
|
f4aab16e39 | ||
|
|
e91425287c | ||
|
|
f05908f570 |
28
.devcontainer/.env.example
Normal file
28
.devcontainer/.env.example
Normal file
@@ -0,0 +1,28 @@
|
||||
COMPOSE_PROJECT_NAME=trmm
|
||||
|
||||
IMAGE_REPO=tacticalrmm/
|
||||
VERSION=latest
|
||||
|
||||
# tactical credentials (Used to login to dashboard)
|
||||
TRMM_USER=tactical
|
||||
TRMM_PASS=tactical
|
||||
|
||||
# dns settings
|
||||
APP_HOST=rmm.example.com
|
||||
API_HOST=api.example.com
|
||||
MESH_HOST=mesh.example.com
|
||||
|
||||
# mesh settings
|
||||
MESH_USER=tactical
|
||||
MESH_PASS=tactical
|
||||
MONGODB_USER=mongouser
|
||||
MONGODB_PASSWORD=mongopass
|
||||
|
||||
# database settings
|
||||
POSTGRES_USER=postgres
|
||||
POSTGRES_PASS=postgrespass
|
||||
|
||||
# DEV SETTINGS
|
||||
APP_PORT=80
|
||||
API_PORT=80
|
||||
HTTP_PROTOCOL=https
|
||||
28
.devcontainer/api.dockerfile
Normal file
28
.devcontainer/api.dockerfile
Normal file
@@ -0,0 +1,28 @@
|
||||
FROM python:3.8-slim
|
||||
|
||||
ENV TACTICAL_DIR /opt/tactical
|
||||
ENV TACTICAL_GO_DIR /usr/local/rmmgo
|
||||
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
|
||||
ENV WORKSPACE_DIR /workspace
|
||||
ENV TACTICAL_USER tactical
|
||||
ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
RUN groupadd -g 1000 tactical && \
|
||||
useradd -u 1000 -g 1000 tactical
|
||||
|
||||
# Copy Go Files
|
||||
COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go
|
||||
|
||||
# Copy Dev python reqs
|
||||
COPY ./requirements.txt /
|
||||
|
||||
# Copy Docker Entrypoint
|
||||
COPY ./entrypoint.sh /
|
||||
RUN chmod +x /entrypoint.sh
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
WORKDIR ${WORKSPACE_DIR}/api/tacticalrmm
|
||||
19
.devcontainer/docker-compose.debug.yml
Normal file
19
.devcontainer/docker-compose.debug.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 --nothreading --noreload"]
|
||||
ports:
|
||||
- 8000:8000
|
||||
- 5678:5678
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-backend
|
||||
211
.devcontainer/docker-compose.yml
Normal file
211
.devcontainer/docker-compose.yml
Normal file
@@ -0,0 +1,211 @@
|
||||
version: '3.4'
|
||||
|
||||
services:
|
||||
api-dev:
|
||||
image: api-dev
|
||||
restart: always
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-api"]
|
||||
environment:
|
||||
API_PORT: ${API_PORT}
|
||||
ports:
|
||||
- "8000:${API_PORT}"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-backend
|
||||
|
||||
app-dev:
|
||||
image: node:12-alpine
|
||||
restart: always
|
||||
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
|
||||
working_dir: /workspace/web
|
||||
volumes:
|
||||
- ..:/workspace:cached
|
||||
ports:
|
||||
- "8080:${APP_PORT}"
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-frontend
|
||||
|
||||
# nats
|
||||
nats-dev:
|
||||
image: ${IMAGE_REPO}tactical-nats:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
API_HOST: ${API_HOST}
|
||||
API_PORT: ${API_PORT}
|
||||
DEV: 1
|
||||
ports:
|
||||
- "4222:4222"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- ${API_HOST}
|
||||
- tactical-nats
|
||||
|
||||
# meshcentral container
|
||||
meshcentral-dev:
|
||||
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
MESH_PASS: ${MESH_PASS}
|
||||
MONGODB_USER: ${MONGODB_USER}
|
||||
MONGODB_PASSWORD: ${MONGODB_PASSWORD}
|
||||
NGINX_HOST_IP: 172.21.0.20
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-meshcentral
|
||||
- ${MESH_HOST}
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- mesh-data-dev:/home/node/app/meshcentral-data
|
||||
depends_on:
|
||||
- mongodb-dev
|
||||
|
||||
# mongodb container for meshcentral
|
||||
mongodb-dev:
|
||||
image: mongo:4.4
|
||||
restart: always
|
||||
environment:
|
||||
MONGO_INITDB_ROOT_USERNAME: ${MONGODB_USER}
|
||||
MONGO_INITDB_ROOT_PASSWORD: ${MONGODB_PASSWORD}
|
||||
MONGO_INITDB_DATABASE: meshcentral
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-mongodb
|
||||
volumes:
|
||||
- mongo-dev-data:/data/db
|
||||
|
||||
# postgres database for api service
|
||||
postgres-dev:
|
||||
image: postgres:13-alpine
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_DB: tacticalrmm
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASS}
|
||||
volumes:
|
||||
- postgres-data-dev:/var/lib/postgresql/data
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-postgres
|
||||
|
||||
# redis container for celery tasks
|
||||
redis-dev:
|
||||
restart: always
|
||||
image: redis:6.0-alpine
|
||||
networks:
|
||||
dev:
|
||||
aliases:
|
||||
- tactical-redis
|
||||
|
||||
init-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
restart: on-failure
|
||||
command: ["tactical-init-dev"]
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASS: ${POSTGRES_PASS}
|
||||
APP_HOST: ${APP_HOST}
|
||||
API_HOST: ${API_HOST}
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
MESH_USER: ${MESH_USER}
|
||||
TRMM_USER: ${TRMM_USER}
|
||||
TRMM_PASS: ${TRMM_PASS}
|
||||
HTTP_PROTOCOL: ${HTTP_PROTOCOL}
|
||||
APP_PORT: ${APP_PORT}
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- meshcentral-dev
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
|
||||
# container for celery worker service
|
||||
celery-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-celery-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
# container for celery beat service
|
||||
celerybeat-dev:
|
||||
image: api-dev
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./api.dockerfile
|
||||
command: ["tactical-celerybeat-dev"]
|
||||
restart: always
|
||||
networks:
|
||||
- dev
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
- ..:/workspace:cached
|
||||
depends_on:
|
||||
- postgres-dev
|
||||
- redis-dev
|
||||
|
||||
nginx-dev:
|
||||
# container for tactical reverse proxy
|
||||
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
|
||||
restart: always
|
||||
environment:
|
||||
APP_HOST: ${APP_HOST}
|
||||
API_HOST: ${API_HOST}
|
||||
MESH_HOST: ${MESH_HOST}
|
||||
CERT_PUB_KEY: ${CERT_PUB_KEY}
|
||||
CERT_PRIV_KEY: ${CERT_PRIV_KEY}
|
||||
APP_PORT: ${APP_PORT}
|
||||
API_PORT: ${API_PORT}
|
||||
networks:
|
||||
dev:
|
||||
ipv4_address: 172.21.0.20
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
volumes:
|
||||
- tactical-data-dev:/opt/tactical
|
||||
|
||||
volumes:
|
||||
tactical-data-dev:
|
||||
postgres-data-dev:
|
||||
mongo-dev-data:
|
||||
mesh-data-dev:
|
||||
|
||||
networks:
|
||||
dev:
|
||||
driver: bridge
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.21.0.0/24
|
||||
168
.devcontainer/entrypoint.sh
Normal file
168
.devcontainer/entrypoint.sh
Normal file
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
: "${TRMM_USER:=tactical}"
|
||||
: "${TRMM_PASS:=tactical}"
|
||||
: "${POSTGRES_HOST:=tactical-postgres}"
|
||||
: "${POSTGRES_PORT:=5432}"
|
||||
: "${POSTGRES_USER:=tactical}"
|
||||
: "${POSTGRES_PASS:=tactical}"
|
||||
: "${POSTGRES_DB:=tacticalrmm}"
|
||||
: "${MESH_CONTAINER:=tactical-meshcentral}"
|
||||
: "${MESH_USER:=meshcentral}"
|
||||
: "${MESH_PASS:=meshcentralpass}"
|
||||
: "${MESH_HOST:=tactical-meshcentral}"
|
||||
: "${API_HOST:=tactical-backend}"
|
||||
: "${APP_HOST:=tactical-frontend}"
|
||||
: "${REDIS_HOST:=tactical-redis}"
|
||||
: "${HTTP_PROTOCOL:=http}"
|
||||
: "${APP_PORT:=8080}"
|
||||
: "${API_PORT:=8000}"
|
||||
|
||||
# Add python venv to path
|
||||
export PATH="${VIRTUAL_ENV}/bin:$PATH"
|
||||
|
||||
function check_tactical_ready {
|
||||
sleep 15
|
||||
until [ -f "${TACTICAL_READY_FILE}" ]; do
|
||||
echo "waiting for init container to finish install or update..."
|
||||
sleep 10
|
||||
done
|
||||
}
|
||||
|
||||
function django_setup {
|
||||
until (echo > /dev/tcp/"${POSTGRES_HOST}"/"${POSTGRES_PORT}") &> /dev/null; do
|
||||
echo "waiting for postgresql container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
until (echo > /dev/tcp/"${MESH_CONTAINER}"/443) &> /dev/null; do
|
||||
echo "waiting for meshcentral container to be ready..."
|
||||
sleep 5
|
||||
done
|
||||
|
||||
echo "setting up django environment"
|
||||
|
||||
# configure django settings
|
||||
MESH_TOKEN="$(cat ${TACTICAL_DIR}/tmp/mesh_token)"
|
||||
|
||||
DJANGO_SEKRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 80 | head -n 1)
|
||||
|
||||
localvars="$(cat << EOF
|
||||
SECRET_KEY = '${DJANGO_SEKRET}'
|
||||
|
||||
DEBUG = True
|
||||
|
||||
DOCKER_BUILD = True
|
||||
|
||||
CERT_FILE = '/opt/tactical/certs/fullchain.pem'
|
||||
KEY_FILE = '/opt/tactical/certs/privkey.pem'
|
||||
|
||||
SCRIPTS_DIR = '${WORKSPACE_DIR}/scripts'
|
||||
|
||||
ALLOWED_HOSTS = ['${API_HOST}', '*']
|
||||
|
||||
ADMIN_URL = 'admin/'
|
||||
|
||||
CORS_ORIGIN_ALLOW_ALL = True
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.postgresql',
|
||||
'NAME': '${POSTGRES_DB}',
|
||||
'USER': '${POSTGRES_USER}',
|
||||
'PASSWORD': '${POSTGRES_PASS}',
|
||||
'HOST': '${POSTGRES_HOST}',
|
||||
'PORT': '${POSTGRES_PORT}',
|
||||
}
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DATETIME_FORMAT': '%b-%d-%Y - %H:%M',
|
||||
|
||||
'DEFAULT_PERMISSION_CLASSES': (
|
||||
'rest_framework.permissions.IsAuthenticated',
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
'knox.auth.TokenAuthentication',
|
||||
),
|
||||
}
|
||||
|
||||
if not DEBUG:
|
||||
REST_FRAMEWORK.update({
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
)
|
||||
})
|
||||
|
||||
MESH_USERNAME = '${MESH_USER}'
|
||||
MESH_SITE = 'https://${MESH_HOST}'
|
||||
MESH_TOKEN_KEY = '${MESH_TOKEN}'
|
||||
REDIS_HOST = '${REDIS_HOST}'
|
||||
EOF
|
||||
)"
|
||||
|
||||
echo "${localvars}" > ${WORKSPACE_DIR}/api/tacticalrmm/tacticalrmm/local_settings.py
|
||||
|
||||
# run migrations and init scripts
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py migrate --no-input
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py collectstatic --no-input
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py initial_db_setup
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py initial_mesh_setup
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py load_chocos
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py load_community_scripts
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py reload_nats
|
||||
|
||||
# create super user
|
||||
echo "from accounts.models import User; User.objects.create_superuser('${TRMM_USER}', 'admin@example.com', '${TRMM_PASS}') if not User.objects.filter(username='${TRMM_USER}').exists() else 0;" | python manage.py shell
|
||||
}
|
||||
|
||||
if [ "$1" = 'tactical-init-dev' ]; then
|
||||
|
||||
# make directories if they don't exist
|
||||
mkdir -p "${TACTICAL_DIR}/tmp"
|
||||
|
||||
test -f "${TACTICAL_READY_FILE}" && rm "${TACTICAL_READY_FILE}"
|
||||
|
||||
# setup Python virtual env and install dependencies
|
||||
! test -e "${VIRTUAL_ENV}" && python -m venv --copies ${VIRTUAL_ENV}
|
||||
"${VIRTUAL_ENV}"/bin/pip install --no-cache-dir -r /requirements.txt
|
||||
|
||||
django_setup
|
||||
|
||||
# create .env file for frontend
|
||||
webenv="$(cat << EOF
|
||||
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
|
||||
APP_URL = https://${APP_HOST}
|
||||
EOF
|
||||
)"
|
||||
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
|
||||
|
||||
# chown everything to tactical user
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
|
||||
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${TACTICAL_DIR}"
|
||||
|
||||
# create install ready file
|
||||
su -c "echo 'tactical-init' > ${TACTICAL_READY_FILE}" "${TACTICAL_USER}"
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-api' ]; then
|
||||
cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
|
||||
chmod +x /usr/local/bin/goversioninfo
|
||||
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celery-dev' ]; then
|
||||
check_tactical_ready
|
||||
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm worker -l debug
|
||||
fi
|
||||
|
||||
if [ "$1" = 'tactical-celerybeat-dev' ]; then
|
||||
check_tactical_ready
|
||||
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
|
||||
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
|
||||
fi
|
||||
46
.devcontainer/requirements.txt
Normal file
46
.devcontainer/requirements.txt
Normal file
@@ -0,0 +1,46 @@
|
||||
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
|
||||
amqp==5.0.5
|
||||
asgiref==3.3.1
|
||||
asyncio-nats-client==0.11.4
|
||||
billiard==3.6.3.0
|
||||
celery==5.0.5
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.5
|
||||
chardet==4.0.0
|
||||
cryptography==3.4.4
|
||||
decorator==4.4.2
|
||||
Django==3.1.6
|
||||
django-cors-headers==3.7.0
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.2
|
||||
future==0.18.2
|
||||
kombu==5.0.2
|
||||
loguru==0.5.3
|
||||
msgpack==1.0.2
|
||||
packaging==20.8
|
||||
psycopg2-binary==2.8.6
|
||||
pycparser==2.20
|
||||
pycryptodome==3.10.1
|
||||
pyotp==2.6.0
|
||||
pyparsing==2.4.7
|
||||
pytz==2021.1
|
||||
qrcode==6.1
|
||||
redis==3.5.3
|
||||
requests==2.25.1
|
||||
six==1.15.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.52.0
|
||||
urllib3==1.26.3
|
||||
validators==0.18.2
|
||||
vine==5.0.0
|
||||
websockets==8.1
|
||||
zipp==3.4.0
|
||||
black
|
||||
Werkzeug
|
||||
django-extensions
|
||||
coverage
|
||||
coveralls
|
||||
model_bakery
|
||||
mkdocs
|
||||
mkdocs-material
|
||||
pymdown-extensions
|
||||
25
.dockerignore
Normal file
25
.dockerignore
Normal file
@@ -0,0 +1,25 @@
|
||||
**/__pycache__
|
||||
**/.classpath
|
||||
**/.dockerignore
|
||||
**/.env
|
||||
**/.git
|
||||
**/.gitignore
|
||||
**/.project
|
||||
**/.settings
|
||||
**/.toolstarget
|
||||
**/.vs
|
||||
**/.vscode
|
||||
**/*.*proj.user
|
||||
**/*.dbmdl
|
||||
**/*.jfm
|
||||
**/azds.yaml
|
||||
**/charts
|
||||
**/docker-compose*
|
||||
**/Dockerfile*
|
||||
**/node_modules
|
||||
**/npm-debug.log
|
||||
**/obj
|
||||
**/secrets.dev.yaml
|
||||
**/values.dev.yaml
|
||||
**/env
|
||||
README.md
|
||||
12
.github/FUNDING.yml
vendored
Normal file
12
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: wh1te909
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: # Replace with a single Ko-fi username
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
otechie: # Replace with a single Otechie username
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||
78
.github/workflows/docker-build-push.yml
vendored
Normal file
78
.github/workflows/docker-build-push.yml
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
name: Publish Tactical Docker Images
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
jobs:
|
||||
docker:
|
||||
name: Build and Push Docker Images
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Get Github Tag
|
||||
id: prep
|
||||
run: |
|
||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/v}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and Push Tactical Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical:${{ steps.prep.outputs.version }},tacticalrmm/tactical:latest
|
||||
|
||||
- name: Build and Push Tactical MeshCentral Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-meshcentral/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-meshcentral:${{ steps.prep.outputs.version }},tacticalrmm/tactical-meshcentral:latest
|
||||
|
||||
- name: Build and Push Tactical NATS Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-nats/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nats:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nats:latest
|
||||
|
||||
- name: Build and Push Tactical Frontend Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-frontend/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-frontend:${{ steps.prep.outputs.version }},tacticalrmm/tactical-frontend:latest
|
||||
|
||||
- name: Build and Push Tactical Nginx Image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
pull: true
|
||||
file: ./docker/containers/tactical-nginx/dockerfile
|
||||
platforms: linux/amd64
|
||||
tags: tacticalrmm/tactical-nginx:${{ steps.prep.outputs.version }},tacticalrmm/tactical-nginx:latest
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -42,4 +42,6 @@ api/tacticalrmm/accounts/management/commands/random_data.py
|
||||
versioninfo.go
|
||||
resource.syso
|
||||
htmlcov/
|
||||
docker-compose.dev.yml
|
||||
docs/.vuepress/dist
|
||||
nats-rmm.conf
|
||||
|
||||
43
.travis.yml
43
.travis.yml
@@ -1,43 +0,0 @@
|
||||
dist: focal
|
||||
|
||||
matrix:
|
||||
include:
|
||||
- language: node_js
|
||||
node_js: "12"
|
||||
before_install:
|
||||
- cd web
|
||||
install:
|
||||
- npm install
|
||||
script:
|
||||
- npm run test:unit
|
||||
|
||||
- language: python
|
||||
python: "3.8"
|
||||
services:
|
||||
- redis
|
||||
|
||||
addons:
|
||||
postgresql: "13"
|
||||
apt:
|
||||
packages:
|
||||
- postgresql-13
|
||||
|
||||
before_script:
|
||||
- psql -c 'CREATE DATABASE travisci;' -U postgres
|
||||
- psql -c "CREATE USER travisci WITH PASSWORD 'travisSuperSekret6645';" -U postgres
|
||||
- psql -c 'GRANT ALL PRIVILEGES ON DATABASE travisci TO travisci;' -U postgres
|
||||
- psql -c 'ALTER USER travisci CREATEDB;' -U postgres
|
||||
|
||||
before_install:
|
||||
- cd api/tacticalrmm
|
||||
|
||||
install:
|
||||
- pip install --no-cache-dir --upgrade pip
|
||||
- pip install --no-cache-dir setuptools==49.6.0 wheel==0.35.1
|
||||
- pip install --no-cache-dir -r requirements.txt -r requirements-test.txt
|
||||
|
||||
script:
|
||||
- coverage run manage.py test -v 2
|
||||
|
||||
after_success:
|
||||
- coveralls
|
||||
14
.vscode/launch.json
vendored
14
.vscode/launch.json
vendored
@@ -14,6 +14,20 @@
|
||||
"0.0.0.0:8000"
|
||||
],
|
||||
"django": true
|
||||
},
|
||||
{
|
||||
"name": "Django: Docker Remote Attach",
|
||||
"type": "python",
|
||||
"request": "attach",
|
||||
"port": 5678,
|
||||
"host": "localhost",
|
||||
"preLaunchTask": "docker debug",
|
||||
"pathMappings": [
|
||||
{
|
||||
"localRoot": "${workspaceFolder}/api/tacticalrmm",
|
||||
"remoteRoot": "/workspace/api/tacticalrmm"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
21
.vscode/settings.json
vendored
21
.vscode/settings.json
vendored
@@ -2,7 +2,7 @@
|
||||
"python.pythonPath": "api/tacticalrmm/env/bin/python",
|
||||
"python.languageServer": "Pylance",
|
||||
"python.analysis.extraPaths": [
|
||||
"api/tacticalrmm"
|
||||
"api/tacticalrmm",
|
||||
],
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"python.formatting.provider": "black",
|
||||
@@ -41,4 +41,23 @@
|
||||
"**/*.zip": true
|
||||
},
|
||||
},
|
||||
"go.useLanguageServer": true,
|
||||
"[go]": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": false,
|
||||
},
|
||||
"editor.snippetSuggestions": "none",
|
||||
},
|
||||
"[go.mod]": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true,
|
||||
},
|
||||
},
|
||||
"gopls": {
|
||||
"usePlaceholders": true,
|
||||
"completeUnimported": true,
|
||||
"staticcheck": true,
|
||||
}
|
||||
}
|
||||
23
.vscode/tasks.json
vendored
Normal file
23
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "docker debug",
|
||||
"type": "shell",
|
||||
"command": "docker-compose",
|
||||
"args": [
|
||||
"-p",
|
||||
"trmm",
|
||||
"-f",
|
||||
".devcontainer/docker-compose.yml",
|
||||
"-f",
|
||||
".devcontainer/docker-compose.debug.yml",
|
||||
"up",
|
||||
"-d",
|
||||
"--build"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
30
README.md
30
README.md
@@ -1,13 +1,12 @@
|
||||
# Tactical RMM
|
||||
|
||||
[](https://travis-ci.com/wh1te909/tacticalrmm)
|
||||
[](https://dev.azure.com/dcparsi/Tactical%20RMM/_build/latest?definitionId=4&branchName=develop)
|
||||
[](https://coveralls.io/github/wh1te909/tacticalrmm?branch=develop)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
[](https://github.com/python/black)
|
||||
|
||||
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang, as well as the [SaltStack](https://github.com/saltstack/salt) api and [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)
|
||||
|
||||
# [LIVE DEMO](https://rmm.xlawgaming.com/)
|
||||
Demo database resets every hour. A lot of features are disabled for obvious reasons due to the nature of this app.
|
||||
@@ -37,7 +36,7 @@ Demo database resets every hour. Alot of features are disabled for obvious reaso
|
||||
## Installation
|
||||
|
||||
### Requirements
|
||||
- VPS with 4GB ram (an install script is provided for Ubuntu Server 20.04)
|
||||
- VPS with 2GB ram (an install script is provided for Ubuntu Server 20.04 / Debian 10)
|
||||
- A domain you own with at least 3 subdomains
|
||||
- Google Authenticator app (2 factor is NOT optional)
|
||||
|
||||
@@ -63,7 +62,7 @@ sudo ufw default allow outgoing
|
||||
sudo ufw allow ssh
|
||||
sudo ufw allow http
|
||||
sudo ufw allow https
|
||||
sudo ufw allow proto tcp from any to any port 4505,4506
|
||||
sudo ufw allow proto tcp from any to any port 4222
|
||||
sudo ufw enable && sudo ufw reload
|
||||
```
|
||||
|
||||
@@ -78,7 +77,7 @@ Create A record ```mesh.tacticalrmm.com``` for meshcentral
|
||||
Download the install script and run it
|
||||
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/install.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/install.sh
|
||||
chmod +x install.sh
|
||||
./install.sh
|
||||
```
|
||||
@@ -92,17 +91,17 @@ chmod +x install.sh
|
||||
From the app's dashboard, choose Agents > Install Agent to generate an installer.
|
||||
|
||||
## Updating
|
||||
Download and run [update.sh](./update.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh))
|
||||
Download and run [update.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh)
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/update.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/update.sh
|
||||
chmod +x update.sh
|
||||
./update.sh
|
||||
```
|
||||
|
||||
## Backup
|
||||
Download [backup.sh](./backup.sh) ([Raw](https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh))
|
||||
Download [backup.sh](https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh)
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/backup.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/backup.sh
|
||||
```
|
||||
Change the postgres username and password at the top of the file (you can find them in `/rmm/api/tacticalrmm/tacticalrmm/local_settings.py` under the DATABASES section)
|
||||
|
||||
@@ -121,7 +120,7 @@ Copy backup file to new server
|
||||
|
||||
Download the restore script, and edit the postgres username/password at the top of the file. Same instructions as above in the backup steps.
|
||||
```
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/develop/restore.sh
|
||||
wget https://raw.githubusercontent.com/wh1te909/tacticalrmm/master/restore.sh
|
||||
```
|
||||
|
||||
Run the restore script, passing it the backup tar file as the first argument
|
||||
@@ -129,14 +128,3 @@ Run the restore script, passing it the backup tar file as the first argument
|
||||
chmod +x restore.sh
|
||||
./restore.sh rmm-backup-xxxxxxx.tar
|
||||
```
|
||||
|
||||
## Using another ssl certificate
|
||||
During the install you can opt out of using the Let's Encrypt certificate. If you do this the script will create a self-signed certificate, so that https continues to work. You can replace the certificates in /certs/example.com/(privkey.pem | pubkey.pem) with your own.
|
||||
|
||||
If you are migrating from Let's Encrypt to another certificate provider, you can create the /certs directory and copy your certificates there. It is recommended to do this because this directory will be backed up with the backup script provided. Then modify the nginx configurations to use your new certificates
|
||||
|
||||
The cert that is generated is a wildcard certificate and is used in the nginx configurations: rmm.conf, api.conf, and mesh.conf. If you can't generate wildcard certificates you can create a cert for each subdomain and configure each nginx configuration file to use its own certificate. Then restart nginx:
|
||||
|
||||
```
|
||||
sudo systemctl restart nginx
|
||||
```
|
||||
@@ -1,457 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
import psutil
|
||||
import os
|
||||
import datetime
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import wmi
|
||||
import win32evtlog
|
||||
import win32con
|
||||
import win32evtlogutil
|
||||
import winerror
|
||||
from time import sleep
|
||||
import requests
|
||||
import subprocess
|
||||
import random
|
||||
import platform
|
||||
|
||||
ARCH = "64" if platform.machine().endswith("64") else "32"
|
||||
PROGRAM_DIR = os.path.join(os.environ["ProgramFiles"], "TacticalAgent")
|
||||
TAC_RMM = os.path.join(PROGRAM_DIR, "tacticalrmm.exe")
|
||||
NSSM = os.path.join(PROGRAM_DIR, "nssm.exe" if ARCH == "64" else "nssm-x86.exe")
|
||||
TEMP_DIR = os.path.join(os.environ["WINDIR"], "Temp")
|
||||
SYS_DRIVE = os.environ["SystemDrive"]
|
||||
PY_BIN = os.path.join(SYS_DRIVE, "\\salt", "bin", "python.exe")
|
||||
SALT_CALL = os.path.join(SYS_DRIVE, "\\salt", "salt-call.bat")
|
||||
|
||||
|
||||
def get_services():
    """Enumerate windows services as a list of dicts.

    Manually mirrors psutil's svc.as_dict() because of
    https://github.com/wh1te909/tacticalrmm/issues/38; any service whose
    attributes raise during the read is skipped entirely.
    """
    services = []
    for svc in psutil.win_service_iter():
        try:
            entry = {
                "display_name": svc.display_name(),
                "binpath": svc.binpath(),
                "username": svc.username(),
                "start_type": svc.start_type(),
                "status": svc.status(),
                "pid": svc.pid(),
                "name": svc.name(),
                "description": svc.description(),
            }
        except Exception:
            # partially-readable services are dropped, not half-reported
            continue
        services.append(entry)

    return services
|
||||
|
||||
|
||||
def run_python_script(filename, timeout, script_type="userdefined"):
    """Fetch a python script from the salt fileserver and run it locally.

    No longer used in agent version 0.11.0; kept for older agents.

    The salt:// source paths interpolate *filename*; the extracted copy of
    this file had lost the f-string placeholders, which is restored here.

    :param filename: script file name under salt://scripts[/userdefined]/
    :param timeout: seconds allowed for the script to run
    :param script_type: "userdefined" scripts live in a separate subdir
    :return: the ``cmd.run_all`` result dict from salt
    """
    file_path = os.path.join(TEMP_DIR, filename)

    # best-effort removal of any stale copy before downloading a fresh one
    if os.path.exists(file_path):
        try:
            os.remove(file_path)
        except:
            pass

    if script_type == "userdefined":
        __salt__["cp.get_file"](f"salt://scripts/userdefined/{filename}", file_path)
    else:
        __salt__["cp.get_file"](f"salt://scripts/{filename}", file_path)

    return __salt__["cmd.run_all"](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def run_script(filepath, filename, shell, timeout, args=[], bg=False):
    """Run a powershell/cmd/python script through salt and return its result.

    powershell/cmd scripts go through ``cmd.script`` directly; python
    scripts are first copied into the temp dir and run with the bundled
    interpreter.  *args* are individually double-quoted.
    """
    if shell in ("powershell", "cmd"):
        kwargs = {"source": filepath, "shell": shell, "timeout": timeout, "bg": bg}
        if args:
            kwargs["args"] = " ".join(f'"{a}"' for a in args)
        return __salt__["cmd.script"](**kwargs)

    elif shell == "python":
        file_path = os.path.join(TEMP_DIR, filename)

        # best-effort removal of a stale copy of the script
        if os.path.exists(file_path):
            try:
                os.remove(file_path)
            except:
                pass

        __salt__["cp.get_file"](filepath, file_path)

        salt_cmd = "cmd.run_bg" if bg else "cmd.run_all"

        if args:
            quoted = " ".join(f'"{a}"' for a in args)
            return __salt__[salt_cmd](f"{PY_BIN} {file_path} {quoted}", timeout=timeout)
        else:
            return __salt__[salt_cmd](f"{PY_BIN} {file_path}", timeout=timeout)
|
||||
|
||||
|
||||
def uninstall_agent():
    """Launch the InnoSetup uninstaller for the agent in the background."""
    uninstaller = os.path.join(PROGRAM_DIR, "unins000.exe")
    __salt__["cmd.run_bg"]([uninstaller, "/VERYSILENT", "/SUPPRESSMSGBOXES"])
    return "ok"
|
||||
|
||||
|
||||
def update_salt():
    """Start 'tacticalrmm.exe -m updatesalt' fully detached.

    Returns "running" if an update process is already present, otherwise
    the pid of the newly spawned, detached child.
    """
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "updatesalt" in proc.cmdline():
                return "running"

    from subprocess import Popen, PIPE

    # detach so the child survives the salt minion that spawned it
    CREATE_NEW_PROCESS_GROUP = 0x00000200
    DETACHED_PROCESS = 0x00000008
    child = Popen(
        [TAC_RMM, "-m", "updatesalt"],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        close_fds=True,
        creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
    )
    return child.pid
|
||||
|
||||
|
||||
def run_manual_checks():
    """Ask the agent binary to run all its checks now (fire-and-forget)."""
    __salt__["cmd.run_bg"]([TAC_RMM, "-m", "runchecks"])
    return "ok"
|
||||
|
||||
|
||||
def install_updates():
    """Kick off the agent's windows-update routine unless one is in progress."""
    for proc in psutil.process_iter():
        with proc.oneshot():
            if proc.name() == "tacticalrmm.exe" and "winupdater" in proc.cmdline():
                return "running"

    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "winupdater"])
|
||||
|
||||
|
||||
def _wait_for_service(svc, status, retries=10):
    """Poll a windows service until it reaches *status* or retries run out.

    Sleeps 5s between attempts.  Returns the last observed status string,
    or "fail" if the service could not be found on the last attempt.
    """
    attempts = 0
    while True:
        try:
            service = psutil.win_service_get(svc)
        except psutil.NoSuchProcess:
            stat = "fail"
            attempts += 1
            sleep(5)
        else:
            stat = service.status()
            if stat == status:
                attempts = 0
            else:
                attempts += 1
                sleep(5)

        # attempts == 0 means the desired status was just observed
        if attempts == 0 or attempts > retries:
            break

    return stat
|
||||
|
||||
|
||||
def agent_update_v2(inno, url):
    """Download the installer *inno* from *url*, run it silently, and make
    sure the agent services come back up.

    Salt launches this twice via run_bg, so two copies of this function are
    expected; a third means another update is already in flight.
    """
    instances = 0
    for proc in psutil.process_iter():
        try:
            with proc.oneshot():
                if "win_agent.agent_update_v2" in proc.cmdline():
                    instances += 1
        except Exception:
            continue

    if instances > 2:
        return "already running"

    sleep(random.randint(1, 20))  # don't flood the rmm

    exe = os.path.join(TEMP_DIR, inno)

    # best-effort removal of a stale installer from a previous run
    if os.path.exists(exe):
        try:
            os.remove(exe)
        except:
            pass

    try:
        r = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if r.status_code != 200:
        return "failed"

    with open(exe, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del r

    ret = subprocess.run([exe, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=120)

    # the installer should leave both services running; nudge any that aren't
    for name in ("tacticalagent", "checkrunner"):
        if _wait_for_service(svc=name, status="running") != "running":
            subprocess.run([NSSM, "start", name], timeout=30)

    return "ok"
|
||||
|
||||
|
||||
def do_agent_update_v2(inno, url):
    """Spawn agent_update_v2 in a detached salt-call so it survives this run."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update_v2",
        f"inno={inno}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
||||
|
||||
|
||||
def agent_update(version, url):
    """Legacy updater: download the versioned installer, stop the agent
    services, run the installer silently, then start the services again.

    Salt launches this twice, so two copies of this function are expected;
    a third means another update is already in flight.
    """
    instances = 0
    for proc in psutil.process_iter():
        try:
            with proc.oneshot():
                if "win_agent.agent_update" in proc.cmdline():
                    instances += 1
        except Exception:
            continue

    if instances > 2:
        return "already running"

    sleep(random.randint(1, 60))  # don't flood the rmm
    try:
        resp = requests.get(url, stream=True, timeout=600)
    except Exception:
        return "failed"

    if resp.status_code != 200:
        return "failed"

    installer = os.path.join(TEMP_DIR, f"winagent-v{version}.exe")

    with open(installer, "wb") as f:
        for chunk in resp.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    del resp

    services = ("tacticalagent", "checkrunner")

    for name in services:
        subprocess.run([NSSM, "stop", name], timeout=120)

    sleep(10)
    r = subprocess.run([installer, "/VERYSILENT", "/SUPPRESSMSGBOXES"], timeout=300)
    sleep(30)

    for name in services:
        subprocess.run([NSSM, "start", name], timeout=120)

    return "ok"
|
||||
|
||||
|
||||
def do_agent_update(version, url):
    """Spawn agent_update in a detached salt-call so it survives this run."""
    cmd = [
        SALT_CALL,
        "win_agent.agent_update",
        f"version={version}",
        f"url={url}",
        "--local",
    ]
    return __salt__["cmd.run_bg"](cmd)
|
||||
|
||||
|
||||
class SystemDetail:
    """Snapshot of the main WMI hardware/OS classes on this machine."""

    def __init__(self):
        # one WMI connection shared by every query below
        self.c = wmi.WMI()
        self.comp_sys_prod = self.c.Win32_ComputerSystemProduct()
        self.comp_sys = self.c.Win32_ComputerSystem()
        self.memory = self.c.Win32_PhysicalMemory()
        self.os = self.c.Win32_OperatingSystem()
        self.base_board = self.c.Win32_BaseBoard()
        self.bios = self.c.Win32_BIOS()
        self.disk = self.c.Win32_DiskDrive()
        self.network_adapter = self.c.Win32_NetworkAdapter()
        self.network_config = self.c.Win32_NetworkAdapterConfiguration()
        self.desktop_monitor = self.c.Win32_DesktopMonitor()
        self.cpu = self.c.Win32_Processor()
        self.usb = self.c.Win32_USBController()

    def get_all(self, obj):
        """For each WMI instance in *obj*, return a list of single-key
        {property: value} dicts, skipping properties whose value is None."""
        collected = []
        for instance in obj:
            props = []
            for name in list(instance.properties):
                value = getattr(instance, name)
                if value is not None:
                    props.append({name: value})
            collected.append(props)

        return collected
|
||||
|
||||
|
||||
def system_info():
    """Gather a full WMI inventory as a dict of per-class property lists."""
    detail = SystemDetail()
    sources = {
        "comp_sys_prod": detail.comp_sys_prod,
        "comp_sys": detail.comp_sys,
        "mem": detail.memory,
        "os": detail.os,
        "base_board": detail.base_board,
        "bios": detail.bios,
        "disk": detail.disk,
        "network_adapter": detail.network_adapter,
        "network_config": detail.network_config,
        "desktop_monitor": detail.desktop_monitor,
        "cpu": detail.cpu,
        "usb": detail.usb,
    }
    return {key: detail.get_all(val) for key, val in sources.items()}
|
||||
|
||||
|
||||
def local_sys_info():
    """Have the agent binary collect and report system info in the background."""
    return __salt__["cmd.run_bg"]([TAC_RMM, "-m", "sysinfo"])
|
||||
|
||||
|
||||
def get_procs():
    """Return a snapshot of running processes with cpu/memory usage.

    psutil's cpu_percent() needs a priming call followed by an interval
    before it yields a meaningful number, hence the first pass and sleep.
    """
    # priming pass so the second cpu_percent() call has a baseline
    for proc in psutil.process_iter():
        with proc.oneshot():
            proc.cpu_percent(interval=None)

    # give psutil time to record cpu percent
    sleep(1)

    procs = []
    for idx, proc in enumerate(psutil.process_iter(), 1):
        with proc.oneshot():
            # skip the idle process and anything without a name
            if proc.pid == 0 or not proc.name():
                continue

            procs.append(
                {
                    "name": proc.name(),
                    "cpu_percent": proc.cpu_percent(interval=None) / psutil.cpu_count(),
                    "memory_percent": proc.memory_percent(),
                    "pid": proc.pid,
                    "ppid": proc.ppid(),
                    "status": proc.status(),
                    "username": proc.username(),
                    "id": idx,
                }
            )

    return procs
|
||||
|
||||
|
||||
def _compress_json(j):
|
||||
return {
|
||||
"wineventlog": base64.b64encode(
|
||||
zlib.compress(json.dumps(j).encode("utf-8", errors="ignore"))
|
||||
).decode("ascii", errors="ignore")
|
||||
}
|
||||
|
||||
|
||||
def get_eventlog(logtype, last_n_days):
    """Collect events from the windows event log *logtype* for the last
    *last_n_days* days.

    Reads the log newest-first and stops once the time window is exceeded
    or every record has been seen.  Returns the event list compressed and
    base64-wrapped via _compress_json.  Any read/parse failure aborts the
    loop silently and returns whatever was collected so far.
    """
    start_time = datetime.datetime.now() - datetime.timedelta(days=last_n_days)
    # newest events first, in sequence
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ

    # map win32 event-type constants to display strings; 0 falls back to INFO
    status_dict = {
        win32con.EVENTLOG_AUDIT_FAILURE: "AUDIT_FAILURE",
        win32con.EVENTLOG_AUDIT_SUCCESS: "AUDIT_SUCCESS",
        win32con.EVENTLOG_INFORMATION_TYPE: "INFO",
        win32con.EVENTLOG_WARNING_TYPE: "WARNING",
        win32con.EVENTLOG_ERROR_TYPE: "ERROR",
        0: "INFO",
    }

    computer = "localhost"
    hand = win32evtlog.OpenEventLog(computer, logtype)
    total = win32evtlog.GetNumberOfEventLogRecords(hand)
    log = []
    uid = 0  # running per-call event counter, also emitted with each event
    done = False

    try:
        while 1:
            events = win32evtlog.ReadEventLog(hand, flags, 0)
            for ev_obj in events:

                uid += 1
                # return once total number of events reach or we'll be stuck in an infinite loop
                if uid >= total:
                    done = True
                    break

                the_time = ev_obj.TimeGenerated.Format()
                time_obj = datetime.datetime.strptime(the_time, "%c")
                # reading backwards, so the first too-old event ends the scan
                if time_obj < start_time:
                    done = True
                    break

                computer = str(ev_obj.ComputerName)
                src = str(ev_obj.SourceName)
                evt_type = str(status_dict[ev_obj.EventType])
                evt_id = str(winerror.HRESULT_CODE(ev_obj.EventID))
                evt_category = str(ev_obj.EventCategory)
                record = str(ev_obj.RecordNumber)
                # strip angle brackets from the formatted message text
                msg = (
                    str(win32evtlogutil.SafeFormatMessage(ev_obj, logtype))
                    .replace("<", "")
                    .replace(">", "")
                )

                event_dict = {
                    "computer": computer,
                    "source": src,
                    "eventType": evt_type,
                    "eventID": evt_id,
                    "eventCategory": evt_category,
                    "message": msg,
                    "time": the_time,
                    "record": record,
                    "uid": uid,
                }

                log.append(event_dict)

            if done:
                break

    except Exception:
        # best-effort: return whatever was collected before the failure
        pass

    win32evtlog.CloseEventLog(hand)
    return _compress_json(log)
|
||||
@@ -20,6 +20,5 @@ omit =
|
||||
*/urls.py
|
||||
*/tests.py
|
||||
*/test.py
|
||||
api/*.py
|
||||
checks/utils.py
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from rest_framework.authtoken.admin import TokenAdmin
|
||||
|
||||
from .models import User
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as djangotime
|
||||
from knox.models import AuthToken
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import pyotp
|
||||
import subprocess
|
||||
|
||||
import pyotp
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from accounts.models import User
|
||||
|
||||
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
from django.db import migrations, models
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0002_auto_20200810_0544'),
|
||||
("accounts", "0002_auto_20200810_0544"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,24 +6,24 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0003_auto_20200922_1344'),
|
||||
("accounts", "0003_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,28 +6,28 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0004_auto_20201002_1257'),
|
||||
("accounts", "0004_auto_20201002_1257"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_by',
|
||||
model_name="user",
|
||||
name="created_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='created_time',
|
||||
model_name="user",
|
||||
name="created_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_by',
|
||||
model_name="user",
|
||||
name="modified_by",
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='modified_time',
|
||||
model_name="user",
|
||||
name="modified_time",
|
||||
field=models.DateTimeField(auto_now=True, null=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-10 20:24
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -6,13 +6,13 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0007_update_agent_primary_key'),
|
||||
("accounts", "0007_update_agent_primary_key"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='dark_mode',
|
||||
model_name="user",
|
||||
name="dark_mode",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2020-12-10 17:00
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0008_user_dark_mode"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="show_community_scripts",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-14 01:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0009_user_show_community_scripts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="agent_dblclick_action",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("editagent", "Edit Agent"),
|
||||
("takecontrol", "Take Control"),
|
||||
("remotebg", "Remote Background"),
|
||||
],
|
||||
default="editagent",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 3.1.5 on 2021-01-18 09:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0010_user_agent_dblclick_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="user",
|
||||
name="default_agent_tbl_tab",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
],
|
||||
default="server",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,13 +1,32 @@
|
||||
from django.db import models
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.db import models
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
AGENT_DBLCLICK_CHOICES = [
|
||||
("editagent", "Edit Agent"),
|
||||
("takecontrol", "Take Control"),
|
||||
("remotebg", "Remote Background"),
|
||||
]
|
||||
|
||||
AGENT_TBL_TAB_CHOICES = [
|
||||
("server", "Servers"),
|
||||
("workstation", "Workstations"),
|
||||
("mixed", "Mixed"),
|
||||
]
|
||||
|
||||
|
||||
class User(AbstractUser, BaseAuditModel):
|
||||
is_active = models.BooleanField(default=True)
|
||||
totp_key = models.CharField(max_length=50, null=True, blank=True)
|
||||
dark_mode = models.BooleanField(default=True)
|
||||
show_community_scripts = models.BooleanField(default=True)
|
||||
agent_dblclick_action = models.CharField(
|
||||
max_length=50, choices=AGENT_DBLCLICK_CHOICES, default="editagent"
|
||||
)
|
||||
default_agent_tbl_tab = models.CharField(
|
||||
max_length=50, choices=AGENT_TBL_TAB_CHOICES, default="server"
|
||||
)
|
||||
|
||||
agent = models.OneToOneField(
|
||||
"agents.Agent",
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
import pyotp
|
||||
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
SerializerMethodField,
|
||||
)
|
||||
from rest_framework.serializers import ModelSerializer, SerializerMethodField
|
||||
|
||||
from .models import User
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import override_settings
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from accounts.models import User
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestAccounts(TacticalTestCase):
|
||||
@@ -155,6 +156,33 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
data = {
|
||||
"id": self.john.pk,
|
||||
"username": "john",
|
||||
"email": "johndoe@xlawgaming.com",
|
||||
"first_name": "John",
|
||||
"last_name": "Doe",
|
||||
}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_not_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
data = {
|
||||
"id": self.john.pk,
|
||||
"username": "john",
|
||||
"email": "johndoe@xlawgaming.com",
|
||||
"first_name": "John",
|
||||
"last_name": "Doe",
|
||||
}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_delete(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
r = self.client.delete(url)
|
||||
@@ -166,6 +194,19 @@ class GetUpdateDeleteUser(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_delete_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_delete_non_root_user(self):
|
||||
url = f"/accounts/{self.john.pk}/users/"
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
|
||||
class TestUserAction(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -184,6 +225,21 @@ class TestUserAction(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_post_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_post_non_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk, "password": "3ASDjh2345kJA!@#)#@__123"}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_put(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
@@ -195,12 +251,42 @@ class TestUserAction(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_darkmode(self):
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
user = User.objects.get(pk=self.john.pk)
|
||||
self.assertEqual(user.totp_key, "")
|
||||
|
||||
@override_settings(ROOT_USER="john")
|
||||
def test_put_non_root_user(self):
|
||||
url = "/accounts/users/reset/"
|
||||
data = {"id": self.john.pk}
|
||||
self.client.force_authenticate(user=self.alice)
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
def test_user_ui(self):
|
||||
url = "/accounts/users/ui/"
|
||||
data = {"dark_mode": False}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data = {"show_community_scripts": True}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data = {
|
||||
"userui": True,
|
||||
"agent_dblclick_action": "editagent",
|
||||
"default_agent_tbl_tab": "mixed",
|
||||
}
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
|
||||
@@ -1,23 +1,21 @@
|
||||
import pyotp
|
||||
|
||||
from django.contrib.auth import login
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.contrib.auth import login
|
||||
from django.db import IntegrityError
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from django.shortcuts import get_object_or_404
|
||||
from knox.views import LoginView as KnoxLoginView
|
||||
from rest_framework import status
|
||||
from rest_framework.authtoken.serializers import AuthTokenSerializer
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from .models import User
|
||||
from agents.models import Agent
|
||||
from logs.models import AuditLog
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .serializers import UserSerializer, TOTPSetupSerializer
|
||||
from .models import User
|
||||
from .serializers import TOTPSetupSerializer, UserSerializer
|
||||
|
||||
|
||||
class CheckCreds(KnoxLoginView):
|
||||
@@ -60,7 +58,7 @@ class LoginView(KnoxLoginView):
|
||||
|
||||
if settings.DEBUG and token == "sekret":
|
||||
valid = True
|
||||
elif totp.verify(token, valid_window=1):
|
||||
elif totp.verify(token, valid_window=10):
|
||||
valid = True
|
||||
|
||||
if valid:
|
||||
@@ -108,6 +106,13 @@ class GetUpdateDeleteUser(APIView):
|
||||
def put(self, request, pk):
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
serializer = UserSerializer(instance=user, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
@@ -115,7 +120,15 @@ class GetUpdateDeleteUser(APIView):
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(User, pk=pk).delete()
|
||||
user = get_object_or_404(User, pk=pk)
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be deleted from the UI")
|
||||
|
||||
user.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -124,8 +137,14 @@ class UserActions(APIView):
|
||||
|
||||
# reset password
|
||||
def post(self, request):
|
||||
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
user.set_password(request.data["password"])
|
||||
user.save()
|
||||
|
||||
@@ -133,8 +152,14 @@ class UserActions(APIView):
|
||||
|
||||
# reset two factor token
|
||||
def put(self, request):
|
||||
|
||||
user = get_object_or_404(User, pk=request.data["id"])
|
||||
if (
|
||||
hasattr(settings, "ROOT_USER")
|
||||
and request.user != user
|
||||
and user.username == settings.ROOT_USER
|
||||
):
|
||||
return notify_error("The root user cannot be modified from the UI")
|
||||
|
||||
user.totp_key = ""
|
||||
user.save()
|
||||
|
||||
@@ -161,6 +186,18 @@ class TOTPSetup(APIView):
|
||||
class UserUI(APIView):
|
||||
def patch(self, request):
|
||||
user = request.user
|
||||
|
||||
if "dark_mode" in request.data.keys():
|
||||
user.dark_mode = request.data["dark_mode"]
|
||||
user.save(update_fields=["dark_mode"])
|
||||
|
||||
if "show_community_scripts" in request.data.keys():
|
||||
user.show_community_scripts = request.data["show_community_scripts"]
|
||||
user.save(update_fields=["show_community_scripts"])
|
||||
|
||||
if "userui" in request.data.keys():
|
||||
user.agent_dblclick_action = request.data["agent_dblclick_action"]
|
||||
user.default_agent_tbl_tab = request.data["default_agent_tbl_tab"]
|
||||
user.save(update_fields=["agent_dblclick_action", "default_agent_tbl_tab"])
|
||||
|
||||
return Response("ok")
|
||||
@@ -1,8 +1,7 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
||||
from .models import Agent, Note, RecoveryAction
|
||||
|
||||
admin.site.register(Agent)
|
||||
admin.site.register(AgentOutage)
|
||||
admin.site.register(RecoveryAction)
|
||||
admin.site.register(Note)
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import os
|
||||
import json
|
||||
|
||||
from model_bakery.recipe import Recipe, seq
|
||||
from itertools import cycle
|
||||
from django.utils import timezone as djangotime
|
||||
from django.conf import settings
|
||||
|
||||
from .models import Agent
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery.recipe import Recipe, foreign_key
|
||||
|
||||
|
||||
def generate_agent_id(hostname):
|
||||
@@ -16,6 +14,9 @@ def generate_agent_id(hostname):
|
||||
return f"{rand}-{hostname}"
|
||||
|
||||
|
||||
site = Recipe("clients.Site")
|
||||
|
||||
|
||||
def get_wmi_data():
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json")
|
||||
@@ -24,8 +25,10 @@ def get_wmi_data():
|
||||
|
||||
|
||||
agent = Recipe(
|
||||
Agent,
|
||||
"agents.Agent",
|
||||
site=foreign_key(site),
|
||||
hostname="DESKTOP-TEST123",
|
||||
version="1.3.0",
|
||||
monitoring_type=cycle(["workstation", "server"]),
|
||||
salt_id=generate_agent_id("DESKTOP-TEST123"),
|
||||
agent_id="71AHC-AA813-HH1BC-AAHH5-00013|DESKTOP-TEST123",
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Generated by Django 3.0.6 on 2020-05-31 01:23
|
||||
|
||||
import django.contrib.postgres.fields.jsonb
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.0.7 on 2020-06-09 16:07
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.0.8 on 2020-08-09 05:31
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Generated by Django 3.1.1 on 2020-09-22 20:57
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,20 +1,26 @@
|
||||
# Generated by Django 3.1.2 on 2020-11-01 22:53
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0006_deployment'),
|
||||
('agents', '0020_auto_20201025_2129'),
|
||||
("clients", "0006_deployment"),
|
||||
("agents", "0020_auto_20201025_2129"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agent',
|
||||
name='site_link',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agents', to='clients.site'),
|
||||
model_name="agent",
|
||||
name="site_link",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="agents",
|
||||
to="clients.site",
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,16 +6,16 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0022_update_site_primary_key'),
|
||||
("agents", "0022_update_site_primary_key"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='agent',
|
||||
name='client',
|
||||
model_name="agent",
|
||||
name="client",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='agent',
|
||||
name='site',
|
||||
model_name="agent",
|
||||
name="site",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -6,13 +6,13 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0023_auto_20201101_2312'),
|
||||
("agents", "0023_auto_20201101_2312"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='agent',
|
||||
old_name='site_link',
|
||||
new_name='site',
|
||||
model_name="agent",
|
||||
old_name="site_link",
|
||||
new_name="site",
|
||||
),
|
||||
]
|
||||
|
||||
27
api/tacticalrmm/agents/migrations/0025_auto_20201122_0407.py
Normal file
27
api/tacticalrmm/agents/migrations/0025_auto_20201122_0407.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-22 04:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0024_auto_20201101_2319"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="recoveryaction",
|
||||
name="mode",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("salt", "Salt"),
|
||||
("mesh", "Mesh"),
|
||||
("command", "Command"),
|
||||
("rpc", "Nats RPC"),
|
||||
],
|
||||
default="mesh",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
28
api/tacticalrmm/agents/migrations/0026_auto_20201125_2334.py
Normal file
28
api/tacticalrmm/agents/migrations/0026_auto_20201125_2334.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-25 23:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("agents", "0025_auto_20201122_0407"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="recoveryaction",
|
||||
name="mode",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("salt", "Salt"),
|
||||
("mesh", "Mesh"),
|
||||
("command", "Command"),
|
||||
("rpc", "Nats RPC"),
|
||||
("checkrunner", "Checkrunner"),
|
||||
],
|
||||
default="mesh",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0026_auto_20201125_2334'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agent',
|
||||
name='overdue_dashboard_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
23
api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py
Normal file
23
api/tacticalrmm/agents/migrations/0028_auto_20210206_1534.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-06 15:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0027_agent_overdue_dashboard_alert'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agentoutage',
|
||||
name='outage_email_sent_time',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='agentoutage',
|
||||
name='outage_sms_sent_time',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
16
api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py
Normal file
16
api/tacticalrmm/agents/migrations/0029_delete_agentoutage.py
Normal file
@@ -0,0 +1,16 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-10 21:56
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0028_auto_20210206_1534'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.DeleteModel(
|
||||
name='AgentOutage',
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/agents/migrations/0030_agent_offline_time.py
Normal file
18
api/tacticalrmm/agents/migrations/0030_agent_offline_time.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.6 on 2021-02-16 08:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0029_delete_agentoutage'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='agent',
|
||||
name='offline_time',
|
||||
field=models.PositiveIntegerField(default=4),
|
||||
),
|
||||
]
|
||||
@@ -1,25 +1,27 @@
|
||||
import requests
|
||||
import datetime as dt
|
||||
import time
|
||||
import asyncio
|
||||
import base64
|
||||
from Crypto.Cipher import AES
|
||||
from Crypto.Random import get_random_bytes
|
||||
from Crypto.Hash import SHA3_384
|
||||
from Crypto.Util.Padding import pad
|
||||
import validators
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import time
|
||||
from collections import Counter
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from distutils.version import LooseVersion
|
||||
from typing import Any, List, Union
|
||||
|
||||
from django.db import models
|
||||
import msgpack
|
||||
import validators
|
||||
from Crypto.Cipher import AES
|
||||
from Crypto.Hash import SHA3_384
|
||||
from Crypto.Random import get_random_bytes
|
||||
from Crypto.Util.Padding import pad
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from nats.aio.client import Client as NATS
|
||||
from nats.aio.errors import ErrTimeout
|
||||
from packaging import version as pyver
|
||||
|
||||
from core.models import CoreSettings, TZ_CHOICES
|
||||
from alerts.models import AlertTemplate
|
||||
from core.models import TZ_CHOICES, CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
@@ -50,6 +52,8 @@ class Agent(BaseAuditModel):
|
||||
mesh_node_id = models.CharField(null=True, blank=True, max_length=255)
|
||||
overdue_email_alert = models.BooleanField(default=False)
|
||||
overdue_text_alert = models.BooleanField(default=False)
|
||||
overdue_dashboard_alert = models.BooleanField(default=False)
|
||||
offline_time = models.PositiveIntegerField(default=4)
|
||||
overdue_time = models.PositiveIntegerField(default=30)
|
||||
check_interval = models.PositiveIntegerField(default=120)
|
||||
needs_reboot = models.BooleanField(default=False)
|
||||
@@ -75,6 +79,24 @@ class Agent(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
|
||||
# get old agent if exists
|
||||
old_agent = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
# check if new agent has been create
|
||||
# or check if policy have changed on agent
|
||||
# or if site has changed on agent and if so generate-policies
|
||||
if (
|
||||
not old_agent
|
||||
or old_agent
|
||||
and old_agent.policy != self.policy
|
||||
or old_agent.site != self.site
|
||||
):
|
||||
self.generate_checks_from_policies()
|
||||
self.generate_tasks_from_policies()
|
||||
|
||||
def __str__(self):
|
||||
return self.hostname
|
||||
|
||||
@@ -82,6 +104,14 @@ class Agent(BaseAuditModel):
|
||||
def client(self):
|
||||
return self.site.client
|
||||
|
||||
@property
|
||||
def has_nats(self):
|
||||
return pyver.parse(self.version) >= pyver.parse("1.1.0")
|
||||
|
||||
@property
|
||||
def has_gotasks(self):
|
||||
return pyver.parse(self.version) >= pyver.parse("1.1.1")
|
||||
|
||||
@property
|
||||
def timezone(self):
|
||||
# return the default timezone unless the timezone is explicity set per agent
|
||||
@@ -109,14 +139,6 @@ class Agent(BaseAuditModel):
|
||||
return settings.DL_32
|
||||
return None
|
||||
|
||||
@property
|
||||
def winsalt_dl(self):
|
||||
if self.arch == "64":
|
||||
return settings.SALT_64
|
||||
elif self.arch == "32":
|
||||
return settings.SALT_32
|
||||
return None
|
||||
|
||||
@property
|
||||
def win_inno_exe(self):
|
||||
if self.arch == "64":
|
||||
@@ -127,7 +149,7 @@ class Agent(BaseAuditModel):
|
||||
|
||||
@property
|
||||
def status(self):
|
||||
offline = djangotime.now() - djangotime.timedelta(minutes=6)
|
||||
offline = djangotime.now() - djangotime.timedelta(minutes=self.offline_time)
|
||||
overdue = djangotime.now() - djangotime.timedelta(minutes=self.overdue_time)
|
||||
|
||||
if self.last_seen is not None:
|
||||
@@ -142,11 +164,7 @@ class Agent(BaseAuditModel):
|
||||
|
||||
@property
|
||||
def has_patches_pending(self):
|
||||
|
||||
if self.winupdates.filter(action="approve").filter(installed=False).exists():
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return self.winupdates.filter(action="approve").filter(installed=False).exists()
|
||||
|
||||
@property
|
||||
def checks(self):
|
||||
@@ -160,13 +178,11 @@ class Agent(BaseAuditModel):
|
||||
elif i.status == "failing":
|
||||
failing += 1
|
||||
|
||||
has_failing_checks = True if failing > 0 else False
|
||||
|
||||
ret = {
|
||||
"total": total,
|
||||
"passing": passing,
|
||||
"failing": failing,
|
||||
"has_failing_checks": has_failing_checks,
|
||||
"has_failing_checks": failing > 0,
|
||||
}
|
||||
return ret
|
||||
|
||||
@@ -254,6 +270,63 @@ class Agent(BaseAuditModel):
|
||||
except:
|
||||
return ["unknown disk"]
|
||||
|
||||
def run_script(
|
||||
self,
|
||||
scriptpk: int,
|
||||
args: List[str] = [],
|
||||
timeout: int = 120,
|
||||
full: bool = False,
|
||||
wait: bool = False,
|
||||
run_on_any: bool = False,
|
||||
) -> Any:
|
||||
|
||||
from scripts.models import Script
|
||||
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
data = {
|
||||
"func": "runscriptfull" if full else "runscript",
|
||||
"timeout": timeout,
|
||||
"script_args": args,
|
||||
"payload": {
|
||||
"code": script.code,
|
||||
"shell": script.shell,
|
||||
},
|
||||
}
|
||||
|
||||
running_agent = self
|
||||
if run_on_any:
|
||||
nats_ping = {"func": "ping", "timeout": 1}
|
||||
|
||||
# try on self first
|
||||
r = asyncio.run(self.nats_cmd(nats_ping))
|
||||
|
||||
if r == "pong":
|
||||
running_agent = self
|
||||
else:
|
||||
online = [
|
||||
agent
|
||||
for agent in Agent.objects.only(
|
||||
"pk", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
if agent.status == "online"
|
||||
]
|
||||
|
||||
for agent in online:
|
||||
r = asyncio.run(agent.nats_cmd(nats_ping))
|
||||
if r == "pong":
|
||||
running_agent = agent
|
||||
break
|
||||
|
||||
if running_agent.pk == self.pk:
|
||||
return "Unable to find an online agent"
|
||||
|
||||
if wait:
|
||||
return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True))
|
||||
else:
|
||||
asyncio.run(running_agent.nats_cmd(data, wait=False))
|
||||
|
||||
return "ok"
|
||||
|
||||
# auto approves updates
|
||||
def approve_updates(self):
|
||||
patch_policy = self.get_patch_policy()
|
||||
@@ -380,14 +453,122 @@ class Agent(BaseAuditModel):
|
||||
|
||||
return patch_policy
|
||||
|
||||
# clear is used to delete managed policy checks from agent
|
||||
# parent_checks specifies a list of checks to delete from agent with matching parent_check field
|
||||
def generate_checks_from_policies(self, clear=False):
|
||||
from automation.models import Policy
|
||||
def get_approved_update_guids(self) -> List[str]:
|
||||
return list(
|
||||
self.winupdates.filter(action="approve", installed=False).values_list(
|
||||
"guid", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
# Clear agent checks managed by policy
|
||||
if clear:
|
||||
self.agentchecks.filter(managed_by_policy=True).delete()
|
||||
# returns alert template assigned in the following order: policy, site, client, global
|
||||
# will return None if nothing is found
|
||||
def get_alert_template(self) -> Union[AlertTemplate, None]:
|
||||
|
||||
site = self.site
|
||||
client = self.client
|
||||
core = CoreSettings.objects.first()
|
||||
|
||||
templates = list()
|
||||
# check if alert template is on a policy assigned to agent
|
||||
if (
|
||||
self.policy
|
||||
and self.policy.alert_template
|
||||
and self.policy.alert_template.is_active
|
||||
):
|
||||
templates.append(self.policy.alert_template)
|
||||
|
||||
# check if policy with alert template is assigned to the site
|
||||
if (
|
||||
self.monitoring_type == "server"
|
||||
and site.server_policy
|
||||
and site.server_policy.alert_template
|
||||
and site.server_policy.alert_template.is_active
|
||||
):
|
||||
templates.append(site.server_policy.alert_template)
|
||||
if (
|
||||
self.monitoring_type == "workstation"
|
||||
and site.workstation_policy
|
||||
and site.workstation_policy.alert_template
|
||||
and site.workstation_policy.alert_template.is_active
|
||||
):
|
||||
templates.append(site.workstation_policy.alert_template)
|
||||
|
||||
# check if alert template is assigned to site
|
||||
if site.alert_template and site.alert_template.is_active:
|
||||
templates.append(site.alert_template)
|
||||
|
||||
# check if policy with alert template is assigned to the client
|
||||
if (
|
||||
self.monitoring_type == "server"
|
||||
and client.server_policy
|
||||
and client.server_policy.alert_template
|
||||
and client.server_policy.alert_template.is_active
|
||||
):
|
||||
templates.append(client.server_policy.alert_template)
|
||||
if (
|
||||
self.monitoring_type == "workstation"
|
||||
and client.workstation_policy
|
||||
and client.workstation_policy.alert_template
|
||||
and client.workstation_policy.alert_template.is_active
|
||||
):
|
||||
templates.append(client.workstation_policy.alert_template)
|
||||
|
||||
# check if alert template is on client and return
|
||||
if client.alert_template and client.alert_template.is_active:
|
||||
templates.append(client.alert_template)
|
||||
|
||||
# check if alert template is applied globally and return
|
||||
if core.alert_template and core.alert_template.is_active:
|
||||
templates.append(core.alert_template)
|
||||
|
||||
# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
|
||||
if (
|
||||
self.monitoring_type == "server"
|
||||
and core.server_policy
|
||||
and core.server_policy.alert_template
|
||||
and core.server_policy.alert_template.is_active
|
||||
):
|
||||
templates.append(core.server_policy.alert_template)
|
||||
if (
|
||||
self.monitoring_type == "workstation"
|
||||
and core.workstation_policy
|
||||
and core.workstation_policy.alert_template
|
||||
and core.workstation_policy.alert_template.is_active
|
||||
):
|
||||
templates.append(core.workstation_policy.alert_template)
|
||||
|
||||
# go through the templates and return the first one that isn't excluded
|
||||
for template in templates:
|
||||
# check if client, site, or agent has been excluded from template
|
||||
if (
|
||||
client.pk
|
||||
in template.excluded_clients.all().values_list("pk", flat=True)
|
||||
or site.pk in template.excluded_sites.all().values_list("pk", flat=True)
|
||||
or self.pk
|
||||
in template.excluded_agents.all()
|
||||
.only("pk")
|
||||
.values_list("pk", flat=True)
|
||||
):
|
||||
continue
|
||||
|
||||
# check if template is excluding desktops
|
||||
elif (
|
||||
self.monitoring_type == "workstation" and template.exclude_workstations
|
||||
):
|
||||
continue
|
||||
|
||||
# check if template is excluding servers
|
||||
elif self.monitoring_type == "server" and template.exclude_servers:
|
||||
continue
|
||||
|
||||
else:
|
||||
return template
|
||||
|
||||
# no alert templates found or agent has been excluded
|
||||
return None
|
||||
|
||||
def generate_checks_from_policies(self):
|
||||
from automation.models import Policy
|
||||
|
||||
# Clear agent checks that have overriden_by_policy set
|
||||
self.agentchecks.update(overriden_by_policy=False)
|
||||
@@ -395,17 +576,9 @@ class Agent(BaseAuditModel):
|
||||
# Generate checks based on policies
|
||||
Policy.generate_policy_checks(self)
|
||||
|
||||
# clear is used to delete managed policy tasks from agent
|
||||
# parent_tasks specifies a list of tasks to delete from agent with matching parent_task field
|
||||
def generate_tasks_from_policies(self, clear=False):
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
def generate_tasks_from_policies(self):
|
||||
from automation.models import Policy
|
||||
|
||||
# Clear agent tasks managed by policy
|
||||
if clear:
|
||||
for task in self.autotasks.filter(managed_by_policy=True):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
|
||||
# Generate tasks based on policies
|
||||
Policy.generate_policy_tasks(self)
|
||||
|
||||
@@ -433,76 +606,36 @@ class Agent(BaseAuditModel):
|
||||
except Exception:
|
||||
return "err"
|
||||
|
||||
def salt_api_cmd(self, **kwargs):
|
||||
|
||||
# salt should always timeout first before the requests' timeout
|
||||
try:
|
||||
timeout = kwargs["timeout"]
|
||||
except KeyError:
|
||||
# default timeout
|
||||
timeout = 15
|
||||
salt_timeout = 12
|
||||
else:
|
||||
if timeout < 8:
|
||||
timeout = 8
|
||||
salt_timeout = 5
|
||||
else:
|
||||
salt_timeout = timeout - 3
|
||||
|
||||
json = {
|
||||
"client": "local",
|
||||
"tgt": self.salt_id,
|
||||
"fun": kwargs["func"],
|
||||
"timeout": salt_timeout,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
async def nats_cmd(self, data, timeout=30, wait=True):
|
||||
nc = NATS()
|
||||
options = {
|
||||
"servers": f"tls://{settings.ALLOWED_HOSTS[0]}:4222",
|
||||
"user": "tacticalrmm",
|
||||
"password": settings.SECRET_KEY,
|
||||
"connect_timeout": 3,
|
||||
"max_reconnect_attempts": 2,
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[json],
|
||||
timeout=timeout,
|
||||
await nc.connect(**options)
|
||||
except:
|
||||
return "natsdown"
|
||||
|
||||
if wait:
|
||||
try:
|
||||
msg = await nc.request(
|
||||
self.agent_id, msgpack.dumps(data), timeout=timeout
|
||||
)
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
try:
|
||||
ret = resp.json()["return"][0][self.salt_id]
|
||||
except Exception as e:
|
||||
logger.error(f"{self.salt_id}: {e}")
|
||||
return "error"
|
||||
except ErrTimeout:
|
||||
ret = "timeout"
|
||||
else:
|
||||
ret = msgpack.loads(msg.data)
|
||||
|
||||
await nc.close()
|
||||
return ret
|
||||
|
||||
def salt_api_async(self, **kwargs):
|
||||
|
||||
json = {
|
||||
"client": "local_async",
|
||||
"tgt": self.salt_id,
|
||||
"fun": kwargs["func"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
return resp
|
||||
else:
|
||||
await nc.publish(self.agent_id, msgpack.dumps(data))
|
||||
await nc.flush()
|
||||
await nc.close()
|
||||
|
||||
@staticmethod
|
||||
def serialize(agent):
|
||||
@@ -511,92 +644,9 @@ class Agent(BaseAuditModel):
|
||||
|
||||
ret = AgentEditSerializer(agent).data
|
||||
del ret["all_timezones"]
|
||||
del ret["client"]
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def salt_batch_async(**kwargs):
|
||||
assert isinstance(kwargs["minions"], list)
|
||||
|
||||
json = {
|
||||
"client": "local_async",
|
||||
"tgt_type": "list",
|
||||
"tgt": kwargs["minions"],
|
||||
"fun": kwargs["func"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
|
||||
if "arg" in kwargs:
|
||||
json.update({"arg": kwargs["arg"]})
|
||||
if "kwargs" in kwargs:
|
||||
json.update({"kwarg": kwargs["kwargs"]})
|
||||
|
||||
try:
|
||||
resp = requests.post(f"http://{settings.SALT_HOST}:8123/run", json=[json])
|
||||
except Exception:
|
||||
return "timeout"
|
||||
|
||||
return resp
|
||||
|
||||
def schedule_reboot(self, obj):
|
||||
|
||||
start_date = dt.datetime.strftime(obj, "%Y-%m-%d")
|
||||
start_time = dt.datetime.strftime(obj, "%H:%M")
|
||||
|
||||
# let windows task scheduler automatically delete the task after it runs
|
||||
end_obj = obj + dt.timedelta(minutes=15)
|
||||
end_date = dt.datetime.strftime(end_obj, "%Y-%m-%d")
|
||||
end_time = dt.datetime.strftime(end_obj, "%H:%M")
|
||||
|
||||
task_name = "TacticalRMM_SchedReboot_" + "".join(
|
||||
random.choice(string.ascii_letters) for _ in range(10)
|
||||
)
|
||||
|
||||
r = self.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="task.create_task",
|
||||
arg=[
|
||||
f"name={task_name}",
|
||||
"force=True",
|
||||
"action_type=Execute",
|
||||
'cmd="C:\\Windows\\System32\\shutdown.exe"',
|
||||
'arguments="/r /t 5 /f"',
|
||||
"trigger_type=Once",
|
||||
f'start_date="{start_date}"',
|
||||
f'start_time="{start_time}"',
|
||||
f'end_date="{end_date}"',
|
||||
f'end_time="{end_time}"',
|
||||
"ac_only=False",
|
||||
"stop_if_on_batteries=False",
|
||||
"delete_after=Immediately",
|
||||
],
|
||||
)
|
||||
|
||||
if r == "error" or (isinstance(r, bool) and not r):
|
||||
return "failed"
|
||||
elif r == "timeout":
|
||||
return "timeout"
|
||||
elif isinstance(r, bool) and r:
|
||||
from logs.models import PendingAction
|
||||
|
||||
details = {
|
||||
"taskname": task_name,
|
||||
"time": str(obj),
|
||||
}
|
||||
PendingAction(agent=self, action_type="schedreboot", details=details).save()
|
||||
|
||||
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
return {"msg": {"time": nice_time, "agent": self.hostname}}
|
||||
else:
|
||||
return "failed"
|
||||
|
||||
def not_supported(self, version_added):
|
||||
if pyver.parse(self.version) < pyver.parse(version_added):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def delete_superseded_updates(self):
|
||||
try:
|
||||
pks = [] # list of pks to delete
|
||||
@@ -634,8 +684,8 @@ class Agent(BaseAuditModel):
|
||||
if action.action_type == "taskaction":
|
||||
from autotasks.tasks import (
|
||||
create_win_task_schedule,
|
||||
enable_or_disable_win_task,
|
||||
delete_win_task_schedule,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
|
||||
task_id = action.details["task_id"]
|
||||
@@ -649,78 +699,217 @@ class Agent(BaseAuditModel):
|
||||
elif action.details["action"] == "taskdelete":
|
||||
delete_win_task_schedule.delay(task_id, pending_action=action.id)
|
||||
|
||||
# for clearing duplicate pending actions on agent
|
||||
def remove_matching_pending_task_actions(self, task_id):
|
||||
# remove any other pending actions on agent with same task_id
|
||||
for action in self.pendingactions.exclude(status="completed"):
|
||||
if action.details["task_id"] == task_id:
|
||||
action.delete()
|
||||
|
||||
class AgentOutage(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
Agent,
|
||||
related_name="agentoutages",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.CASCADE,
|
||||
def handle_alert(self, checkin: bool = False) -> None:
|
||||
from agents.tasks import (
|
||||
agent_outage_email_task,
|
||||
agent_outage_sms_task,
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
)
|
||||
outage_time = models.DateTimeField(auto_now_add=True)
|
||||
recovery_time = models.DateTimeField(null=True, blank=True)
|
||||
outage_email_sent = models.BooleanField(default=False)
|
||||
outage_sms_sent = models.BooleanField(default=False)
|
||||
recovery_email_sent = models.BooleanField(default=False)
|
||||
recovery_sms_sent = models.BooleanField(default=False)
|
||||
from alerts.models import Alert
|
||||
|
||||
@property
|
||||
def is_active(self):
|
||||
return False if self.recovery_time else True
|
||||
# return if agent is in maintenace mode
|
||||
if self.maintenance_mode:
|
||||
return
|
||||
|
||||
alert_template = self.get_alert_template()
|
||||
|
||||
# called when agent is back online
|
||||
if checkin:
|
||||
if Alert.objects.filter(agent=self, resolved=False).exists():
|
||||
|
||||
# resolve alert if exists
|
||||
alert = Alert.objects.get(agent=self, resolved=False)
|
||||
alert.resolve()
|
||||
|
||||
# check if a resolved notification should be emailed
|
||||
if (
|
||||
not alert.resolved_email_sent
|
||||
and alert_template
|
||||
and alert_template.agent_email_on_resolved
|
||||
or self.overdue_email_alert
|
||||
):
|
||||
agent_recovery_email_task.delay(pk=alert.pk)
|
||||
|
||||
# check if a resolved notification should be texted
|
||||
if (
|
||||
not alert.resolved_sms_sent
|
||||
and alert_template
|
||||
and alert_template.agent_text_on_resolved
|
||||
or self.overdue_text_alert
|
||||
):
|
||||
agent_recovery_sms_task.delay(pk=alert.pk)
|
||||
|
||||
# check if any scripts should be run
|
||||
if (
|
||||
not alert.resolved_action_run
|
||||
and alert_template
|
||||
and alert_template.resolved_action
|
||||
):
|
||||
r = self.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert_template.resolved_action_args,
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
if type(r) == dict:
|
||||
alert.resolved_action_retcode = r["retcode"]
|
||||
alert.resolved_action_stdout = r["stdout"]
|
||||
alert.resolved_action_stderr = r["stderr"]
|
||||
alert.resolved_action_execution_time = "{:.4f}".format(
|
||||
r["execution_time"]
|
||||
)
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.resolved_action} failed to run on any agent for {self.hostname} resolved outage"
|
||||
)
|
||||
|
||||
# called when agent is offline
|
||||
else:
|
||||
# check if alert hasn't been created yet so create it
|
||||
if not Alert.objects.filter(agent=self, resolved=False).exists():
|
||||
|
||||
alert = Alert.create_availability_alert(self)
|
||||
|
||||
# add a null check history to allow gaps in graph
|
||||
for check in self.agentchecks.all():
|
||||
check.add_check_history(None)
|
||||
else:
|
||||
alert = Alert.objects.get(agent=self, resolved=False)
|
||||
|
||||
# create dashboard alert if enabled
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.agent_always_alert
|
||||
or self.overdue_dashboard_alert
|
||||
):
|
||||
alert.hidden = False
|
||||
alert.save()
|
||||
|
||||
# send email alert if enabled
|
||||
if (
|
||||
not alert.email_sent
|
||||
and alert_template
|
||||
and alert_template.agent_always_email
|
||||
or self.overdue_email_alert
|
||||
):
|
||||
agent_outage_email_task.delay(
|
||||
pk=alert.pk,
|
||||
alert_interval=alert_template.check_periodic_alert_days
|
||||
if alert_template
|
||||
else None,
|
||||
)
|
||||
|
||||
# send text message if enabled
|
||||
if (
|
||||
not alert.sms_sent
|
||||
and alert_template
|
||||
and alert_template.agent_always_text
|
||||
or self.overdue_text_alert
|
||||
):
|
||||
agent_outage_sms_task.delay(
|
||||
pk=alert.pk,
|
||||
alert_interval=alert_template.check_periodic_alert_days
|
||||
if alert_template
|
||||
else None,
|
||||
)
|
||||
|
||||
# check if any scripts should be run
|
||||
if not alert.action_run and alert_template and alert_template.action:
|
||||
r = self.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert_template.action_args,
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
if isinstance(r, dict):
|
||||
alert.action_retcode = r["retcode"]
|
||||
alert.action_stdout = r["stdout"]
|
||||
alert.action_stderr = r["stderr"]
|
||||
alert.action_execution_time = "{:.4f}".format(r["execution_time"])
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {self.hostname} outage"
|
||||
)
|
||||
|
||||
def send_outage_email(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
alert_template = self.get_alert_template()
|
||||
CORE.send_mail(
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue",
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||
(
|
||||
f"Data has not been received from client {self.agent.client.name}, "
|
||||
f"site {self.agent.site.name}, "
|
||||
f"agent {self.agent.hostname} "
|
||||
f"Data has not been received from client {self.client.name}, "
|
||||
f"site {self.site.name}, "
|
||||
f"agent {self.hostname} "
|
||||
"within the expected time."
|
||||
),
|
||||
alert_template=alert_template,
|
||||
)
|
||||
|
||||
def send_recovery_email(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
alert_template = self.get_alert_template()
|
||||
CORE.send_mail(
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received",
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||
(
|
||||
f"Data has been received from client {self.agent.client.name}, "
|
||||
f"site {self.agent.site.name}, "
|
||||
f"agent {self.agent.hostname} "
|
||||
f"Data has been received from client {self.client.name}, "
|
||||
f"site {self.site.name}, "
|
||||
f"agent {self.hostname} "
|
||||
"after an interruption in data transmission."
|
||||
),
|
||||
alert_template=alert_template,
|
||||
)
|
||||
|
||||
def send_outage_sms(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
alert_template = self.get_alert_template()
|
||||
CORE = CoreSettings.objects.first()
|
||||
CORE.send_sms(
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data overdue"
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data overdue",
|
||||
alert_template=alert_template,
|
||||
)
|
||||
|
||||
def send_recovery_sms(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
alert_template = self.get_alert_template()
|
||||
CORE.send_sms(
|
||||
f"{self.agent.client.name}, {self.agent.site.name}, {self.agent.hostname} - data received"
|
||||
f"{self.client.name}, {self.site.name}, {self.hostname} - data received",
|
||||
alert_template=alert_template,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.agent.hostname
|
||||
|
||||
|
||||
RECOVERY_CHOICES = [
|
||||
("salt", "Salt"),
|
||||
("mesh", "Mesh"),
|
||||
("command", "Command"),
|
||||
("rpc", "Nats RPC"),
|
||||
("checkrunner", "Checkrunner"),
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import pytz
|
||||
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import ReadOnlyField
|
||||
|
||||
from clients.serializers import ClientSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent, Note
|
||||
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from clients.serializers import ClientSerializer
|
||||
|
||||
|
||||
class AgentSerializer(serializers.ModelSerializer):
|
||||
# for vue
|
||||
@@ -34,26 +32,70 @@ class AgentSerializer(serializers.ModelSerializer):
|
||||
]
|
||||
|
||||
|
||||
class AgentOverdueActionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = [
|
||||
"pk",
|
||||
"overdue_email_alert",
|
||||
"overdue_text_alert",
|
||||
"overdue_dashboard_alert",
|
||||
]
|
||||
|
||||
|
||||
class AgentTableSerializer(serializers.ModelSerializer):
|
||||
patches_pending = serializers.ReadOnlyField(source="has_patches_pending")
|
||||
pending_actions = serializers.SerializerMethodField()
|
||||
status = serializers.ReadOnlyField()
|
||||
checks = serializers.ReadOnlyField()
|
||||
last_seen = serializers.SerializerMethodField()
|
||||
client_name = serializers.ReadOnlyField(source="client.name")
|
||||
site_name = serializers.ReadOnlyField(source="site.name")
|
||||
logged_username = serializers.SerializerMethodField()
|
||||
italic = serializers.SerializerMethodField()
|
||||
policy = serializers.ReadOnlyField(source="policy.id")
|
||||
alert_template = serializers.SerializerMethodField()
|
||||
|
||||
def get_last_seen(self, obj):
|
||||
def get_alert_template(self, obj):
|
||||
alert_template = obj.get_alert_template()
|
||||
|
||||
if not alert_template:
|
||||
return None
|
||||
else:
|
||||
return {
|
||||
"name": alert_template.name,
|
||||
"always_email": alert_template.agent_always_email,
|
||||
"always_text": alert_template.agent_always_text,
|
||||
"always_alert": alert_template.agent_always_alert,
|
||||
}
|
||||
|
||||
def get_pending_actions(self, obj):
|
||||
return obj.pendingactions.filter(status="pending").count()
|
||||
|
||||
def get_last_seen(self, obj) -> str:
|
||||
if obj.time_zone is not None:
|
||||
agent_tz = pytz.timezone(obj.time_zone)
|
||||
else:
|
||||
agent_tz = self.context["default_tz"]
|
||||
|
||||
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M:%S")
|
||||
return obj.last_seen.astimezone(agent_tz).strftime("%m %d %Y %H:%M")
|
||||
|
||||
def get_logged_username(self, obj) -> str:
|
||||
if obj.logged_in_username == "None" and obj.status == "online":
|
||||
return obj.last_logged_in_user
|
||||
elif obj.logged_in_username != "None":
|
||||
return obj.logged_in_username
|
||||
else:
|
||||
return "-"
|
||||
|
||||
def get_italic(self, obj) -> bool:
|
||||
return obj.logged_in_username == "None" and obj.status == "online"
|
||||
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = [
|
||||
"id",
|
||||
"alert_template",
|
||||
"hostname",
|
||||
"agent_id",
|
||||
"site_name",
|
||||
@@ -62,15 +104,18 @@ class AgentTableSerializer(serializers.ModelSerializer):
|
||||
"description",
|
||||
"needs_reboot",
|
||||
"patches_pending",
|
||||
"pending_actions",
|
||||
"status",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_dashboard_alert",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"checks",
|
||||
"logged_in_username",
|
||||
"last_logged_in_user",
|
||||
"maintenance_mode",
|
||||
"logged_username",
|
||||
"italic",
|
||||
"policy",
|
||||
]
|
||||
depth = 2
|
||||
|
||||
@@ -96,10 +141,12 @@ class AgentEditSerializer(serializers.ModelSerializer):
|
||||
"timezone",
|
||||
"check_interval",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"all_timezones",
|
||||
"winupdatepolicy",
|
||||
"policy",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -1,302 +1,260 @@
|
||||
from loguru import logger
|
||||
from time import sleep
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import random
|
||||
import requests
|
||||
from packaging import version as pyver
|
||||
|
||||
from time import sleep
|
||||
from typing import List, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
|
||||
from tacticalrmm.celery import app
|
||||
from agents.models import Agent, AgentOutage
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from logs.models import PendingAction
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
OLD_64_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2.exe"
|
||||
OLD_32_PY_AGENT = "https://github.com/wh1te909/winagent/releases/download/v0.11.2/winagent-v0.11.2-x86.exe"
|
||||
|
||||
|
||||
@app.task
|
||||
def send_agent_update_task(pks, version):
|
||||
assert isinstance(pks, list)
|
||||
|
||||
q = Agent.objects.filter(pk__in=pks)
|
||||
agents = [i.pk for i in q if pyver.parse(i.version) < pyver.parse(version)]
|
||||
|
||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
||||
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
def agent_update(pk: int) -> str:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
|
||||
logger.warning(
|
||||
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update."
|
||||
)
|
||||
return "not supported"
|
||||
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
logger.warning(
|
||||
f"Unable to determine arch on {agent.salt_id}. Skipping."
|
||||
f"Unable to determine arch on {agent.hostname}. Skipping agent update."
|
||||
)
|
||||
continue
|
||||
return "noarch"
|
||||
|
||||
# golang agent only backwards compatible with py agent 0.11.2
|
||||
# force an upgrade to the latest python agent if version < 0.11.2
|
||||
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
|
||||
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
|
||||
inno = (
|
||||
"winagent-v0.11.2.exe"
|
||||
if agent.arch == "64"
|
||||
else "winagent-v0.11.2-x86.exe"
|
||||
)
|
||||
else:
|
||||
# removed sqlite in 1.4.0 to get rid of cgo dependency
|
||||
# 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
|
||||
version = settings.LATEST_AGENT_VER
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
logger.info(
|
||||
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
|
||||
else:
|
||||
version = "1.3.0"
|
||||
inno = (
|
||||
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
|
||||
)
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
|
||||
|
||||
if agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists():
|
||||
agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).delete()
|
||||
|
||||
PendingAction.objects.create(
|
||||
agent=agent,
|
||||
action_type="agentupdate",
|
||||
details={
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
)
|
||||
logger.info(f"{agent.salt_id}: {r}")
|
||||
sleep(10)
|
||||
|
||||
nats_data = {
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": url,
|
||||
"version": version,
|
||||
"inno": inno,
|
||||
},
|
||||
}
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
return "created"
|
||||
|
||||
|
||||
@app.task
|
||||
def auto_self_agent_update_task():
|
||||
def send_agent_update_task(pks: List[int]) -> None:
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent_update(pk)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
|
||||
@app.task
|
||||
def auto_self_agent_update_task() -> None:
|
||||
core = CoreSettings.objects.first()
|
||||
if not core.agent_auto_update:
|
||||
logger.info("Agent auto update is disabled. Skipping.")
|
||||
return
|
||||
|
||||
q = Agent.objects.only("pk", "version")
|
||||
agents = [
|
||||
pks: List[int] = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
logger.info(f"Updating {len(agents)}")
|
||||
|
||||
chunks = (agents[i : i + 30] for i in range(0, len(agents), 30))
|
||||
|
||||
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
agent_update(pk)
|
||||
sleep(0.05)
|
||||
sleep(4)
|
||||
|
||||
# skip if we can't determine the arch
|
||||
if agent.arch is None:
|
||||
logger.warning(
|
||||
f"Unable to determine arch on {agent.salt_id}. Skipping."
|
||||
)
|
||||
continue
|
||||
|
||||
# golang agent only backwards compatible with py agent 0.11.2
|
||||
# force an upgrade to the latest python agent if version < 0.11.2
|
||||
if pyver.parse(agent.version) < pyver.parse("0.11.2"):
|
||||
url = OLD_64_PY_AGENT if agent.arch == "64" else OLD_32_PY_AGENT
|
||||
inno = (
|
||||
"winagent-v0.11.2.exe"
|
||||
if agent.arch == "64"
|
||||
else "winagent-v0.11.2-x86.exe"
|
||||
)
|
||||
@app.task
|
||||
def agent_outage_email_task(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||
from alerts.models import Alert
|
||||
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
|
||||
if not alert.email_sent:
|
||||
sleep(random.randint(1, 15))
|
||||
alert.agent.send_outage_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
else:
|
||||
url = agent.winagent_dl
|
||||
inno = agent.win_inno_exe
|
||||
logger.info(
|
||||
f"Updating {agent.salt_id} current version {agent.version} using {inno}"
|
||||
)
|
||||
r = agent.salt_api_async(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": inno,
|
||||
"url": url,
|
||||
},
|
||||
)
|
||||
logger.info(f"{agent.salt_id}: {r}")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def update_salt_minion_task():
|
||||
q = Agent.objects.all()
|
||||
agents = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) >= pyver.parse("0.11.0")
|
||||
and pyver.parse(i.salt_ver) < pyver.parse(settings.LATEST_SALT_VER)
|
||||
]
|
||||
|
||||
chunks = (agents[i : i + 50] for i in range(0, len(agents), 50))
|
||||
|
||||
for chunk in chunks:
|
||||
for pk in chunk:
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_async(func="win_agent.update_salt")
|
||||
sleep(20)
|
||||
|
||||
|
||||
@app.task
|
||||
def get_wmi_detail_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_async(timeout=30, func="win_agent.local_sys_info")
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def sync_salt_modules_task(pk):
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=35, func="saltutil.sync_modules")
|
||||
# successful sync if new/charnged files: {'return': [{'MINION-15': ['modules.get_eventlog', 'modules.win_agent', 'etc...']}]}
|
||||
# successful sync with no new/changed files: {'return': [{'MINION-15': []}]}
|
||||
if r == "timeout" or r == "error":
|
||||
return f"Unable to sync modules {agent.salt_id}"
|
||||
|
||||
return f"Successfully synced salt modules on {agent.hostname}"
|
||||
|
||||
|
||||
@app.task
|
||||
def batch_sync_modules_task():
|
||||
# sync modules, split into chunks of 50 agents to not overload salt
|
||||
agents = Agent.objects.all()
|
||||
online = [i.salt_id for i in agents if i.status == "online"]
|
||||
chunks = (online[i : i + 50] for i in range(0, len(online), 50))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="saltutil.sync_modules")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def batch_sysinfo_task():
|
||||
# update system info using WMI
|
||||
agents = Agent.objects.all()
|
||||
online = [
|
||||
i.salt_id
|
||||
for i in agents
|
||||
if not i.not_supported("0.11.0") and i.status == "online"
|
||||
]
|
||||
chunks = (online[i : i + 30] for i in range(0, len(online), 30))
|
||||
for chunk in chunks:
|
||||
Agent.salt_batch_async(minions=chunk, func="win_agent.local_sys_info")
|
||||
sleep(10)
|
||||
|
||||
|
||||
@app.task
|
||||
def uninstall_agent_task(salt_id):
|
||||
attempts = 0
|
||||
error = False
|
||||
|
||||
while 1:
|
||||
try:
|
||||
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "local",
|
||||
"tgt": salt_id,
|
||||
"fun": "win_agent.uninstall_agent",
|
||||
"timeout": 8,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=10,
|
||||
)
|
||||
ret = r.json()["return"][0][salt_id]
|
||||
except Exception:
|
||||
attempts += 1
|
||||
else:
|
||||
if ret != "ok":
|
||||
attempts += 1
|
||||
else:
|
||||
attempts = 0
|
||||
|
||||
if attempts >= 10:
|
||||
error = True
|
||||
break
|
||||
elif attempts == 0:
|
||||
break
|
||||
|
||||
if error:
|
||||
logger.error(f"{salt_id} uninstall failed")
|
||||
else:
|
||||
logger.info(f"{salt_id} was successfully uninstalled")
|
||||
|
||||
try:
|
||||
r = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "wheel",
|
||||
"fun": "key.delete",
|
||||
"match": salt_id,
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
logger.error(f"{salt_id} unable to remove salt-key")
|
||||
if alert_interval:
|
||||
# send an email only if the last email sent is older than alert interval
|
||||
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||
if alert.email_sent < delta:
|
||||
sleep(random.randint(1, 10))
|
||||
alert.agent.send_outage_email()
|
||||
alert.email_sent = djangotime.now()
|
||||
alert.save(update_fields=["email_sent"])
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_outage_email_task(pk):
|
||||
def agent_recovery_email_task(pk: int) -> str:
|
||||
from alerts.models import Alert
|
||||
|
||||
sleep(random.randint(1, 15))
|
||||
outage = AgentOutage.objects.get(pk=pk)
|
||||
outage.send_outage_email()
|
||||
outage.outage_email_sent = True
|
||||
outage.save(update_fields=["outage_email_sent"])
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
alert.agent.send_recovery_email()
|
||||
alert.resolved_email_sent = djangotime.now()
|
||||
alert.save(update_fields=["resolved_email_sent"])
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_recovery_email_task(pk):
|
||||
def agent_outage_sms_task(pk: int, alert_interval: Union[float, None] = None) -> str:
|
||||
from alerts.models import Alert
|
||||
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
|
||||
if not alert.sms_sent:
|
||||
sleep(random.randint(1, 15))
|
||||
outage = AgentOutage.objects.get(pk=pk)
|
||||
outage.send_recovery_email()
|
||||
outage.recovery_email_sent = True
|
||||
outage.save(update_fields=["recovery_email_sent"])
|
||||
alert.agent.send_outage_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
else:
|
||||
if alert_interval:
|
||||
# send an sms only if the last sms sent is older than alert interval
|
||||
delta = djangotime.now() - dt.timedelta(days=alert_interval)
|
||||
if alert.sms_sent < delta:
|
||||
sleep(random.randint(1, 10))
|
||||
alert.agent.send_outage_sms()
|
||||
alert.sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["sms_sent"])
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_outage_sms_task(pk):
|
||||
def agent_recovery_sms_task(pk: int) -> str:
|
||||
from alerts.models import Alert
|
||||
|
||||
sleep(random.randint(1, 3))
|
||||
outage = AgentOutage.objects.get(pk=pk)
|
||||
outage.send_outage_sms()
|
||||
outage.outage_sms_sent = True
|
||||
outage.save(update_fields=["outage_sms_sent"])
|
||||
alert = Alert.objects.get(pk=pk)
|
||||
alert.agent.send_recovery_sms()
|
||||
alert.resolved_sms_sent = djangotime.now()
|
||||
alert.save(update_fields=["resolved_sms_sent"])
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_recovery_sms_task(pk):
|
||||
sleep(random.randint(1, 3))
|
||||
outage = AgentOutage.objects.get(pk=pk)
|
||||
outage.send_recovery_sms()
|
||||
outage.recovery_sms_sent = True
|
||||
outage.save(update_fields=["recovery_sms_sent"])
|
||||
|
||||
|
||||
@app.task
|
||||
def agent_outages_task():
|
||||
agents = Agent.objects.only("pk")
|
||||
def agent_outages_task() -> None:
|
||||
agents = Agent.objects.only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"offline_time",
|
||||
"overdue_time",
|
||||
"overdue_email_alert",
|
||||
"overdue_text_alert",
|
||||
"overdue_dashboard_alert",
|
||||
)
|
||||
|
||||
for agent in agents:
|
||||
if agent.status == "overdue":
|
||||
outages = AgentOutage.objects.filter(agent=agent)
|
||||
if outages and outages.last().is_active:
|
||||
continue
|
||||
agent.handle_alert()
|
||||
|
||||
outage = AgentOutage(agent=agent)
|
||||
outage.save()
|
||||
|
||||
if agent.overdue_email_alert and not agent.maintenance_mode:
|
||||
agent_outage_email_task.delay(pk=outage.pk)
|
||||
@app.task
|
||||
def handle_agent_recovery_task(pk: int) -> None:
|
||||
sleep(10)
|
||||
from agents.models import RecoveryAction
|
||||
|
||||
if agent.overdue_text_alert and not agent.maintenance_mode:
|
||||
agent_outage_sms_task.delay(pk=outage.pk)
|
||||
action = RecoveryAction.objects.get(pk=pk)
|
||||
if action.mode == "command":
|
||||
data = {"func": "recoverycmd", "recoverycommand": action.command}
|
||||
else:
|
||||
data = {"func": "recover", "payload": {"mode": action.mode}}
|
||||
|
||||
asyncio.run(action.agent.nats_cmd(data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def run_script_email_results_task(
|
||||
agentpk: int, scriptpk: int, nats_timeout: int, emails: List[str]
|
||||
):
|
||||
agent = Agent.objects.get(pk=agentpk)
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
r = agent.run_script(scriptpk=script.pk, full=True, timeout=nats_timeout, wait=True)
|
||||
if r == "timeout":
|
||||
logger.error(f"{agent.hostname} timed out running script.")
|
||||
return
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
subject = f"{agent.hostname} {script.name} Results"
|
||||
exec_time = "{:.4f}".format(r["execution_time"])
|
||||
body = (
|
||||
subject
|
||||
+ f"\nReturn code: {r['retcode']}\nExecution time: {exec_time} seconds\nStdout: {r['stdout']}\nStderr: {r['stderr']}"
|
||||
)
|
||||
|
||||
import smtplib
|
||||
from email.message import EmailMessage
|
||||
|
||||
msg = EmailMessage()
|
||||
msg["Subject"] = subject
|
||||
msg["From"] = CORE.smtp_from_email
|
||||
|
||||
if emails:
|
||||
msg["To"] = ", ".join(emails)
|
||||
else:
|
||||
msg["To"] = ", ".join(CORE.email_alert_recipients)
|
||||
|
||||
msg.set_content(body)
|
||||
|
||||
try:
|
||||
with smtplib.SMTP(CORE.smtp_host, CORE.smtp_port, timeout=20) as server:
|
||||
if CORE.smtp_requires_auth:
|
||||
server.ehlo()
|
||||
server.starttls()
|
||||
server.login(CORE.smtp_host_user, CORE.smtp_host_password)
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
else:
|
||||
server.send_message(msg)
|
||||
server.quit()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
@@ -1,30 +1,21 @@
|
||||
import json
|
||||
import os
|
||||
from itertools import cycle
|
||||
from typing import List
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework.authtoken.models import Token
|
||||
from model_bakery import baker
|
||||
from packaging import version as pyver
|
||||
|
||||
from accounts.models import User
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from .serializers import AgentSerializer
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from .models import Agent
|
||||
from .tasks import (
|
||||
auto_self_agent_update_task,
|
||||
update_salt_minion_task,
|
||||
get_wmi_detail_task,
|
||||
sync_salt_modules_task,
|
||||
batch_sync_modules_task,
|
||||
batch_sysinfo_task,
|
||||
OLD_64_PY_AGENT,
|
||||
OLD_32_PY_AGENT,
|
||||
)
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Agent
|
||||
from .serializers import AgentSerializer
|
||||
from .tasks import auto_self_agent_update_task
|
||||
|
||||
|
||||
class TestAgentViews(TacticalTestCase):
|
||||
@@ -34,7 +25,9 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
client = baker.make("clients.Client", name="Google")
|
||||
site = baker.make("clients.Site", client=client, name="LA Office")
|
||||
self.agent = baker.make_recipe("agents.online_agent", site=site)
|
||||
self.agent = baker.make_recipe(
|
||||
"agents.online_agent", site=site, version="1.1.1"
|
||||
)
|
||||
baker.make_recipe("winupdate.winupdate_policy", agent=self.agent)
|
||||
|
||||
def test_get_patch_policy(self):
|
||||
@@ -72,38 +65,60 @@ class TestAgentViews(TacticalTestCase):
|
||||
@patch("agents.tasks.send_agent_update_task.delay")
|
||||
def test_update_agents(self, mock_task):
|
||||
url = "/agents/updateagents/"
|
||||
data = {"pks": [1, 2, 3, 5, 10], "version": "0.11.1"}
|
||||
baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
_quantity=15,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.3.0",
|
||||
_quantity=15,
|
||||
)
|
||||
|
||||
pks: List[int] = list(
|
||||
Agent.objects.only("pk", "version").values_list("pk", flat=True)
|
||||
)
|
||||
|
||||
data = {"pks": pks}
|
||||
expected: List[int] = [
|
||||
i.pk
|
||||
for i in Agent.objects.only("pk", "version")
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(pks=data["pks"], version=data["version"])
|
||||
mock_task.assert_called_with(pks=expected)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_ping(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_ping(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/ping/"
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "natsdown"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "pong"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "online"}
|
||||
self.assertEqual(r.json(), ret)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "asdasjdaksdasd"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
ret = {"name": self.agent.hostname, "status": "offline"}
|
||||
@@ -111,41 +126,29 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
def test_uninstall(self, mock_task):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
@patch("agents.views.reload_nats")
|
||||
def test_uninstall(self, reload_nats, nats_cmd):
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
r = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(self.agent.salt_id)
|
||||
nats_cmd.assert_called_with({"func": "uninstall"}, wait=False)
|
||||
reload_nats.assert_called_once()
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.tasks.uninstall_agent_task.delay")
|
||||
def test_uninstall_catch_no_user(self, mock_task):
|
||||
# setup data
|
||||
agent_user = User.objects.create_user(
|
||||
username=self.agent.agent_id, password=User.objects.make_random_password(60)
|
||||
)
|
||||
agent_token = Token.objects.create(user=agent_user)
|
||||
|
||||
url = "/agents/uninstall/"
|
||||
data = {"pk": self.agent.pk}
|
||||
|
||||
agent_user.delete()
|
||||
|
||||
r = self.client.delete(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_task.assert_called_with(self.agent.salt_id)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_processes(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/getprocs/"
|
||||
agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
|
||||
url_old = f"/agents/{agent_old.pk}/getprocs/"
|
||||
r = self.client.get(url_old)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
agent = baker.make_recipe("agents.online_agent", version="1.2.0")
|
||||
url = f"/agents/{agent.pk}/getprocs/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/procs.json")
|
||||
@@ -155,90 +158,93 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
assert any(i["name"] == "Registry" for i in mock_ret.return_value)
|
||||
assert any(
|
||||
i["memory_percent"] == 0.004843281375620747 for i in mock_ret.return_value
|
||||
)
|
||||
assert any(i["membytes"] == 434655234324 for i in mock_ret.return_value)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_kill_proc(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_kill_proc(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/8234/killproc/"
|
||||
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "process doesn't exist"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_get_event_log(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Application/30/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/eventlograw.json")
|
||||
) as f:
|
||||
mock_ret.return_value = json.load(f)
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_get_event_log(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Application/22/"
|
||||
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "tacticalrmm/test_data/appeventlog.json")
|
||||
) as f:
|
||||
decoded = json.load(f)
|
||||
nats_cmd.return_value = json.load(f)
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(decoded, r.json())
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "eventlog",
|
||||
"timeout": 30,
|
||||
"payload": {
|
||||
"logname": "Application",
|
||||
"days": str(22),
|
||||
},
|
||||
},
|
||||
timeout=32,
|
||||
)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
url = f"/agents/{self.agent.pk}/geteventlog/Security/6/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "eventlog",
|
||||
"timeout": 180,
|
||||
"payload": {
|
||||
"logname": "Security",
|
||||
"days": str(6),
|
||||
},
|
||||
},
|
||||
timeout=182,
|
||||
)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_power_action(self, mock_ret):
|
||||
url = f"/agents/poweraction/"
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_reboot_now(self, nats_cmd):
|
||||
url = f"/agents/reboot/"
|
||||
|
||||
data = {"pk": self.agent.pk, "action": "rebootnow"}
|
||||
mock_ret.return_value = True
|
||||
data = {"pk": self.agent.pk}
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "rebootnow"}, timeout=10)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_send_raw_cmd(self, mock_ret):
|
||||
url = f"/agents/sendrawcmd/"
|
||||
|
||||
@@ -257,36 +263,39 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_reboot_later(self, mock_ret):
|
||||
url = f"/agents/rebootlater/"
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_reboot_later(self, nats_cmd):
|
||||
url = f"/agents/reboot/"
|
||||
|
||||
data = {
|
||||
"pk": self.agent.pk,
|
||||
"datetime": "2025-08-29 18:41",
|
||||
}
|
||||
|
||||
mock_ret.return_value = True
|
||||
r = self.client.post(url, data, format="json")
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data["time"], "August 29, 2025 at 06:41 PM")
|
||||
self.assertEqual(r.data["agent"], self.agent.hostname)
|
||||
|
||||
mock_ret.return_value = "failed"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"trigger": "once",
|
||||
"name": r.data["task_name"],
|
||||
"year": 2025,
|
||||
"month": "August",
|
||||
"day": 29,
|
||||
"hour": 18,
|
||||
"min": 41,
|
||||
},
|
||||
}
|
||||
nats_cmd.assert_called_with(nats_data, timeout=10)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
nats_cmd.return_value = "error creating task"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -294,12 +303,12 @@ class TestAgentViews(TacticalTestCase):
|
||||
"pk": self.agent.pk,
|
||||
"datetime": "rm -rf /",
|
||||
}
|
||||
r = self.client.post(url, data_invalid, format="json")
|
||||
r = self.client.patch(url, data_invalid, format="json")
|
||||
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "Invalid date")
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("os.path.exists")
|
||||
@patch("subprocess.run")
|
||||
@@ -348,7 +357,6 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertIn("rdp", r.json()["cmd"])
|
||||
self.assertNotIn("power", r.json()["cmd"])
|
||||
self.assertNotIn("ping", r.json()["cmd"])
|
||||
|
||||
data.update({"ping": 1, "power": 1})
|
||||
r = self.client.post(url, data, format="json")
|
||||
@@ -367,7 +375,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
data["mode"] = "salt"
|
||||
data["mode"] = "mesh"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("pending", r.json())
|
||||
@@ -387,7 +395,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.agent.version = "0.9.4"
|
||||
self.agent.save(update_fields=["version"])
|
||||
data["mode"] = "salt"
|
||||
data["mode"] = "mesh"
|
||||
r = self.client.post(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertIn("0.9.5", r.json())
|
||||
@@ -421,6 +429,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
"site": site.id,
|
||||
"monitoring_type": "workstation",
|
||||
"description": "asjdk234andasd",
|
||||
"offline_time": 4,
|
||||
"overdue_time": 300,
|
||||
"check_interval": 60,
|
||||
"overdue_email_alert": True,
|
||||
@@ -469,7 +478,14 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.assertIn("&viewmode=13", r.data["file"])
|
||||
self.assertIn("&viewmode=12", r.data["terminal"])
|
||||
self.assertIn("&viewmode=11", r.data["control"])
|
||||
self.assertIn("mstsc.html?login=", r.data["webrdp"])
|
||||
|
||||
self.assertIn("&gotonode=", r.data["file"])
|
||||
self.assertIn("&gotonode=", r.data["terminal"])
|
||||
self.assertIn("&gotonode=", r.data["control"])
|
||||
|
||||
self.assertIn("?login=", r.data["file"])
|
||||
self.assertIn("?login=", r.data["terminal"])
|
||||
self.assertIn("?login=", r.data["control"])
|
||||
|
||||
self.assertEqual(self.agent.hostname, r.data["hostname"])
|
||||
self.assertEqual(self.agent.client.name, r.data["client"])
|
||||
@@ -512,42 +528,20 @@ class TestAgentViews(TacticalTestCase):
|
||||
def test_overdue_action(self):
|
||||
url = "/agents/overdueaction/"
|
||||
|
||||
payload = {"pk": self.agent.pk, "alertType": "email", "action": "enabled"}
|
||||
payload = {"pk": self.agent.pk, "overdue_email_alert": True}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertTrue(agent.overdue_email_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "email", "action": "disabled"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertFalse(agent.overdue_email_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "text", "action": "enabled"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertTrue(agent.overdue_text_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "text", "action": "disabled"})
|
||||
payload = {"pk": self.agent.pk, "overdue_text_alert": False}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
agent = Agent.objects.get(pk=self.agent.pk)
|
||||
self.assertFalse(agent.overdue_text_alert)
|
||||
self.assertEqual(self.agent.hostname, r.data)
|
||||
|
||||
payload.update({"alertType": "email", "action": "523423"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload.update({"alertType": "text", "action": "asdasd3434asdasd"})
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_list_agents_no_detail(self):
|
||||
@@ -568,15 +562,18 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
""" @patch("winupdate.tasks.bulk_check_for_updates_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_script_task.delay")
|
||||
@patch("scripts.tasks.handle_bulk_command_task.delay")
|
||||
@patch("agents.models.Agent.salt_batch_async")
|
||||
def test_bulk_cmd_script(self, mock_ret, mock_update):
|
||||
def test_bulk_cmd_script(
|
||||
self, salt_batch_async, bulk_command, bulk_script, mock_update
|
||||
):
|
||||
url = "/agents/bulk/"
|
||||
|
||||
mock_ret.return_value = "ok"
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "all",
|
||||
"target": "agents",
|
||||
"client": None,
|
||||
"site": None,
|
||||
@@ -589,10 +586,12 @@ class TestAgentViews(TacticalTestCase):
|
||||
}
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "servers",
|
||||
"target": "agents",
|
||||
"client": None,
|
||||
"site": None,
|
||||
@@ -607,12 +606,11 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "workstations",
|
||||
"target": "client",
|
||||
"client": self.agent.client.id,
|
||||
"site": None,
|
||||
"agentPKs": [
|
||||
self.agent.pk,
|
||||
],
|
||||
"agentPKs": [],
|
||||
"cmd": "gpupdate /force",
|
||||
"timeout": 300,
|
||||
"shell": "cmd",
|
||||
@@ -620,9 +618,11 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "command",
|
||||
"monType": "all",
|
||||
"target": "client",
|
||||
"client": self.agent.client.id,
|
||||
"site": self.agent.site.id,
|
||||
@@ -636,15 +636,11 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
payload["client"] = self.agent.client.id
|
||||
payload["site"] = self.agent.site.id
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
bulk_command.assert_called_with([self.agent.pk], "gpupdate /force", "cmd", 300)
|
||||
|
||||
payload = {
|
||||
"mode": "scan",
|
||||
"monType": "all",
|
||||
"target": "agents",
|
||||
"client": None,
|
||||
"site": None,
|
||||
@@ -652,13 +648,13 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.agent.pk,
|
||||
],
|
||||
}
|
||||
mock_ret.return_value = "ok"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
mock_update.assert_called_once()
|
||||
mock_update.assert_called_with(minions=[self.agent.salt_id])
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload = {
|
||||
"mode": "install",
|
||||
"monType": "all",
|
||||
"target": "client",
|
||||
"client": self.agent.client.id,
|
||||
"site": None,
|
||||
@@ -666,6 +662,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
self.agent.pk,
|
||||
],
|
||||
}
|
||||
salt_batch_async.return_value = "ok"
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -679,43 +676,20 @@ class TestAgentViews(TacticalTestCase):
|
||||
|
||||
# TODO mock the script
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
self.check_not_authenticated("post", url) """
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_restart_mesh(self, mock_ret):
|
||||
url = f"/agents/{self.agent.pk}/restartmesh/"
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = False
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = True
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_recover_mesh(self, mock_ret):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_recover_mesh(self, nats_cmd):
|
||||
url = f"/agents/{self.agent.pk}/recovermesh/"
|
||||
mock_ret.return_value = True
|
||||
nats_cmd.return_value = "ok"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn(self.agent.hostname, r.data)
|
||||
nats_cmd.assert_called_with(
|
||||
{"func": "recover", "payload": {"mode": "mesh"}}, timeout=45
|
||||
)
|
||||
|
||||
mock_ret.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
nats_cmd.return_value = "timeout"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
@@ -729,6 +703,7 @@ class TestAgentViews(TacticalTestCase):
|
||||
class TestAgentViewsNew(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_agent_counts(self):
|
||||
url = "/agents/agent_counts/"
|
||||
@@ -739,15 +714,12 @@ class TestAgentViewsNew(TacticalTestCase):
|
||||
monitoring_type=cycle(["server", "workstation"]),
|
||||
_quantity=6,
|
||||
)
|
||||
agents = baker.make_recipe(
|
||||
baker.make_recipe(
|
||||
"agents.overdue_agent",
|
||||
monitoring_type=cycle(["server", "workstation"]),
|
||||
_quantity=6,
|
||||
)
|
||||
|
||||
# make an AgentOutage for every overdue agent
|
||||
baker.make("agents.AgentOutage", agent=cycle(agents), _quantity=6)
|
||||
|
||||
# returned data should be this
|
||||
data = {
|
||||
"total_server_count": 6,
|
||||
@@ -804,217 +776,102 @@ class TestAgentTasks(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async", return_value=None)
|
||||
def test_get_wmi_detail_task(self, salt_api_async):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
ret = get_wmi_detail_task.s(self.agent.pk).apply()
|
||||
salt_api_async.assert_called_with(timeout=30, func="win_agent.local_sys_info")
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_agent_update(self, nats_cmd):
|
||||
from agents.tasks import agent_update
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_sync_salt_modules_task(self, salt_api_cmd):
|
||||
self.agent = baker.make_recipe("agents.agent")
|
||||
salt_api_cmd.return_value = {"return": [{f"{self.agent.salt_id}": []}]}
|
||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
||||
salt_api_cmd.assert_called_with(timeout=35, func="saltutil.sync_modules")
|
||||
agent_noarch = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Error getting OS",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
)
|
||||
r = agent_update(agent_noarch.pk)
|
||||
self.assertEqual(r, "noarch")
|
||||
|
||||
agent_1111 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.11",
|
||||
)
|
||||
r = agent_update(agent_1111.pk)
|
||||
self.assertEqual(r, "not supported")
|
||||
|
||||
agent64_1112 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.1.12",
|
||||
)
|
||||
|
||||
r = agent_update(agent64_1112.pk)
|
||||
self.assertEqual(r, "created")
|
||||
action = PendingAction.objects.get(agent__pk=agent64_1112.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
self.assertEqual(
|
||||
ret.result, f"Successfully synced salt modules on {self.agent.hostname}"
|
||||
action.details["url"],
|
||||
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
)
|
||||
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
|
||||
self.assertEqual(action.details["version"], "1.3.0")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
|
||||
"version": "1.3.0",
|
||||
"inno": "winagent-v1.3.0.exe",
|
||||
},
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
salt_api_cmd.return_value = "timeout"
|
||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
|
||||
|
||||
salt_api_cmd.return_value = "error"
|
||||
ret = sync_salt_modules_task.s(self.agent.pk).apply()
|
||||
self.assertEqual(ret.result, f"Unable to sync modules {self.agent.salt_id}")
|
||||
|
||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_batch_sync_modules_task(self, mock_sleep, salt_batch_async):
|
||||
# chunks of 50, 60 online should run only 2 times
|
||||
baker.make_recipe(
|
||||
"agents.online_agent", last_seen=djangotime.now(), _quantity=60
|
||||
)
|
||||
baker.make_recipe(
|
||||
"agents.overdue_agent",
|
||||
last_seen=djangotime.now() - djangotime.timedelta(minutes=9),
|
||||
_quantity=115,
|
||||
)
|
||||
ret = batch_sync_modules_task.s().apply()
|
||||
self.assertEqual(salt_batch_async.call_count, 2)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_batch_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_batch_sysinfo_task(self, mock_sleep, salt_batch_async):
|
||||
# chunks of 30, 70 online should run only 3 times
|
||||
self.online = baker.make_recipe(
|
||||
"agents.online_agent", version=settings.LATEST_AGENT_VER, _quantity=70
|
||||
)
|
||||
self.overdue = baker.make_recipe(
|
||||
"agents.overdue_agent", version=settings.LATEST_AGENT_VER, _quantity=115
|
||||
)
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
self.assertEqual(salt_batch_async.call_count, 3)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_batch_async.reset_mock()
|
||||
[i.delete() for i in self.online]
|
||||
[i.delete() for i in self.overdue]
|
||||
|
||||
# test old agents, should not run
|
||||
self.online_old = baker.make_recipe(
|
||||
"agents.online_agent", version="0.10.2", _quantity=70
|
||||
)
|
||||
self.overdue_old = baker.make_recipe(
|
||||
"agents.overdue_agent", version="0.10.2", _quantity=115
|
||||
)
|
||||
ret = batch_sysinfo_task.s().apply()
|
||||
salt_batch_async.assert_not_called()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async", return_value=None)
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_update_salt_minion_task(self, mock_sleep, salt_api_async):
|
||||
# test agents that need salt update
|
||||
self.agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
salt_ver="1.0.3",
|
||||
_quantity=53,
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(salt_api_async.call_count, 53)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
[i.delete() for i in self.agents]
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test agents that need salt update but agent version too low
|
||||
self.agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version="0.10.2",
|
||||
salt_ver="1.0.3",
|
||||
_quantity=53,
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_api_async.assert_not_called()
|
||||
[i.delete() for i in self.agents]
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test agents already on latest salt ver
|
||||
self.agents = baker.make_recipe(
|
||||
"agents.agent",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
salt_ver=settings.LATEST_SALT_VER,
|
||||
_quantity=53,
|
||||
)
|
||||
ret = update_salt_minion_task.s().apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
salt_api_async.assert_not_called()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_async")
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, salt_api_async):
|
||||
# test 64bit golang agent
|
||||
self.agent64 = baker.make_recipe(
|
||||
agent_64_130 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.0.0",
|
||||
version="1.3.0",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
nats_cmd.return_value = "ok"
|
||||
r = agent_update(agent_64_130.pk)
|
||||
self.assertEqual(r, "created")
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "agentupdate",
|
||||
"payload": {
|
||||
"url": settings.DL_64,
|
||||
"version": settings.LATEST_AGENT_VER,
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.agent64.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test 32bit golang agent
|
||||
self.agent32 = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 7 Professional, 32 bit (build 7601.24544)",
|
||||
version="1.0.0",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
|
||||
"url": settings.DL_32,
|
||||
},
|
||||
wait=False,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.agent32.delete()
|
||||
salt_api_async.reset_mock()
|
||||
action = PendingAction.objects.get(agent__pk=agent_64_130.pk)
|
||||
self.assertEqual(action.action_type, "agentupdate")
|
||||
self.assertEqual(action.status, "pending")
|
||||
|
||||
# test agent that has a null os field
|
||||
self.agentNone = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system=None,
|
||||
version="1.0.0",
|
||||
)
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_not_called()
|
||||
self.agentNone.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test auto update disabled in global settings
|
||||
self.agent64 = baker.make_recipe(
|
||||
@patch("agents.tasks.agent_update")
|
||||
@patch("agents.tasks.sleep", return_value=None)
|
||||
def test_auto_self_agent_update_task(self, mock_sleep, agent_update):
|
||||
baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.0.0",
|
||||
version=settings.LATEST_AGENT_VER,
|
||||
_quantity=23,
|
||||
)
|
||||
baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="1.3.0",
|
||||
_quantity=33,
|
||||
)
|
||||
|
||||
self.coresettings.agent_auto_update = False
|
||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_not_called()
|
||||
|
||||
# reset core settings
|
||||
self.agent64.delete()
|
||||
salt_api_async.reset_mock()
|
||||
r = auto_self_agent_update_task.s().apply()
|
||||
self.assertEqual(agent_update.call_count, 0)
|
||||
|
||||
self.coresettings.agent_auto_update = True
|
||||
self.coresettings.save(update_fields=["agent_auto_update"])
|
||||
|
||||
# test 64bit python agent
|
||||
self.agent64py = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
|
||||
version="0.11.1",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": "winagent-v0.11.2.exe",
|
||||
"url": OLD_64_PY_AGENT,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.agent64py.delete()
|
||||
salt_api_async.reset_mock()
|
||||
|
||||
# test 32bit python agent
|
||||
self.agent32py = baker.make_recipe(
|
||||
"agents.agent",
|
||||
operating_system="Windows 7 Professional, 32 bit (build 7601.24544)",
|
||||
version="0.11.1",
|
||||
)
|
||||
salt_api_async.return_value = True
|
||||
ret = auto_self_agent_update_task.s().apply()
|
||||
salt_api_async.assert_called_with(
|
||||
func="win_agent.do_agent_update_v2",
|
||||
kwargs={
|
||||
"inno": "winagent-v0.11.2-x86.exe",
|
||||
"url": OLD_32_PY_AGENT,
|
||||
},
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
r = auto_self_agent_update_task.s().apply()
|
||||
self.assertEqual(agent_update.call_count, 33)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
@@ -12,7 +13,6 @@ urlpatterns = [
|
||||
path("<pk>/agentdetail/", views.agent_detail),
|
||||
path("<int:pk>/meshcentral/", views.meshcentral),
|
||||
path("<str:arch>/getmeshexe/", views.get_mesh_exe),
|
||||
path("poweraction/", views.power_action),
|
||||
path("uninstall/", views.uninstall),
|
||||
path("editagent/", views.edit_agent),
|
||||
path("<pk>/geteventlog/<logtype>/<days>/", views.get_event_log),
|
||||
@@ -20,16 +20,16 @@ urlpatterns = [
|
||||
path("updateagents/", views.update_agents),
|
||||
path("<pk>/getprocs/", views.get_processes),
|
||||
path("<pk>/<pid>/killproc/", views.kill_proc),
|
||||
path("rebootlater/", views.reboot_later),
|
||||
path("reboot/", views.Reboot.as_view()),
|
||||
path("installagent/", views.install_agent),
|
||||
path("<int:pk>/ping/", views.ping),
|
||||
path("recover/", views.recover),
|
||||
path("runscript/", views.run_script),
|
||||
path("<int:pk>/restartmesh/", views.restart_mesh),
|
||||
path("<int:pk>/recovermesh/", views.recover_mesh),
|
||||
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
|
||||
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
|
||||
path("bulk/", views.bulk),
|
||||
path("agent_counts/", views.agent_counts),
|
||||
path("maintenance/", views.agent_maintenance),
|
||||
path("<int:pk>/wmi/", views.WMI.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,45 +1,40 @@
|
||||
from loguru import logger
|
||||
import os
|
||||
import subprocess
|
||||
import zlib
|
||||
import json
|
||||
import base64
|
||||
import pytz
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
from packaging import version as pyver
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import subprocess
|
||||
from typing import List
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.http import HttpResponse
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework import generics, status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status, generics
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from .models import Agent, AgentOutage, RecoveryAction, Note
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from clients.models import Client, Site
|
||||
from accounts.models import User
|
||||
from core.models import CoreSettings
|
||||
from logs.models import AuditLog, PendingAction
|
||||
from scripts.models import Script
|
||||
from logs.models import AuditLog
|
||||
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
|
||||
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task
|
||||
|
||||
from .models import Agent, Note, RecoveryAction
|
||||
from .serializers import (
|
||||
AgentSerializer,
|
||||
AgentHostnameSerializer,
|
||||
AgentTableSerializer,
|
||||
AgentEditSerializer,
|
||||
AgentHostnameSerializer,
|
||||
AgentOverdueActionSerializer,
|
||||
AgentSerializer,
|
||||
AgentTableSerializer,
|
||||
NoteSerializer,
|
||||
NotesSerializer,
|
||||
)
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .tasks import uninstall_agent_task, send_agent_update_task
|
||||
from winupdate.tasks import bulk_check_for_updates_task
|
||||
from scripts.tasks import run_script_bg_task, run_bulk_script_task
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
from .tasks import run_script_email_results_task, send_agent_update_task
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
@@ -57,45 +52,49 @@ def get_agent_versions(request):
|
||||
|
||||
@api_view(["POST"])
|
||||
def update_agents(request):
|
||||
pks = request.data["pks"]
|
||||
version = request.data["version"]
|
||||
send_agent_update_task.delay(pks=pks, version=version)
|
||||
q = Agent.objects.filter(pk__in=request.data["pks"]).only("pk", "version")
|
||||
pks: List[int] = [
|
||||
i.pk
|
||||
for i in q
|
||||
if pyver.parse(i.version) < pyver.parse(settings.LATEST_AGENT_VER)
|
||||
]
|
||||
send_agent_update_task.delay(pks=pks)
|
||||
return Response("ok")
|
||||
|
||||
|
||||
@api_view()
|
||||
def ping(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=5, func="test.ping")
|
||||
status = "offline"
|
||||
if agent.has_nats:
|
||||
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
|
||||
if r == "pong":
|
||||
status = "online"
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
return Response({"name": agent.hostname, "status": "offline"})
|
||||
|
||||
if isinstance(r, bool) and r:
|
||||
return Response({"name": agent.hostname, "status": "online"})
|
||||
else:
|
||||
return Response({"name": agent.hostname, "status": "offline"})
|
||||
return Response({"name": agent.hostname, "status": status})
|
||||
|
||||
|
||||
@api_view(["DELETE"])
|
||||
def uninstall(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if agent.has_nats:
|
||||
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
|
||||
|
||||
salt_id = agent.salt_id
|
||||
name = agent.hostname
|
||||
agent.delete()
|
||||
|
||||
uninstall_agent_task.delay(salt_id)
|
||||
reload_nats()
|
||||
return Response(f"{name} will now be uninstalled.")
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
def edit_agent(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["id"])
|
||||
|
||||
a_serializer = AgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
a_serializer.is_valid(raise_exception=True)
|
||||
a_serializer.save()
|
||||
|
||||
if "winupdatepolicy" in request.data.keys():
|
||||
policy = agent.winupdatepolicy.get()
|
||||
p_serializer = WinUpdatePolicySerializer(
|
||||
instance=policy, data=request.data["winupdatepolicy"][0]
|
||||
@@ -118,16 +117,9 @@ def meshcentral(request, pk):
|
||||
if token == "err":
|
||||
return notify_error("Invalid mesh token")
|
||||
|
||||
control = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
)
|
||||
terminal = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
)
|
||||
file = (
|
||||
f"{core.mesh_site}/?login={token}&node={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
)
|
||||
webrdp = f"{core.mesh_site}/mstsc.html?login={token}&node={agent.mesh_node_id}"
|
||||
control = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=11&hide=31"
|
||||
terminal = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=12&hide=31"
|
||||
file = f"{core.mesh_site}/?login={token}&gotonode={agent.mesh_node_id}&viewmode=13&hide=31"
|
||||
|
||||
AuditLog.audit_mesh_session(username=request.user.username, hostname=agent.hostname)
|
||||
|
||||
@@ -136,7 +128,6 @@ def meshcentral(request, pk):
|
||||
"control": control,
|
||||
"terminal": terminal,
|
||||
"file": file,
|
||||
"webrdp": webrdp,
|
||||
"status": agent.status,
|
||||
"client": agent.client.name,
|
||||
"site": agent.site.name,
|
||||
@@ -153,28 +144,29 @@ def agent_detail(request, pk):
|
||||
@api_view()
|
||||
def get_processes(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=20, func="win_agent.get_procs")
|
||||
if pyver.parse(agent.version) < pyver.parse("1.2.0"):
|
||||
return notify_error("Requires agent version 1.2.0 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view()
|
||||
def kill_proc(request, pk, pid):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(timeout=25, func="ps.kill_pid", arg=int(pid))
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r = asyncio.run(
|
||||
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
|
||||
)
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
if isinstance(r, bool) and not r:
|
||||
return notify_error("Unable to kill the process")
|
||||
elif r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -182,55 +174,42 @@ def kill_proc(request, pk, pid):
|
||||
@api_view()
|
||||
def get_event_log(request, pk, logtype, days):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30,
|
||||
func="win_agent.get_eventlog",
|
||||
arg=[logtype, int(days)],
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = 180 if logtype == "Security" else 30
|
||||
data = {
|
||||
"func": "eventlog",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"logname": logtype,
|
||||
"days": str(days),
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(json.loads(zlib.decompress(base64.b64decode(r["wineventlog"]))))
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def power_action(request):
|
||||
pk = request.data["pk"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if action == "rebootnow":
|
||||
logger.info(f"{agent.hostname} was scheduled for immediate reboot")
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=30,
|
||||
func="system.reboot",
|
||||
arg=3,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
return Response(r)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def send_raw_cmd(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=request.data["timeout"],
|
||||
func="cmd.run",
|
||||
kwargs={
|
||||
"cmd": request.data["cmd"],
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
timeout = int(request.data["timeout"])
|
||||
data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
"payload": {
|
||||
"command": request.data["cmd"],
|
||||
"shell": request.data["shell"],
|
||||
"timeout": request.data["timeout"],
|
||||
},
|
||||
)
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=timeout + 2))
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error" or not r:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
AuditLog.audit_raw_command(
|
||||
username=request.user.username,
|
||||
@@ -239,7 +218,6 @@ def send_raw_cmd(request):
|
||||
shell=request.data["shell"],
|
||||
)
|
||||
|
||||
logger.info(f"The command {request.data['cmd']} was sent on agent {agent.hostname}")
|
||||
return Response(r)
|
||||
|
||||
|
||||
@@ -258,6 +236,7 @@ class AgentsTableList(generics.ListAPIView):
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"logged_in_username",
|
||||
@@ -270,9 +249,7 @@ class AgentsTableList(generics.ListAPIView):
|
||||
|
||||
def list(self, request):
|
||||
queryset = self.get_queryset()
|
||||
ctx = {
|
||||
"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)
|
||||
}
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
serializer = AgentTableSerializer(queryset, many=True, context=ctx)
|
||||
return Response(serializer.data)
|
||||
|
||||
@@ -306,6 +283,7 @@ def by_client(request, clientpk):
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"logged_in_username",
|
||||
@@ -314,7 +292,7 @@ def by_client(request, clientpk):
|
||||
"maintenance_mode",
|
||||
)
|
||||
)
|
||||
ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
|
||||
|
||||
|
||||
@@ -335,6 +313,7 @@ def by_site(request, sitepk):
|
||||
"overdue_text_alert",
|
||||
"overdue_email_alert",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
"last_seen",
|
||||
"boot_time",
|
||||
"logged_in_username",
|
||||
@@ -343,53 +322,78 @@ def by_site(request, sitepk):
|
||||
"maintenance_mode",
|
||||
)
|
||||
)
|
||||
ctx = {"default_tz": pytz.timezone(CoreSettings.objects.first().default_time_zone)}
|
||||
ctx = {"default_tz": get_default_timezone()}
|
||||
return Response(AgentTableSerializer(agents, many=True, context=ctx).data)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def overdue_action(request):
|
||||
pk = request.data["pk"]
|
||||
alert_type = request.data["alertType"]
|
||||
action = request.data["action"]
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if alert_type == "email" and action == "enabled":
|
||||
agent.overdue_email_alert = True
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "email" and action == "disabled":
|
||||
agent.overdue_email_alert = False
|
||||
agent.save(update_fields=["overdue_email_alert"])
|
||||
elif alert_type == "text" and action == "enabled":
|
||||
agent.overdue_text_alert = True
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
elif alert_type == "text" and action == "disabled":
|
||||
agent.overdue_text_alert = False
|
||||
agent.save(update_fields=["overdue_text_alert"])
|
||||
else:
|
||||
return Response(
|
||||
{"error": "Something went wrong"}, status=status.HTTP_400_BAD_REQUEST
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
serializer = AgentOverdueActionSerializer(
|
||||
instance=agent, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response(agent.hostname)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def reboot_later(request):
|
||||
class Reboot(APIView):
|
||||
# reboot now
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
date_time = request.data["datetime"]
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# reboot later
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
try:
|
||||
obj = dt.datetime.strptime(date_time, "%Y-%m-%d %H:%M")
|
||||
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
|
||||
except Exception:
|
||||
return notify_error("Invalid date")
|
||||
|
||||
r = agent.schedule_reboot(obj)
|
||||
task_name = "TacticalRMM_SchedReboot_" + "".join(
|
||||
random.choice(string.ascii_letters) for _ in range(10)
|
||||
)
|
||||
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "failed":
|
||||
return notify_error("Something went wrong")
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "schedreboot",
|
||||
"trigger": "once",
|
||||
"name": task_name,
|
||||
"year": int(dt.datetime.strftime(obj, "%Y")),
|
||||
"month": dt.datetime.strftime(obj, "%B"),
|
||||
"day": int(dt.datetime.strftime(obj, "%d")),
|
||||
"hour": int(dt.datetime.strftime(obj, "%H")),
|
||||
"min": int(dt.datetime.strftime(obj, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
return Response(r["msg"])
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
if r != "ok":
|
||||
return notify_error(r)
|
||||
|
||||
details = {"taskname": task_name, "time": str(obj)}
|
||||
PendingAction.objects.create(
|
||||
agent=agent, action_type="schedreboot", details=details
|
||||
)
|
||||
nice_time = dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
return Response(
|
||||
{"time": nice_time, "agent": agent.hostname, "task_name": task_name}
|
||||
)
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
@@ -450,7 +454,7 @@ def install_agent(request):
|
||||
f"GOARCH={goarch}",
|
||||
go_bin,
|
||||
"build",
|
||||
f"-ldflags=\"-X 'main.Inno={inno}'",
|
||||
f"-ldflags=\"-s -w -X 'main.Inno={inno}'",
|
||||
f"-X 'main.Api={api}'",
|
||||
f"-X 'main.Client={client_id}'",
|
||||
f"-X 'main.Site={site_id}'",
|
||||
@@ -548,12 +552,10 @@ def install_agent(request):
|
||||
"/VERYSILENT",
|
||||
"/SUPPRESSMSGBOXES",
|
||||
"&&",
|
||||
"timeout",
|
||||
"/t",
|
||||
"20",
|
||||
"/nobreak",
|
||||
">",
|
||||
"NUL",
|
||||
"ping",
|
||||
"127.0.0.1",
|
||||
"-n",
|
||||
"5",
|
||||
"&&",
|
||||
r'"C:\Program Files\TacticalAgent\tacticalrmm.exe"',
|
||||
"-m",
|
||||
@@ -580,8 +582,6 @@ def install_agent(request):
|
||||
resp = {
|
||||
"cmd": " ".join(str(i) for i in cmd),
|
||||
"url": download_url,
|
||||
"salt64": settings.SALT_64,
|
||||
"salt32": settings.SALT_32,
|
||||
}
|
||||
|
||||
return Response(resp)
|
||||
@@ -636,35 +636,55 @@ def install_agent(request):
|
||||
@api_view(["POST"])
|
||||
def recover(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
mode = request.data["mode"]
|
||||
|
||||
if pyver.parse(agent.version) <= pyver.parse("0.9.5"):
|
||||
return notify_error("Only available in agent version greater than 0.9.5")
|
||||
|
||||
if not agent.has_nats:
|
||||
if mode == "tacagent" or mode == "rpc":
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
# attempt a realtime recovery if supported, otherwise fall back to old recovery method
|
||||
if agent.has_nats:
|
||||
if mode == "tacagent" or mode == "mesh":
|
||||
data = {"func": "recover", "payload": {"mode": mode}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=10))
|
||||
if r == "ok":
|
||||
return Response("Successfully completed recovery")
|
||||
|
||||
if agent.recoveryactions.filter(last_run=None).exists():
|
||||
return notify_error(
|
||||
"A recovery action is currently pending. Please wait for the next agent check-in."
|
||||
)
|
||||
|
||||
if request.data["mode"] == "command" and not request.data["cmd"]:
|
||||
if mode == "command" and not request.data["cmd"]:
|
||||
return notify_error("Command is required")
|
||||
|
||||
# if we've made it this far and realtime recovery didn't work,
|
||||
# tacagent service is the fallback recovery so we obv can't use that to recover itself if it's down
|
||||
if mode == "tacagent":
|
||||
return notify_error(
|
||||
"Requires RPC service to be functional. Please recover that first"
|
||||
)
|
||||
|
||||
# we should only get here if all other methods fail
|
||||
RecoveryAction(
|
||||
agent=agent,
|
||||
mode=request.data["mode"],
|
||||
command=request.data["cmd"] if request.data["mode"] == "command" else None,
|
||||
mode=mode,
|
||||
command=request.data["cmd"] if mode == "command" else None,
|
||||
).save()
|
||||
|
||||
return Response(f"Recovery will be attempted on the agent's next check-in")
|
||||
return Response("Recovery will be attempted on the agent's next check-in")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def run_script(request):
|
||||
agent = get_object_or_404(Agent, pk=request.data["pk"])
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
|
||||
output = request.data["output"]
|
||||
args = request.data["args"]
|
||||
|
||||
req_timeout = int(request.data["timeout"]) + 3
|
||||
|
||||
AuditLog.audit_script_run(
|
||||
@@ -674,74 +694,37 @@ def run_script(request):
|
||||
)
|
||||
|
||||
if output == "wait":
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=req_timeout,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": args,
|
||||
},
|
||||
r = agent.run_script(scriptpk=script.pk, timeout=req_timeout, wait=True)
|
||||
return Response(r)
|
||||
|
||||
elif output == "email":
|
||||
if not pyver.parse(agent.version) >= pyver.parse("1.1.12"):
|
||||
return notify_error("Requires agent version 1.1.12 or greater")
|
||||
|
||||
emails = (
|
||||
[] if request.data["emailmode"] == "default" else request.data["emails"]
|
||||
)
|
||||
run_script_email_results_task.delay(
|
||||
agentpk=agent.pk,
|
||||
scriptpk=script.pk,
|
||||
nats_timeout=req_timeout,
|
||||
emails=emails,
|
||||
)
|
||||
|
||||
if isinstance(r, dict):
|
||||
if r["stdout"]:
|
||||
return Response(r["stdout"])
|
||||
elif r["stderr"]:
|
||||
return Response(r["stderr"])
|
||||
else:
|
||||
try:
|
||||
r["retcode"]
|
||||
except KeyError:
|
||||
return notify_error("Something went wrong")
|
||||
agent.run_script(scriptpk=script.pk, timeout=req_timeout)
|
||||
|
||||
return Response(f"Return code: {r['retcode']}")
|
||||
|
||||
else:
|
||||
if r == "timeout":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif r == "error":
|
||||
return notify_error("Something went wrong")
|
||||
else:
|
||||
return notify_error(str(r))
|
||||
|
||||
else:
|
||||
data = {
|
||||
"agentpk": agent.pk,
|
||||
"scriptpk": script.pk,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": args,
|
||||
}
|
||||
run_script_bg_task.delay(data)
|
||||
return Response(f"{script.name} will now be run on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
def restart_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(func="service.restart", arg="mesh agent", timeout=30)
|
||||
if r == "timeout" or r == "error":
|
||||
return notify_error("Unable to contact the agent")
|
||||
elif isinstance(r, bool) and r:
|
||||
return Response(f"Restarted Mesh Agent on {agent.hostname}")
|
||||
else:
|
||||
return notify_error(f"Failed to restart the Mesh Agent on {agent.hostname}")
|
||||
|
||||
|
||||
@api_view()
|
||||
def recover_mesh(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=60,
|
||||
func="cmd.run",
|
||||
kwargs={
|
||||
"cmd": r'"C:\\Program Files\\TacticalAgent\\tacticalrmm.exe" -m recovermesh',
|
||||
"timeout": 55,
|
||||
},
|
||||
)
|
||||
if r == "timeout" or r == "error":
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
data = {"func": "recover", "payload": {"mode": "mesh"}}
|
||||
r = asyncio.run(agent.nats_cmd(data, timeout=45))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
|
||||
return Response(f"Repaired mesh agent on {agent.hostname}")
|
||||
@@ -805,93 +788,89 @@ def bulk(request):
|
||||
return notify_error("Must select at least 1 agent")
|
||||
|
||||
if request.data["target"] == "client":
|
||||
agents = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
q = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
elif request.data["target"] == "site":
|
||||
agents = Agent.objects.filter(site_id=request.data["site"])
|
||||
q = Agent.objects.filter(site_id=request.data["site"])
|
||||
elif request.data["target"] == "agents":
|
||||
agents = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
q = Agent.objects.filter(pk__in=request.data["agentPKs"])
|
||||
elif request.data["target"] == "all":
|
||||
agents = Agent.objects.all()
|
||||
q = Agent.objects.only("pk", "monitoring_type")
|
||||
else:
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
minions = [agent.salt_id for agent in agents]
|
||||
if request.data["monType"] == "servers":
|
||||
q = q.filter(monitoring_type="server")
|
||||
elif request.data["monType"] == "workstations":
|
||||
q = q.filter(monitoring_type="workstation")
|
||||
|
||||
agents: List[int] = [agent.pk for agent in q]
|
||||
|
||||
AuditLog.audit_bulk_action(request.user, request.data["mode"], request.data)
|
||||
|
||||
if request.data["mode"] == "command":
|
||||
r = Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="cmd.run_bg",
|
||||
kwargs={
|
||||
"cmd": request.data["cmd"],
|
||||
"shell": request.data["shell"],
|
||||
"timeout": request.data["timeout"],
|
||||
},
|
||||
handle_bulk_command_task.delay(
|
||||
agents, request.data["cmd"], request.data["shell"], request.data["timeout"]
|
||||
)
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
return Response(f"Command will now be run on {len(minions)} agents")
|
||||
return Response(f"Command will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "script":
|
||||
script = get_object_or_404(Script, pk=request.data["scriptPK"])
|
||||
|
||||
if script.shell == "python":
|
||||
r = Agent.salt_batch_async(
|
||||
minions=minions,
|
||||
func="win_agent.run_script",
|
||||
kwargs={
|
||||
"filepath": script.filepath,
|
||||
"filename": script.filename,
|
||||
"shell": script.shell,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": request.data["args"],
|
||||
"bg": True,
|
||||
},
|
||||
handle_bulk_script_task.delay(
|
||||
script.pk, agents, request.data["args"], request.data["timeout"]
|
||||
)
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
else:
|
||||
data = {
|
||||
"minions": minions,
|
||||
"scriptpk": script.pk,
|
||||
"timeout": request.data["timeout"],
|
||||
"args": request.data["args"],
|
||||
}
|
||||
run_bulk_script_task.delay(data)
|
||||
|
||||
return Response(f"{script.name} will now be run on {len(minions)} agents")
|
||||
return Response(f"{script.name} will now be run on {len(agents)} agents")
|
||||
|
||||
elif request.data["mode"] == "install":
|
||||
r = Agent.salt_batch_async(minions=minions, func="win_agent.install_updates")
|
||||
if r == "timeout":
|
||||
return notify_error("Salt API not running")
|
||||
bulk_install_updates_task.delay(agents)
|
||||
return Response(
|
||||
f"Pending updates will now be installed on {len(minions)} agents"
|
||||
f"Pending updates will now be installed on {len(agents)} agents"
|
||||
)
|
||||
elif request.data["mode"] == "scan":
|
||||
bulk_check_for_updates_task.delay(minions=minions)
|
||||
return Response(f"Patch status scan will now run on {len(minions)} agents")
|
||||
bulk_check_for_updates_task.delay(agents)
|
||||
return Response(f"Patch status scan will now run on {len(agents)} agents")
|
||||
|
||||
return notify_error("Something went wrong")
|
||||
|
||||
|
||||
@api_view(["POST"])
|
||||
def agent_counts(request):
|
||||
|
||||
server_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
workstation_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"total_server_count": Agent.objects.filter(
|
||||
monitoring_type="server"
|
||||
).count(),
|
||||
"total_server_offline_count": AgentOutage.objects.filter(
|
||||
recovery_time=None, agent__monitoring_type="server"
|
||||
).count(),
|
||||
"total_server_offline_count": server_offline_count,
|
||||
"total_workstation_count": Agent.objects.filter(
|
||||
monitoring_type="workstation"
|
||||
).count(),
|
||||
"total_workstation_offline_count": AgentOutage.objects.filter(
|
||||
recovery_time=None, agent__monitoring_type="workstation"
|
||||
).count(),
|
||||
"total_workstation_offline_count": workstation_offline_count,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -917,3 +896,15 @@ def agent_maintenance(request):
|
||||
return notify_error("Invalid data")
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WMI(APIView):
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
|
||||
return notify_error("Requires agent version 1.1.2 or greater")
|
||||
|
||||
r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
|
||||
if r != "ok":
|
||||
return notify_error("Unable to contact the agent")
|
||||
return Response("ok")
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from django.contrib import admin
|
||||
|
||||
from .models import Alert
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
|
||||
admin.site.register(Alert)
|
||||
admin.site.register(AlertTemplate)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.1 on 2020-08-15 15:31
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,25 +1,31 @@
|
||||
# Generated by Django 3.1.2 on 2020-10-21 18:15
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('checks', '0010_auto_20200922_1344'),
|
||||
('alerts', '0002_auto_20200815_1618'),
|
||||
("checks", "0010_auto_20200922_1344"),
|
||||
("alerts", "0002_auto_20200815_1618"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='assigned_check',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='checks.check'),
|
||||
model_name="alert",
|
||||
name="assigned_check",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="alert",
|
||||
to="checks.check",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alert',
|
||||
name='alert_time',
|
||||
model_name="alert",
|
||||
name="alert_time",
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
]
|
||||
172
api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py
Normal file
172
api/tacticalrmm/alerts/migrations/0004_auto_20210212_1408.py
Normal file
@@ -0,0 +1,172 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-12 14:08
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('agents', '0029_delete_agentoutage'),
|
||||
('clients', '0008_auto_20201103_1430'),
|
||||
('autotasks', '0017_auto_20210210_1512'),
|
||||
('scripts', '0005_auto_20201207_1606'),
|
||||
('alerts', '0003_auto_20201021_1815'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_execution_time',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_retcode',
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_run',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_stderr',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_stdout',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='action_timeout',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='alert_type',
|
||||
field=models.CharField(choices=[('availability', 'Availability'), ('check', 'Check'), ('task', 'Task'), ('custom', 'Custom')], default='availability', max_length=20),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='assigned_task',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='alert', to='autotasks.automatedtask'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='hidden',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_execution_time',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_retcode',
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_run',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_stderr',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_stdout',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_action_timeout',
|
||||
field=models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_on',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='resolved_sms_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='sms_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alert',
|
||||
name='snoozed',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alert',
|
||||
name='severity',
|
||||
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AlertTemplate',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=100)),
|
||||
('is_active', models.BooleanField(default=True)),
|
||||
('action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||
('resolved_action_args', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||
('email_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
|
||||
('text_recipients', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=100), blank=True, default=list, null=True, size=None)),
|
||||
('email_from', models.EmailField(blank=True, max_length=254, null=True)),
|
||||
('agent_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_include_desktops', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('agent_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||
('check_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('check_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('check_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('check_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('check_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||
('task_email_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('task_text_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('task_dashboard_alert_severity', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], max_length=25), blank=True, default=list, size=None)),
|
||||
('task_email_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_text_on_resolved', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_always_email', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_always_text', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_always_alert', models.BooleanField(blank=True, default=False, null=True)),
|
||||
('task_periodic_alert_days', models.PositiveIntegerField(blank=True, default=0, null=True)),
|
||||
('action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alert_template', to='scripts.script')),
|
||||
('excluded_agents', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='agents.Agent')),
|
||||
('excluded_clients', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Client')),
|
||||
('excluded_sites', models.ManyToManyField(blank=True, related_name='alert_exclusions', to='clients.Site')),
|
||||
('resolved_action', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_alert_template', to='scripts.script')),
|
||||
],
|
||||
),
|
||||
]
|
||||
31
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py
Normal file
31
api/tacticalrmm/alerts/migrations/0005_auto_20210212_1745.py
Normal file
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-12 17:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0004_auto_20210212_1408'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='alert',
|
||||
name='action_timeout',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='alert',
|
||||
name='resolved_action_timeout',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='action_timeout',
|
||||
field=models.PositiveIntegerField(default=15),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='resolved_action_timeout',
|
||||
field=models.PositiveIntegerField(default=15),
|
||||
),
|
||||
]
|
||||
72
api/tacticalrmm/alerts/migrations/0006_auto_20210217_1736.py
Normal file
72
api/tacticalrmm/alerts/migrations/0006_auto_20210217_1736.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# Generated by Django 3.1.6 on 2021-02-17 17:36
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0005_auto_20210212_1745'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_include_desktops',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='exclude_servers',
|
||||
field=models.BooleanField(blank=True, default=False, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='alerttemplate',
|
||||
name='exclude_workstations',
|
||||
field=models.BooleanField(blank=True, default=False, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_always_alert',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_always_email',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='agent_always_text',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_always_alert',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_always_email',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='check_always_text',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_always_alert',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_always_email',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='alerttemplate',
|
||||
name='task_always_text',
|
||||
field=models.BooleanField(blank=True, default=None, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,7 @@
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
|
||||
from django.db.models.fields import BooleanField, PositiveIntegerField
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
SEVERITY_CHOICES = [
|
||||
("info", "Informational"),
|
||||
@@ -7,6 +9,13 @@ SEVERITY_CHOICES = [
|
||||
("error", "Error"),
|
||||
]
|
||||
|
||||
ALERT_TYPE_CHOICES = [
|
||||
("availability", "Availability"),
|
||||
("check", "Check"),
|
||||
("task", "Task"),
|
||||
("custom", "Custom"),
|
||||
]
|
||||
|
||||
|
||||
class Alert(models.Model):
|
||||
agent = models.ForeignKey(
|
||||
@@ -23,21 +32,255 @@ class Alert(models.Model):
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
assigned_task = models.ForeignKey(
|
||||
"autotasks.AutomatedTask",
|
||||
related_name="alert",
|
||||
on_delete=models.CASCADE,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
alert_type = models.CharField(
|
||||
max_length=20, choices=ALERT_TYPE_CHOICES, default="availability"
|
||||
)
|
||||
message = models.TextField(null=True, blank=True)
|
||||
alert_time = models.DateTimeField(auto_now_add=True, null=True)
|
||||
alert_time = models.DateTimeField(auto_now_add=True, null=True, blank=True)
|
||||
snoozed = models.BooleanField(default=False)
|
||||
snooze_until = models.DateTimeField(null=True, blank=True)
|
||||
resolved = models.BooleanField(default=False)
|
||||
severity = models.CharField(
|
||||
max_length=100, choices=SEVERITY_CHOICES, default="info"
|
||||
resolved_on = models.DateTimeField(null=True, blank=True)
|
||||
severity = models.CharField(max_length=30, choices=SEVERITY_CHOICES, default="info")
|
||||
email_sent = models.DateTimeField(null=True, blank=True)
|
||||
resolved_email_sent = models.DateTimeField(null=True, blank=True)
|
||||
sms_sent = models.DateTimeField(null=True, blank=True)
|
||||
resolved_sms_sent = models.DateTimeField(null=True, blank=True)
|
||||
hidden = models.BooleanField(default=False)
|
||||
action_run = models.DateTimeField(null=True, blank=True)
|
||||
action_stdout = models.TextField(null=True, blank=True)
|
||||
action_stderr = models.TextField(null=True, blank=True)
|
||||
action_retcode = models.IntegerField(null=True, blank=True)
|
||||
action_execution_time = models.CharField(max_length=100, null=True, blank=True)
|
||||
resolved_action_run = models.DateTimeField(null=True, blank=True)
|
||||
resolved_action_stdout = models.TextField(null=True, blank=True)
|
||||
resolved_action_stderr = models.TextField(null=True, blank=True)
|
||||
resolved_action_retcode = models.IntegerField(null=True, blank=True)
|
||||
resolved_action_execution_time = models.CharField(
|
||||
max_length=100, null=True, blank=True
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
def resolve(self):
|
||||
self.resolved = True
|
||||
self.resolved_on = djangotime.now()
|
||||
self.snoozed = False
|
||||
self.snooze_until = None
|
||||
self.save()
|
||||
|
||||
@classmethod
|
||||
def create_availability_alert(cls, agent):
|
||||
pass
|
||||
if not cls.objects.filter(agent=agent, resolved=False).exists():
|
||||
return cls.objects.create(
|
||||
agent=agent,
|
||||
alert_type="availability",
|
||||
severity="error",
|
||||
message=f"{agent.hostname} in {agent.client.name}\\{agent.site.name} is overdue.",
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_check_alert(cls, check):
|
||||
|
||||
if not cls.objects.filter(assigned_check=check, resolved=False).exists():
|
||||
return cls.objects.create(
|
||||
assigned_check=check,
|
||||
alert_type="check",
|
||||
severity=check.alert_severity,
|
||||
message=f"{check.agent.hostname} has a {check.check_type} check: {check.readable_desc} that failed.",
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_task_alert(cls, task):
|
||||
|
||||
if not cls.objects.filter(assigned_task=task, resolved=False).exists():
|
||||
return cls.objects.create(
|
||||
assigned_task=task,
|
||||
alert_type="task",
|
||||
severity=task.alert_severity,
|
||||
message=f"{task.agent.hostname} has task: {task.name} that failed.",
|
||||
hidden=True,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def create_custom_alert(cls, custom):
|
||||
pass
|
||||
|
||||
|
||||
class AlertTemplate(models.Model):
|
||||
name = models.CharField(max_length=100)
|
||||
is_active = models.BooleanField(default=True)
|
||||
|
||||
action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
action_timeout = models.PositiveIntegerField(default=15)
|
||||
resolved_action = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
related_name="resolved_alert_template",
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
resolved_action_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
resolved_action_timeout = models.PositiveIntegerField(default=15)
|
||||
|
||||
# overrides the global recipients
|
||||
email_recipients = ArrayField(
|
||||
models.CharField(max_length=100, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
text_recipients = ArrayField(
|
||||
models.CharField(max_length=100, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
|
||||
# overrides the from address
|
||||
email_from = models.EmailField(blank=True, null=True)
|
||||
|
||||
# agent alert settings
|
||||
agent_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
agent_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
agent_always_email = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
agent_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
agent_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
|
||||
# check alert settings
|
||||
check_email_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
check_text_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
check_dashboard_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
check_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
check_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
check_always_email = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
check_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
check_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
|
||||
# task alert settings
|
||||
task_email_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
task_text_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
task_dashboard_alert_severity = ArrayField(
|
||||
models.CharField(max_length=25, blank=True, choices=SEVERITY_CHOICES),
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
task_email_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
task_text_on_resolved = BooleanField(null=True, blank=True, default=False)
|
||||
task_always_email = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_text = BooleanField(null=True, blank=True, default=None)
|
||||
task_always_alert = BooleanField(null=True, blank=True, default=None)
|
||||
task_periodic_alert_days = PositiveIntegerField(blank=True, null=True, default=0)
|
||||
|
||||
# exclusion settings
|
||||
exclude_workstations = BooleanField(null=True, blank=True, default=False)
|
||||
exclude_servers = BooleanField(null=True, blank=True, default=False)
|
||||
|
||||
excluded_sites = models.ManyToManyField(
|
||||
"clients.Site", related_name="alert_exclusions", blank=True
|
||||
)
|
||||
excluded_clients = models.ManyToManyField(
|
||||
"clients.Client", related_name="alert_exclusions", blank=True
|
||||
)
|
||||
excluded_agents = models.ManyToManyField(
|
||||
"agents.Agent", related_name="alert_exclusions", blank=True
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def has_agent_settings(self) -> bool:
|
||||
return (
|
||||
self.agent_email_on_resolved
|
||||
or self.agent_text_on_resolved
|
||||
or self.agent_always_email
|
||||
or self.agent_always_text
|
||||
or self.agent_always_alert
|
||||
or bool(self.agent_periodic_alert_days)
|
||||
)
|
||||
|
||||
@property
|
||||
def has_check_settings(self) -> bool:
|
||||
return (
|
||||
bool(self.check_email_alert_severity)
|
||||
or bool(self.check_text_alert_severity)
|
||||
or bool(self.check_dashboard_alert_severity)
|
||||
or self.check_email_on_resolved
|
||||
or self.check_text_on_resolved
|
||||
or self.check_always_email
|
||||
or self.check_always_text
|
||||
or self.check_always_alert
|
||||
or bool(self.check_periodic_alert_days)
|
||||
)
|
||||
|
||||
@property
|
||||
def has_task_settings(self) -> bool:
|
||||
return (
|
||||
bool(self.task_email_alert_severity)
|
||||
or bool(self.task_text_alert_severity)
|
||||
or bool(self.task_dashboard_alert_severity)
|
||||
or self.task_email_on_resolved
|
||||
or self.task_text_on_resolved
|
||||
or self.task_always_email
|
||||
or self.task_always_text
|
||||
or self.task_always_alert
|
||||
or bool(self.task_periodic_alert_days)
|
||||
)
|
||||
|
||||
@property
|
||||
def has_core_settings(self) -> bool:
|
||||
return bool(self.email_from) or self.email_recipients or self.text_recipients
|
||||
|
||||
@property
|
||||
def is_default_template(self) -> bool:
|
||||
return self.default_alert_template.exists()
|
||||
|
||||
@@ -1,19 +1,121 @@
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
ReadOnlyField,
|
||||
DateTimeField,
|
||||
)
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField
|
||||
|
||||
from .models import Alert
|
||||
from automation.serializers import PolicySerializer
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from tacticalrmm.utils import get_default_timezone
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
|
||||
|
||||
class AlertSerializer(ModelSerializer):
|
||||
|
||||
hostname = ReadOnlyField(source="agent.hostname")
|
||||
client = ReadOnlyField(source="agent.client")
|
||||
site = ReadOnlyField(source="agent.site")
|
||||
alert_time = DateTimeField(format="iso-8601")
|
||||
hostname = SerializerMethodField(read_only=True)
|
||||
client = SerializerMethodField(read_only=True)
|
||||
site = SerializerMethodField(read_only=True)
|
||||
alert_time = SerializerMethodField(read_only=True)
|
||||
resolve_on = SerializerMethodField(read_only=True)
|
||||
snoozed_until = SerializerMethodField(read_only=True)
|
||||
|
||||
def get_hostname(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.hostname if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.hostname
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.hostname if instance.assigned_task else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_client(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.client.name if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.client.name
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.client.name
|
||||
if instance.assigned_task
|
||||
else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_site(self, instance):
|
||||
if instance.alert_type == "availability":
|
||||
return instance.agent.site.name if instance.agent else ""
|
||||
elif instance.alert_type == "check":
|
||||
return (
|
||||
instance.assigned_check.agent.site.name
|
||||
if instance.assigned_check
|
||||
else ""
|
||||
)
|
||||
elif instance.alert_type == "task":
|
||||
return (
|
||||
instance.assigned_task.agent.site.name if instance.assigned_task else ""
|
||||
)
|
||||
else:
|
||||
return ""
|
||||
|
||||
def get_alert_time(self, instance):
|
||||
if instance.alert_time:
|
||||
return instance.alert_time.astimezone(get_default_timezone()).timestamp()
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_resolve_on(self, instance):
|
||||
if instance.resolved_on:
|
||||
return instance.resolved_on.astimezone(get_default_timezone()).timestamp()
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_snoozed_until(self, instance):
|
||||
if instance.snooze_until:
|
||||
return instance.snooze_until.astimezone(get_default_timezone()).timestamp()
|
||||
return None
|
||||
|
||||
class Meta:
|
||||
model = Alert
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class AlertTemplateSerializer(ModelSerializer):
|
||||
agent_settings = ReadOnlyField(source="has_agent_settings")
|
||||
check_settings = ReadOnlyField(source="has_check_settings")
|
||||
task_settings = ReadOnlyField(source="has_task_settings")
|
||||
core_settings = ReadOnlyField(source="has_core_settings")
|
||||
default_template = ReadOnlyField(source="is_default_template")
|
||||
action_name = ReadOnlyField(source="action.name")
|
||||
resolved_action_name = ReadOnlyField(source="resolved_action.name")
|
||||
applied_count = SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
def get_applied_count(self, instance):
|
||||
count = 0
|
||||
count += instance.policies.count()
|
||||
count += instance.clients.count()
|
||||
count += instance.sites.count()
|
||||
return count
|
||||
|
||||
|
||||
class AlertTemplateRelationSerializer(ModelSerializer):
|
||||
policies = PolicySerializer(read_only=True, many=True)
|
||||
clients = ClientSerializer(read_only=True, many=True)
|
||||
sites = SiteSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = AlertTemplate
|
||||
fields = "__all__"
|
||||
|
||||
14
api/tacticalrmm/alerts/tasks.py
Normal file
14
api/tacticalrmm/alerts/tasks.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from alerts.models import Alert
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def unsnooze_alerts() -> str:
|
||||
|
||||
Alert.objects.filter(snoozed=True, snooze_until__lte=djangotime.now()).update(
|
||||
snoozed=False, snooze_until=None
|
||||
)
|
||||
|
||||
return "ok"
|
||||
@@ -1,3 +1,485 @@
|
||||
from django.test import TestCase
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Create your tests here.
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker, seq
|
||||
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
AlertSerializer,
|
||||
AlertTemplateRelationSerializer,
|
||||
AlertTemplateSerializer,
|
||||
)
|
||||
|
||||
|
||||
class TestAlertsViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_get_alerts(self):
|
||||
url = "/alerts/alerts/"
|
||||
|
||||
# create check, task, and agent to test each serializer function
|
||||
check = baker.make_recipe("checks.diskspace_check")
|
||||
task = baker.make("autotasks.AutomatedTask")
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
# setup data
|
||||
alerts = baker.make(
|
||||
"alerts.Alert",
|
||||
agent=agent,
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
severity="warning",
|
||||
_quantity=3,
|
||||
)
|
||||
baker.make(
|
||||
"alerts.Alert",
|
||||
assigned_check=check,
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
severity="error",
|
||||
_quantity=7,
|
||||
)
|
||||
baker.make(
|
||||
"alerts.Alert",
|
||||
assigned_task=task,
|
||||
snoozed=True,
|
||||
snooze_until=djangotime.now(),
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
_quantity=2,
|
||||
)
|
||||
baker.make(
|
||||
"alerts.Alert",
|
||||
agent=agent,
|
||||
resolved=True,
|
||||
resolved_on=djangotime.now(),
|
||||
alert_time=seq(datetime.now(), timedelta(days=15)),
|
||||
_quantity=9,
|
||||
)
|
||||
|
||||
# test top alerts for alerts icon
|
||||
data = {"top": 3}
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEquals(resp.data["alerts"], AlertSerializer(alerts, many=True).data)
|
||||
self.assertEquals(resp.data["alerts_count"], 10)
|
||||
|
||||
# test filter data
|
||||
# test data and result counts
|
||||
data = [
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 30,
|
||||
"snoozedFilter": True,
|
||||
"resolvedFilter": False,
|
||||
},
|
||||
"count": 12,
|
||||
},
|
||||
{
|
||||
"filter": {
|
||||
"timeFilter": 45,
|
||||
"snoozedFilter": False,
|
||||
"resolvedFilter": False,
|
||||
},
|
||||
"count": 10,
|
||||
},
|
||||
{
|
||||
"filter": {
|
||||
"severityFilter": ["error"],
|
||||
"snoozedFilter": False,
|
||||
"resolvedFilter": True,
|
||||
"timeFilter": 20,
|
||||
},
|
||||
"count": 7,
|
||||
},
|
||||
{
|
||||
"filter": {
|
||||
"clientFilter": [],
|
||||
"snoozedFilter": True,
|
||||
"resolvedFilter": False,
|
||||
},
|
||||
"count": 0,
|
||||
},
|
||||
{"filter": {}, "count": 21},
|
||||
{"filter": {"snoozedFilter": True, "resolvedFilter": False}, "count": 12},
|
||||
]
|
||||
|
||||
for req in data:
|
||||
resp = self.client.patch(url, req["filter"], format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(len(resp.data), req["count"])
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_add_alert(self):
|
||||
url = "/alerts/alerts/"
|
||||
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
data = {
|
||||
"alert_time": datetime.now(),
|
||||
"agent": agent.id,
|
||||
"severity": "warning",
|
||||
"alert_type": "availability",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_alert(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.get("/alerts/alerts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert = baker.make("alerts.Alert")
|
||||
url = f"/alerts/alerts/{alert.pk}/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertSerializer(alert)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_update_alert(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert = baker.make("alerts.Alert", resolved=False, snoozed=False)
|
||||
|
||||
url = f"/alerts/alerts/{alert.pk}/"
|
||||
|
||||
# test resolving alert
|
||||
data = {
|
||||
"id": alert.pk,
|
||||
"type": "resolve",
|
||||
}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).resolved)
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).resolved_on)
|
||||
|
||||
# test snoozing alert
|
||||
data = {"id": alert.pk, "type": "snooze", "snooze_days": "30"}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).snoozed)
|
||||
self.assertTrue(Alert.objects.get(pk=alert.pk).snooze_until)
|
||||
|
||||
# test snoozing alert without snooze_days
|
||||
data = {"id": alert.pk, "type": "snooze"}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test unsnoozing alert
|
||||
data = {"id": alert.pk, "type": "unsnooze"}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertFalse(Alert.objects.get(pk=alert.pk).snoozed)
|
||||
self.assertFalse(Alert.objects.get(pk=alert.pk).snooze_until)
|
||||
|
||||
# test invalid type
|
||||
data = {"id": alert.pk, "type": "invalid"}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_alert(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerts/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert = baker.make("alerts.Alert")
|
||||
|
||||
# test delete alert
|
||||
url = f"/alerts/alerts/{alert.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(Alert.objects.filter(pk=alert.pk).exists())
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_bulk_alert_actions(self):
|
||||
url = "/alerts/bulk/"
|
||||
|
||||
# setup data
|
||||
alerts = baker.make("alerts.Alert", resolved=False, _quantity=3)
|
||||
|
||||
# test invalid data
|
||||
data = {"bulk_action": "invalid"}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test snooze without snooze days
|
||||
data = {"bulk_action": "snooze"}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test bulk snoozing alerts
|
||||
data = {
|
||||
"bulk_action": "snooze",
|
||||
"alerts": [alert.pk for alert in alerts],
|
||||
"snooze_days": "30",
|
||||
}
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertFalse(Alert.objects.filter(snoozed=False).exists())
|
||||
|
||||
# test bulk resolving alerts
|
||||
data = {"bulk_action": "resolve", "alerts": [alert.pk for alert in alerts]}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertFalse(Alert.objects.filter(resolved=False).exists())
|
||||
self.assertTrue(Alert.objects.filter(snoozed=False).exists())
|
||||
|
||||
def test_get_alert_templates(self):
|
||||
url = "/alerts/alerttemplates/"
|
||||
|
||||
alert_templates = baker.make("alerts.AlertTemplate", _quantity=3)
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_templates, many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_alert_template(self):
|
||||
url = "/alerts/alerttemplates/"
|
||||
|
||||
data = {
|
||||
"name": "Test Template",
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_alert_template(self):
|
||||
# returns 404 for invalid alert template pk
|
||||
resp = self.client.get("/alerts/alerttemplates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateSerializer(alert_template)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_update_alert_template(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerttemplates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/"
|
||||
|
||||
# test data
|
||||
data = {
|
||||
"id": alert_template.pk,
|
||||
"agent_email_on_resolved": True,
|
||||
"agent_text_on_resolved": True,
|
||||
"agent_include_desktops": True,
|
||||
"agent_always_email": True,
|
||||
"agent_always_text": True,
|
||||
"agent_always_alert": True,
|
||||
"agent_periodic_alert_days": "90",
|
||||
}
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_alert_template(self):
|
||||
# returns 404 for invalid alert pk
|
||||
resp = self.client.put("/alerts/alerttemplates/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
|
||||
# test delete alert
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/"
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
self.assertFalse(AlertTemplate.objects.filter(pk=alert_template.pk).exists())
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_alert_template_related(self):
|
||||
# setup data
|
||||
alert_template = baker.make("alerts.AlertTemplate")
|
||||
baker.make("clients.Client", alert_template=alert_template, _quantity=2)
|
||||
baker.make("clients.Site", alert_template=alert_template, _quantity=3)
|
||||
baker.make("automation.Policy", alert_template=alert_template)
|
||||
core = CoreSettings.objects.first()
|
||||
core.alert_template = alert_template
|
||||
core.save()
|
||||
|
||||
url = f"/alerts/alerttemplates/{alert_template.pk}/related/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AlertTemplateRelationSerializer(alert_template)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
self.assertEqual(len(resp.data["policies"]), 1)
|
||||
self.assertEqual(len(resp.data["clients"]), 2)
|
||||
self.assertEqual(len(resp.data["sites"]), 3)
|
||||
self.assertTrue(
|
||||
AlertTemplate.objects.get(pk=alert_template.pk).is_default_template
|
||||
)
|
||||
|
||||
|
||||
class TestAlertTasks(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_unsnooze_alert_task(self):
|
||||
from alerts.tasks import unsnooze_alerts
|
||||
|
||||
# these will be unsnoozed whent eh function is run
|
||||
not_snoozed = baker.make(
|
||||
"alerts.Alert",
|
||||
snoozed=True,
|
||||
snooze_until=seq(datetime.now(), timedelta(days=15)),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
# these will still be snoozed after the function is run
|
||||
snoozed = baker.make(
|
||||
"alerts.Alert",
|
||||
snoozed=True,
|
||||
snooze_until=seq(datetime.now(), timedelta(days=-15)),
|
||||
_quantity=5,
|
||||
)
|
||||
|
||||
unsnooze_alerts()
|
||||
|
||||
self.assertFalse(
|
||||
Alert.objects.filter(
|
||||
pk__in=[alert.pk for alert in not_snoozed], snoozed=False
|
||||
).exists()
|
||||
)
|
||||
self.assertTrue(
|
||||
Alert.objects.filter(
|
||||
pk__in=[alert.pk for alert in snoozed], snoozed=False
|
||||
).exists()
|
||||
)
|
||||
|
||||
def test_agent_gets_correct_alert_template(self):
|
||||
|
||||
core = CoreSettings.objects.first()
|
||||
# setup data
|
||||
workstation = baker.make_recipe("agents.agent", monitoring_type="workstation")
|
||||
server = baker.make_recipe("agents.agent", monitoring_type="server")
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
|
||||
alert_templates = baker.make("alerts.AlertTemplate", _quantity=6)
|
||||
|
||||
# should be None
|
||||
self.assertFalse(workstation.get_alert_template())
|
||||
self.assertFalse(server.get_alert_template())
|
||||
|
||||
# assign first Alert Template as to a policy and apply it as default
|
||||
policy.alert_template = alert_templates[0]
|
||||
policy.save()
|
||||
core.workstation_policy = policy
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[0].pk)
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[0].pk)
|
||||
|
||||
# assign second Alert Template to as default alert template
|
||||
core.alert_template = alert_templates[1]
|
||||
core.save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[1].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[1].pk)
|
||||
|
||||
# assign third Alert Template to client
|
||||
workstation.client.alert_template = alert_templates[2]
|
||||
server.client.alert_template = alert_templates[2]
|
||||
workstation.client.save()
|
||||
server.client.save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[2].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[2].pk)
|
||||
|
||||
# apply policy to client and should override
|
||||
workstation.client.workstation_policy = policy
|
||||
server.client.server_policy = policy
|
||||
workstation.client.save()
|
||||
server.client.save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[0].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[0].pk)
|
||||
|
||||
# assign fouth Alert Template to site
|
||||
workstation.site.alert_template = alert_templates[3]
|
||||
server.site.alert_template = alert_templates[3]
|
||||
workstation.site.save()
|
||||
server.site.save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[3].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[3].pk)
|
||||
|
||||
# apply policy to site
|
||||
workstation.site.workstation_policy = policy
|
||||
server.site.server_policy = policy
|
||||
workstation.site.save()
|
||||
server.site.save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[0].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[0].pk)
|
||||
|
||||
# apply policy to agents
|
||||
workstation.policy = policy
|
||||
server.policy = policy
|
||||
workstation.save()
|
||||
server.save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[0].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[0].pk)
|
||||
|
||||
# test disabling alert template
|
||||
alert_templates[0].is_active = False
|
||||
alert_templates[0].save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[3].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[3].pk)
|
||||
|
||||
# test policy exclusions
|
||||
alert_templates[3].excluded_agents.set([workstation.pk])
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[2].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[3].pk)
|
||||
|
||||
# test workstation exclusions
|
||||
alert_templates[2].exclude_workstations = True
|
||||
alert_templates[2].save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[1].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[3].pk)
|
||||
|
||||
# test server exclusions
|
||||
alert_templates[3].exclude_servers = True
|
||||
alert_templates[3].save()
|
||||
|
||||
self.assertEquals(workstation.get_alert_template().pk, alert_templates[1].pk)
|
||||
self.assertEquals(server.get_alert_template().pk, alert_templates[2].pk)
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("alerts/", views.GetAddAlerts.as_view()),
|
||||
path("bulk/", views.BulkAlerts.as_view()),
|
||||
path("alerts/<int:pk>/", views.GetUpdateDeleteAlert.as_view()),
|
||||
path("alerttemplates/", views.GetAddAlertTemplates.as_view()),
|
||||
path("alerttemplates/<int:pk>/", views.GetUpdateDeleteAlertTemplate.as_view()),
|
||||
path("alerttemplates/<int:pk>/related/", views.RelatedAlertTemplate.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,18 +1,102 @@
|
||||
from datetime import datetime as dt
|
||||
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from django.utils import timezone as djangotime
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from .models import Alert
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .serializers import AlertSerializer
|
||||
from .models import Alert, AlertTemplate
|
||||
from .serializers import (
|
||||
AlertSerializer,
|
||||
AlertTemplateRelationSerializer,
|
||||
AlertTemplateSerializer,
|
||||
)
|
||||
|
||||
|
||||
class GetAddAlerts(APIView):
|
||||
def get(self, request):
|
||||
alerts = Alert.objects.all()
|
||||
def patch(self, request):
|
||||
|
||||
# top 10 alerts for dashboard icon
|
||||
if "top" in request.data.keys():
|
||||
alerts = Alert.objects.filter(
|
||||
resolved=False, snoozed=False, hidden=False
|
||||
).order_by("alert_time")[: int(request.data["top"])]
|
||||
count = Alert.objects.filter(
|
||||
resolved=False, snoozed=False, hidden=False
|
||||
).count()
|
||||
return Response(
|
||||
{
|
||||
"alerts_count": count,
|
||||
"alerts": AlertSerializer(alerts, many=True).data,
|
||||
}
|
||||
)
|
||||
|
||||
elif any(
|
||||
key
|
||||
in [
|
||||
"timeFilter",
|
||||
"clientFilter",
|
||||
"severityFilter",
|
||||
"resolvedFilter",
|
||||
"snoozedFilter",
|
||||
]
|
||||
for key in request.data.keys()
|
||||
):
|
||||
clientFilter = Q()
|
||||
severityFilter = Q()
|
||||
timeFilter = Q()
|
||||
resolvedFilter = Q()
|
||||
snoozedFilter = Q()
|
||||
|
||||
if (
|
||||
"snoozedFilter" in request.data.keys()
|
||||
and not request.data["snoozedFilter"]
|
||||
):
|
||||
snoozedFilter = Q(snoozed=request.data["snoozedFilter"])
|
||||
|
||||
if (
|
||||
"resolvedFilter" in request.data.keys()
|
||||
and not request.data["resolvedFilter"]
|
||||
):
|
||||
resolvedFilter = Q(resolved=request.data["resolvedFilter"])
|
||||
|
||||
if "clientFilter" in request.data.keys():
|
||||
from agents.models import Agent
|
||||
from clients.models import Client
|
||||
|
||||
clients = Client.objects.filter(
|
||||
pk__in=request.data["clientFilter"]
|
||||
).values_list("id")
|
||||
agents = Agent.objects.filter(site__client_id__in=clients).values_list(
|
||||
"id"
|
||||
)
|
||||
|
||||
clientFilter = Q(agent__in=agents)
|
||||
|
||||
if "severityFilter" in request.data.keys():
|
||||
severityFilter = Q(severity__in=request.data["severityFilter"])
|
||||
|
||||
if "timeFilter" in request.data.keys():
|
||||
timeFilter = Q(
|
||||
alert_time__lte=djangotime.make_aware(dt.today()),
|
||||
alert_time__gt=djangotime.make_aware(dt.today())
|
||||
- djangotime.timedelta(days=int(request.data["timeFilter"])),
|
||||
)
|
||||
|
||||
alerts = (
|
||||
Alert.objects.filter(clientFilter)
|
||||
.filter(severityFilter)
|
||||
.filter(resolvedFilter)
|
||||
.filter(snoozedFilter)
|
||||
.filter(timeFilter)
|
||||
)
|
||||
return Response(AlertSerializer(alerts, many=True).data)
|
||||
|
||||
else:
|
||||
alerts = Alert.objects.all()
|
||||
return Response(AlertSerializer(alerts, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
@@ -32,7 +116,40 @@ class GetUpdateDeleteAlert(APIView):
|
||||
def put(self, request, pk):
|
||||
alert = get_object_or_404(Alert, pk=pk)
|
||||
|
||||
serializer = AlertSerializer(instance=alert, data=request.data, partial=True)
|
||||
data = request.data
|
||||
|
||||
if "type" in data.keys():
|
||||
if data["type"] == "resolve":
|
||||
data = {
|
||||
"resolved": True,
|
||||
"resolved_on": djangotime.now(),
|
||||
"snoozed": False,
|
||||
}
|
||||
|
||||
# unable to set snooze_until to none in serialzier
|
||||
alert.snooze_until = None
|
||||
alert.save()
|
||||
elif data["type"] == "snooze":
|
||||
if "snooze_days" in data.keys():
|
||||
data = {
|
||||
"snoozed": True,
|
||||
"snooze_until": djangotime.now()
|
||||
+ djangotime.timedelta(days=int(data["snooze_days"])),
|
||||
}
|
||||
else:
|
||||
return notify_error(
|
||||
"Missing 'snoozed_days' when trying to snooze alert"
|
||||
)
|
||||
elif data["type"] == "unsnooze":
|
||||
data = {"snoozed": False}
|
||||
|
||||
# unable to set snooze_until to none in serialzier
|
||||
alert.snooze_until = None
|
||||
alert.save()
|
||||
else:
|
||||
return notify_error("There was an error in the request data")
|
||||
|
||||
serializer = AlertSerializer(instance=alert, data=data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
@@ -42,3 +159,68 @@ class GetUpdateDeleteAlert(APIView):
|
||||
Alert.objects.get(pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class BulkAlerts(APIView):
|
||||
def post(self, request):
|
||||
if request.data["bulk_action"] == "resolve":
|
||||
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||
resolved=True,
|
||||
resolved_on=djangotime.now(),
|
||||
snoozed=False,
|
||||
snooze_until=None,
|
||||
)
|
||||
return Response("ok")
|
||||
elif request.data["bulk_action"] == "snooze":
|
||||
if "snooze_days" in request.data.keys():
|
||||
Alert.objects.filter(id__in=request.data["alerts"]).update(
|
||||
snoozed=True,
|
||||
snooze_until=djangotime.now()
|
||||
+ djangotime.timedelta(days=int(request.data["snooze_days"])),
|
||||
)
|
||||
return Response("ok")
|
||||
|
||||
return notify_error("The request was invalid")
|
||||
|
||||
|
||||
class GetAddAlertTemplates(APIView):
|
||||
def get(self, request):
|
||||
alert_templates = AlertTemplate.objects.all()
|
||||
|
||||
return Response(AlertTemplateSerializer(alert_templates, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
serializer = AlertTemplateSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetUpdateDeleteAlertTemplate(APIView):
|
||||
def get(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
|
||||
return Response(AlertTemplateSerializer(alert_template).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
|
||||
serializer = AlertTemplateSerializer(
|
||||
instance=alert_template, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(AlertTemplate, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class RelatedAlertTemplate(APIView):
|
||||
def get(self, request, pk):
|
||||
alert_template = get_object_or_404(AlertTemplate, pk=pk)
|
||||
return Response(AlertTemplateRelationSerializer(alert_template).data)
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
name = "api"
|
||||
@@ -1,11 +0,0 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
from apiv3 import views as v3_views
|
||||
|
||||
urlpatterns = [
|
||||
path("triggerpatchscan/", views.trigger_patch_scan),
|
||||
path("<int:pk>/checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<int:pk>/taskrunner/", views.TaskRunner.as_view()),
|
||||
path("<int:pk>/saltinfo/", views.SaltInfo.as_view()),
|
||||
path("<int:pk>/meshinfo/", v3_views.MeshInfo.as_view()),
|
||||
]
|
||||
@@ -1,149 +0,0 @@
|
||||
from loguru import logger
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.decorators import (
|
||||
api_view,
|
||||
authentication_classes,
|
||||
permission_classes,
|
||||
)
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
|
||||
from autotasks.serializers import TaskRunnerGetSerializer, TaskRunnerPatchSerializer
|
||||
from checks.serializers import CheckRunnerGetSerializer, CheckResultsSerializer
|
||||
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view(["PATCH"])
|
||||
@authentication_classes((TokenAuthentication,))
|
||||
@permission_classes((IsAuthenticated,))
|
||||
def trigger_patch_scan(request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
else:
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For windows agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
checks = Check.objects.filter(agent__pk=pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request, pk):
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
if check.check_type != "cpuload" and check.check_type != "memory":
|
||||
serializer = CheckResultsSerializer(
|
||||
instance=check, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
|
||||
else:
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
|
||||
check.handle_check(request.data)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows python agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
return Response(TaskRunnerGetSerializer(task).data)
|
||||
|
||||
def patch(self, request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
serializer = TaskRunnerPatchSerializer(
|
||||
instance=task, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_run=djangotime.now())
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SaltInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
ret = {
|
||||
"latestVer": settings.LATEST_SALT_VER,
|
||||
"currentVer": agent.salt_ver,
|
||||
"salt_id": agent.salt_id,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.salt_ver = request.data["ver"]
|
||||
agent.save(update_fields=["salt_ver"])
|
||||
return Response("ok")
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class Apiv2Config(AppConfig):
|
||||
name = 'apiv2'
|
||||
@@ -1,38 +0,0 @@
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from unittest.mock import patch
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
|
||||
class TestAPIv2(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("agents.models.Agent.salt_api_cmd")
|
||||
def test_sync_modules(self, mock_ret):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
url = "/api/v2/saltminion/"
|
||||
payload = {"agent_id": agent.agent_id}
|
||||
|
||||
mock_ret.return_value = "error"
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
mock_ret.return_value = []
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Modules are already in sync")
|
||||
|
||||
mock_ret.return_value = ["modules.win_agent"]
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
||||
|
||||
mock_ret.return_value = ["askdjaskdjasd", "modules.win_agent"]
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, "Successfully synced salt modules")
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
@@ -1,14 +0,0 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
from apiv3 import views as v3_views
|
||||
|
||||
urlpatterns = [
|
||||
path("newagent/", v3_views.NewAgent.as_view()),
|
||||
path("meshexe/", v3_views.MeshExe.as_view()),
|
||||
path("saltminion/", v3_views.SaltMinion.as_view()),
|
||||
path("<str:agentid>/saltminion/", v3_views.SaltMinion.as_view()),
|
||||
path("sysinfo/", v3_views.SysInfo.as_view()),
|
||||
path("hello/", v3_views.Hello.as_view()),
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
]
|
||||
@@ -1,41 +0,0 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
|
||||
from checks.serializers import CheckRunnerGetSerializerV2
|
||||
|
||||
|
||||
class CheckRunner(APIView):
|
||||
"""
|
||||
For the windows python agent
|
||||
"""
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV2(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request):
|
||||
check = get_object_or_404(Check, pk=request.data["id"])
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
status = check.handle_checkv2(request.data)
|
||||
return Response(status)
|
||||
@@ -1,11 +1,12 @@
|
||||
import os
|
||||
import json
|
||||
import os
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from unittest.mock import patch
|
||||
from model_bakery import baker
|
||||
from itertools import cycle
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestAPIv3(TacticalTestCase):
|
||||
@@ -26,46 +27,10 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_salt_minion(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/saltminion/"
|
||||
url2 = f"/api/v2/{self.agent.agent_id}/saltminion/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertIn("latestVer", r.json().keys())
|
||||
self.assertIn("currentVer", r.json().keys())
|
||||
self.assertIn("salt_id", r.json().keys())
|
||||
self.assertIn("downloadURL", r.json().keys())
|
||||
|
||||
r2 = self.client.get(url2)
|
||||
self.assertEqual(r2.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url2)
|
||||
|
||||
def test_get_mesh_info(self):
|
||||
url = f"/api/v3/{self.agent.pk}/meshinfo/"
|
||||
url2 = f"/api/v1/{self.agent.pk}/meshinfo/"
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
r = self.client.get(url2)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
self.check_not_authenticated("get", url2)
|
||||
|
||||
def test_get_winupdater(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/winupdater/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_sysinfo(self):
|
||||
# TODO replace this with golang wmi sample data
|
||||
|
||||
url = f"/api/v3/sysinfo/"
|
||||
url = "/api/v3/sysinfo/"
|
||||
with open(
|
||||
os.path.join(
|
||||
settings.BASE_DIR, "tacticalrmm/test_data/wmi_python_agent.json"
|
||||
@@ -80,19 +45,11 @@ class TestAPIv3(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_hello_patch(self):
|
||||
url = f"/api/v3/hello/"
|
||||
payload = {
|
||||
"agent_id": self.agent.agent_id,
|
||||
"logged_in_username": "None",
|
||||
"disks": [],
|
||||
}
|
||||
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
def test_checkrunner_interval(self):
|
||||
url = f"/api/v3/{self.agent.agent_id}/checkinterval/"
|
||||
r = self.client.get(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload["logged_in_username"] = "Bob"
|
||||
r = self.client.patch(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{"agent": self.agent.pk, "check_interval": self.agent.check_interval},
|
||||
)
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("hello/", views.Hello.as_view()),
|
||||
path("checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
|
||||
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
|
||||
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
|
||||
path("saltminion/", views.SaltMinion.as_view()),
|
||||
path("<str:agentid>/saltminion/", views.SaltMinion.as_view()),
|
||||
path("<int:pk>/meshinfo/", views.MeshInfo.as_view()),
|
||||
path("meshexe/", views.MeshExe.as_view()),
|
||||
path("sysinfo/", views.SysInfo.as_view()),
|
||||
path("newagent/", views.NewAgent.as_view()),
|
||||
path("winupdater/", views.WinUpdater.as_view()),
|
||||
path("<str:agentid>/winupdater/", views.WinUpdater.as_view()),
|
||||
path("software/", views.Software.as_view()),
|
||||
path("installer/", views.Installer.as_view()),
|
||||
path("checkin/", views.CheckIn.as_view()),
|
||||
path("syncmesh/", views.SyncMeshNodeID.as_view()),
|
||||
path("choco/", views.Choco.as_view()),
|
||||
path("winupdates/", views.WinUpdates.as_view()),
|
||||
path("superseded/", views.SupersededWinUpdate.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,69 +1,89 @@
|
||||
import asyncio
|
||||
import os
|
||||
import requests
|
||||
from loguru import logger
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from django.http import HttpResponse
|
||||
from rest_framework import serializers
|
||||
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
from accounts.models import User
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from checks.serializers import CheckRunnerGetSerializerV3
|
||||
from agents.models import Agent
|
||||
from agents.serializers import WinAgentSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
|
||||
from winupdate.serializers import ApprovedUpdateSerializer
|
||||
|
||||
from agents.tasks import (
|
||||
agent_recovery_email_task,
|
||||
agent_recovery_sms_task,
|
||||
get_wmi_detail_task,
|
||||
sync_salt_modules_task,
|
||||
)
|
||||
from winupdate.tasks import check_for_updates_task
|
||||
from software.tasks import get_installed_software, install_chocolatey
|
||||
from checks.models import Check
|
||||
from checks.serializers import CheckRunnerGetSerializer
|
||||
from checks.utils import bytes2human
|
||||
from tacticalrmm.utils import notify_error
|
||||
from software.models import InstalledSoftware
|
||||
from tacticalrmm.utils import SoftwareList, filter_software, notify_error, reload_nats
|
||||
from winupdate.models import WinUpdate, WinUpdatePolicy
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Hello(APIView):
|
||||
"""
|
||||
The agent's checkin endpoint
|
||||
patch: called every 30 to 120 seconds
|
||||
post: called on agent windows service startup
|
||||
"""
|
||||
class CheckIn(APIView):
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def patch(self, request):
|
||||
updated = False
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if pyver.parse(request.data["version"]) > pyver.parse(
|
||||
agent.version
|
||||
) or pyver.parse(request.data["version"]) == pyver.parse(
|
||||
settings.LATEST_AGENT_VER
|
||||
):
|
||||
updated = True
|
||||
agent.version = request.data["version"]
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["version", "last_seen"])
|
||||
|
||||
# change agent update pending status to completed if agent has just updated
|
||||
if (
|
||||
updated
|
||||
and agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).exists()
|
||||
):
|
||||
agent.pendingactions.filter(
|
||||
action_type="agentupdate", status="pending"
|
||||
).update(status="completed")
|
||||
|
||||
# handles any alerting actions
|
||||
agent.handle_alert(checkin=True)
|
||||
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
||||
if recovery is not None:
|
||||
recovery.last_run = djangotime.now()
|
||||
recovery.save(update_fields=["last_run"])
|
||||
handle_agent_recovery_task.delay(pk=recovery.pk)
|
||||
return Response("ok")
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
agent.handle_pending_actions()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
if request.data["func"] == "disks":
|
||||
disks = request.data["disks"]
|
||||
new = []
|
||||
# python agent
|
||||
if isinstance(disks, dict):
|
||||
for k, v in disks.items():
|
||||
new.append(v)
|
||||
else:
|
||||
# golang agent
|
||||
for disk in disks:
|
||||
tmp = {}
|
||||
for k, v in disk.items():
|
||||
for _, _ in disk.items():
|
||||
tmp["device"] = disk["device"]
|
||||
tmp["fstype"] = disk["fstype"]
|
||||
tmp["total"] = bytes2human(disk["total"])
|
||||
@@ -72,53 +92,173 @@ class Hello(APIView):
|
||||
tmp["percent"] = int(disk["percent"])
|
||||
new.append(tmp)
|
||||
|
||||
if request.data["logged_in_username"] == "None":
|
||||
serializer.save(last_seen=djangotime.now(), disks=new)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(disks=new)
|
||||
return Response("ok")
|
||||
|
||||
if request.data["func"] == "loggedonuser":
|
||||
if request.data["logged_in_username"] != "None":
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_logged_in_user=request.data["logged_in_username"])
|
||||
return Response("ok")
|
||||
|
||||
if request.data["func"] == "software":
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
serializer.save(
|
||||
last_seen=djangotime.now(),
|
||||
disks=new,
|
||||
last_logged_in_user=request.data["logged_in_username"],
|
||||
s = agent.installedsoftware_set.first()
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("ok")
|
||||
|
||||
# called once during tacticalagent windows service startup
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if not agent.choco_installed:
|
||||
asyncio.run(agent.nats_cmd({"func": "installchoco"}, wait=False))
|
||||
|
||||
time.sleep(0.5)
|
||||
asyncio.run(agent.nats_cmd({"func": "getwinupdates"}, wait=False))
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SyncMeshNodeID(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if agent.mesh_node_id != request.data["nodeid"]:
|
||||
agent.mesh_node_id = request.data["nodeid"]
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Choco(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.choco_installed = request.data["installed"]
|
||||
agent.save(update_fields=["choco_installed"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WinUpdates(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy: str = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["needs_reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
asyncio.run(agent.nats_cmd({"func": "rebootnow"}, wait=False))
|
||||
logger.info(f"{agent.hostname} is rebooting after updates were installed.")
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
u = agent.winupdates.filter(guid=request.data["guid"]).last()
|
||||
success: bool = request.data["success"]
|
||||
if success:
|
||||
u.result = "success"
|
||||
u.downloaded = True
|
||||
u.installed = True
|
||||
u.date_installed = djangotime.now()
|
||||
u.save(
|
||||
update_fields=[
|
||||
"result",
|
||||
"downloaded",
|
||||
"installed",
|
||||
"date_installed",
|
||||
]
|
||||
)
|
||||
else:
|
||||
u.result = "failed"
|
||||
u.save(update_fields=["result"])
|
||||
|
||||
if agent.agentoutages.exists() and agent.agentoutages.last().is_active:
|
||||
last_outage = agent.agentoutages.last()
|
||||
last_outage.recovery_time = djangotime.now()
|
||||
last_outage.save(update_fields=["recovery_time"])
|
||||
|
||||
if agent.overdue_email_alert:
|
||||
agent_recovery_email_task.delay(pk=last_outage.pk)
|
||||
if agent.overdue_text_alert:
|
||||
agent_recovery_sms_task.delay(pk=last_outage.pk)
|
||||
|
||||
recovery = agent.recoveryactions.filter(last_run=None).last()
|
||||
if recovery is not None:
|
||||
recovery.last_run = djangotime.now()
|
||||
recovery.save(update_fields=["last_run"])
|
||||
return Response(recovery.send())
|
||||
|
||||
# get any pending actions
|
||||
if agent.pendingactions.filter(status="pending").exists():
|
||||
agent.handle_pending_actions()
|
||||
|
||||
agent.delete_superseded_updates()
|
||||
return Response("ok")
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
updates = request.data["wua_updates"]
|
||||
for update in updates:
|
||||
if agent.winupdates.filter(guid=update["guid"]).exists():
|
||||
u = agent.winupdates.filter(guid=update["guid"]).last()
|
||||
u.downloaded = update["downloaded"]
|
||||
u.installed = update["installed"]
|
||||
u.save(update_fields=["downloaded", "installed"])
|
||||
else:
|
||||
try:
|
||||
kb = "KB" + update["kb_article_ids"][0]
|
||||
except:
|
||||
continue
|
||||
|
||||
serializer = WinAgentSerializer(instance=agent, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save(last_seen=djangotime.now())
|
||||
WinUpdate(
|
||||
agent=agent,
|
||||
guid=update["guid"],
|
||||
kb=kb,
|
||||
title=update["title"],
|
||||
installed=update["installed"],
|
||||
downloaded=update["downloaded"],
|
||||
description=update["description"],
|
||||
severity=update["severity"],
|
||||
categories=update["categories"],
|
||||
category_ids=update["category_ids"],
|
||||
kb_article_ids=update["kb_article_ids"],
|
||||
more_info_urls=update["more_info_urls"],
|
||||
support_url=update["support_url"],
|
||||
revision_number=update["revision_number"],
|
||||
).save()
|
||||
|
||||
sync_salt_modules_task.delay(agent.pk)
|
||||
get_installed_software.delay(agent.pk)
|
||||
get_wmi_detail_task.delay(agent.pk)
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": True}
|
||||
)
|
||||
agent.delete_superseded_updates()
|
||||
|
||||
if not agent.choco_installed:
|
||||
install_chocolatey.delay(agent.pk, wait=True)
|
||||
# more superseded updates cleanup
|
||||
if pyver.parse(agent.version) <= pyver.parse("1.4.2"):
|
||||
for u in agent.winupdates.filter(
|
||||
date_installed__isnull=True, result="failed"
|
||||
).exclude(installed=True):
|
||||
u.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SupersededWinUpdate(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
updates = agent.winupdates.filter(guid=request.data["guid"])
|
||||
for u in updates:
|
||||
u.delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -133,38 +273,33 @@ class CheckRunner(APIView):
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
|
||||
|
||||
ret = {
|
||||
"agent": agent.pk,
|
||||
"check_interval": agent.check_interval,
|
||||
"checks": CheckRunnerGetSerializerV3(checks, many=True).data,
|
||||
"checks": CheckRunnerGetSerializer(checks, many=True).data,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def patch(self, request):
|
||||
from logs.models import AuditLog
|
||||
|
||||
check = get_object_or_404(Check, pk=request.data["id"])
|
||||
check.last_run = djangotime.now()
|
||||
check.save(update_fields=["last_run"])
|
||||
status = check.handle_checkv2(request.data)
|
||||
|
||||
# create audit entry
|
||||
AuditLog.objects.create(
|
||||
username=check.agent.hostname,
|
||||
agent=check.agent.hostname,
|
||||
object_type="agent",
|
||||
action="check_run",
|
||||
message=f"{check.readable_desc} was run on {check.agent.hostname}. Status: {status}",
|
||||
after_value=Check.serialize(check),
|
||||
)
|
||||
|
||||
return Response(status)
|
||||
|
||||
|
||||
class CheckRunnerInterval(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
return Response({"agent": agent.pk, "check_interval": agent.check_interval})
|
||||
|
||||
|
||||
class TaskRunner(APIView):
|
||||
"""
|
||||
For the windows golang agent
|
||||
@@ -191,6 +326,8 @@ class TaskRunner(APIView):
|
||||
serializer.save(last_run=djangotime.now())
|
||||
|
||||
new_task = AutomatedTask.objects.get(pk=task.pk)
|
||||
new_task.handle_alert()
|
||||
|
||||
AuditLog.objects.create(
|
||||
username=agent.hostname,
|
||||
agent=agent.hostname,
|
||||
@@ -203,159 +340,6 @@ class TaskRunner(APIView):
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SaltMinion(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
ret = {
|
||||
"latestVer": settings.LATEST_SALT_VER,
|
||||
"currentVer": agent.salt_ver,
|
||||
"salt_id": agent.salt_id,
|
||||
"downloadURL": agent.winsalt_dl,
|
||||
}
|
||||
return Response(ret)
|
||||
|
||||
def post(self, request):
|
||||
# accept the salt key
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
if agent.salt_id != request.data["saltid"]:
|
||||
return notify_error("Salt keys do not match")
|
||||
|
||||
try:
|
||||
resp = requests.post(
|
||||
f"http://{settings.SALT_HOST}:8123/run",
|
||||
json=[
|
||||
{
|
||||
"client": "wheel",
|
||||
"fun": "key.accept",
|
||||
"match": request.data["saltid"],
|
||||
"username": settings.SALT_USERNAME,
|
||||
"password": settings.SALT_PASSWORD,
|
||||
"eauth": "pam",
|
||||
}
|
||||
],
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
return notify_error("No communication between agent and salt-api")
|
||||
|
||||
try:
|
||||
data = resp.json()["return"][0]["data"]
|
||||
minion = data["return"]["minions"][0]
|
||||
except Exception:
|
||||
return notify_error("Key error")
|
||||
|
||||
if data["success"] and minion == request.data["saltid"]:
|
||||
return Response("Salt key was accepted")
|
||||
else:
|
||||
return notify_error("Not accepted")
|
||||
|
||||
def patch(self, request):
|
||||
# sync modules
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
r = agent.salt_api_cmd(timeout=45, func="saltutil.sync_modules")
|
||||
|
||||
if r == "timeout" or r == "error":
|
||||
return notify_error("Failed to sync salt modules")
|
||||
|
||||
if isinstance(r, list) and any("modules" in i for i in r):
|
||||
return Response("Successfully synced salt modules")
|
||||
elif isinstance(r, list) and not r:
|
||||
return Response("Modules are already in sync")
|
||||
else:
|
||||
return notify_error(f"Failed to sync salt modules: {str(r)}")
|
||||
|
||||
def put(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
agent.salt_ver = request.data["ver"]
|
||||
agent.save(update_fields=["salt_ver"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class WinUpdater(APIView):
|
||||
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, agentid):
|
||||
agent = get_object_or_404(Agent, agent_id=agentid)
|
||||
agent.delete_superseded_updates()
|
||||
patches = agent.winupdates.filter(action="approve").exclude(installed=True)
|
||||
return Response(ApprovedUpdateSerializer(patches, many=True).data)
|
||||
|
||||
# agent sends patch results as it's installing them
|
||||
def patch(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
kb = request.data["kb"]
|
||||
results = request.data["results"]
|
||||
update = agent.winupdates.get(kb=kb)
|
||||
|
||||
if results == "error" or results == "failed":
|
||||
update.result = results
|
||||
update.save(update_fields=["result"])
|
||||
elif results == "success":
|
||||
update.result = "success"
|
||||
update.downloaded = True
|
||||
update.installed = True
|
||||
update.date_installed = djangotime.now()
|
||||
update.save(
|
||||
update_fields=[
|
||||
"result",
|
||||
"downloaded",
|
||||
"installed",
|
||||
"date_installed",
|
||||
]
|
||||
)
|
||||
elif results == "alreadyinstalled":
|
||||
update.result = "success"
|
||||
update.downloaded = True
|
||||
update.installed = True
|
||||
update.save(update_fields=["result", "downloaded", "installed"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# agent calls this after it's finished installing all patches
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
reboot_policy = agent.get_patch_policy().reboot_after_install
|
||||
reboot = False
|
||||
|
||||
if reboot_policy == "always":
|
||||
reboot = True
|
||||
|
||||
if request.data["reboot"]:
|
||||
if reboot_policy == "required":
|
||||
reboot = True
|
||||
elif reboot_policy == "never":
|
||||
agent.needs_reboot = True
|
||||
agent.save(update_fields=["needs_reboot"])
|
||||
|
||||
if reboot:
|
||||
r = agent.salt_api_cmd(
|
||||
timeout=15,
|
||||
func="system.reboot",
|
||||
arg=7,
|
||||
kwargs={"in_seconds": True},
|
||||
)
|
||||
|
||||
if r == "timeout" or r == "error" or (isinstance(r, bool) and not r):
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
f"{agent.hostname} is rebooting after updates were installed."
|
||||
)
|
||||
else:
|
||||
check_for_updates_task.apply_async(
|
||||
queue="wupdate", kwargs={"pk": agent.pk, "wait": False}
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class SysInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
@@ -371,21 +355,6 @@ class SysInfo(APIView):
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class MeshInfo(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def get(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
return Response(agent.mesh_node_id)
|
||||
|
||||
def patch(self, request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
agent.mesh_node_id = request.data["nodeidhex"]
|
||||
agent.save(update_fields=["mesh_node_id"])
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class MeshExe(APIView):
|
||||
""" Sends the mesh exe to the installer """
|
||||
|
||||
@@ -448,9 +417,7 @@ class NewAgent(APIView):
|
||||
else:
|
||||
WinUpdatePolicy(agent=agent).save()
|
||||
|
||||
# Generate policies for new agent
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
reload_nats()
|
||||
|
||||
# create agent install audit record
|
||||
AuditLog.objects.create(
|
||||
@@ -469,3 +436,42 @@ class NewAgent(APIView):
|
||||
"token": token.key,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class Software(APIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agent_id"])
|
||||
raw: SoftwareList = request.data["software"]
|
||||
if not isinstance(raw, list):
|
||||
return notify_error("err")
|
||||
|
||||
sw = filter_software(raw)
|
||||
if not InstalledSoftware.objects.filter(agent=agent).exists():
|
||||
InstalledSoftware(agent=agent, software=sw).save()
|
||||
else:
|
||||
s = agent.installedsoftware_set.first()
|
||||
s.software = sw
|
||||
s.save(update_fields=["software"])
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class Installer(APIView):
|
||||
def get(self, request):
|
||||
# used to check if token is valid. will return 401 if not
|
||||
return Response("ok")
|
||||
|
||||
def post(self, request):
|
||||
if "version" not in request.data:
|
||||
return notify_error("Invalid data")
|
||||
|
||||
ver = request.data["version"]
|
||||
if pyver.parse(ver) < pyver.parse(settings.LATEST_AGENT_VER):
|
||||
return notify_error(
|
||||
f"Old installer detected (version {ver} ). Latest version is {settings.LATEST_AGENT_VER} Please generate a new installer from the RMM"
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 3.0.6 on 2020-06-04 17:13
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -6,11 +6,11 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('automation', '0005_auto_20200922_1344'),
|
||||
("automation", "0005_auto_20200922_1344"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.DeleteModel(
|
||||
name='PolicyExclusions',
|
||||
name="PolicyExclusions",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-12 14:08
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('alerts', '0004_auto_20210212_1408'),
|
||||
('automation', '0006_delete_policyexclusions'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='policy',
|
||||
name='alert_template',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='policies', to='alerts.alerttemplate'),
|
||||
),
|
||||
]
|
||||
@@ -1,6 +1,6 @@
|
||||
from django.db import models
|
||||
|
||||
from agents.models import Agent
|
||||
from clients.models import Site, Client
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
@@ -10,6 +10,36 @@ class Policy(BaseAuditModel):
|
||||
desc = models.CharField(max_length=255, null=True, blank=True)
|
||||
active = models.BooleanField(default=False)
|
||||
enforced = models.BooleanField(default=False)
|
||||
alert_template = models.ForeignKey(
|
||||
"alerts.AlertTemplate",
|
||||
related_name="policies",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from automation.tasks import generate_agent_checks_from_policies_task
|
||||
|
||||
# get old policy if exists
|
||||
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
# generate agent checks only if active and enforced were changed
|
||||
if old_policy:
|
||||
if old_policy.active != self.active or old_policy.enforced != self.enforced:
|
||||
generate_agent_checks_from_policies_task.delay(
|
||||
policypk=self.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
|
||||
super(BaseAuditModel, self).delete(*args, **kwargs)
|
||||
|
||||
generate_agent_checks_task.delay(agents, create_tasks=True)
|
||||
|
||||
@property
|
||||
def is_default_server_policy(self):
|
||||
@@ -58,6 +88,10 @@ class Policy(BaseAuditModel):
|
||||
|
||||
@staticmethod
|
||||
def cascade_policy_tasks(agent):
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from logs.models import PendingAction
|
||||
|
||||
# List of all tasks to be applied
|
||||
tasks = list()
|
||||
added_task_pks = list()
|
||||
@@ -80,7 +114,7 @@ class Policy(BaseAuditModel):
|
||||
default_policy = CoreSettings.objects.first().server_policy
|
||||
client_policy = client.server_policy
|
||||
site_policy = site.server_policy
|
||||
else:
|
||||
elif agent.monitoring_type == "workstation":
|
||||
default_policy = CoreSettings.objects.first().workstation_policy
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
@@ -107,6 +141,35 @@ class Policy(BaseAuditModel):
|
||||
tasks.append(task)
|
||||
added_task_pks.append(task.pk)
|
||||
|
||||
# remove policy tasks from agent not included in policy
|
||||
for task in agent.autotasks.filter(
|
||||
parent_task__in=[
|
||||
taskpk
|
||||
for taskpk in agent_tasks_parent_pks
|
||||
if taskpk not in added_task_pks
|
||||
]
|
||||
):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
|
||||
# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
|
||||
for action in agent.pendingactions.filter(action_type="taskaction").exclude(
|
||||
status="completed"
|
||||
):
|
||||
task = AutomatedTask.objects.get(pk=action.details["task_id"])
|
||||
if (
|
||||
task.parent_task in agent_tasks_parent_pks
|
||||
and task.parent_task in added_task_pks
|
||||
):
|
||||
agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskcreate", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
return [task for task in tasks if task.pk not in agent_tasks_parent_pks]
|
||||
|
||||
@staticmethod
|
||||
@@ -132,7 +195,7 @@ class Policy(BaseAuditModel):
|
||||
default_policy = CoreSettings.objects.first().server_policy
|
||||
client_policy = client.server_policy
|
||||
site_policy = site.server_policy
|
||||
else:
|
||||
elif agent.monitoring_type == "workstation":
|
||||
default_policy = CoreSettings.objects.first().workstation_policy
|
||||
client_policy = client.workstation_policy
|
||||
site_policy = site.workstation_policy
|
||||
@@ -280,6 +343,15 @@ class Policy(BaseAuditModel):
|
||||
+ eventlog_checks
|
||||
)
|
||||
|
||||
# remove policy checks from agent that fell out of policy scope
|
||||
agent.agentchecks.filter(
|
||||
parent_check__in=[
|
||||
checkpk
|
||||
for checkpk in agent_checks_parent_pks
|
||||
if checkpk not in [check.pk for check in final_list]
|
||||
]
|
||||
).delete()
|
||||
|
||||
return [
|
||||
check for check in final_list if check.pk not in agent_checks_parent_pks
|
||||
]
|
||||
|
||||
@@ -1,20 +1,16 @@
|
||||
from rest_framework.serializers import (
|
||||
ModelSerializer,
|
||||
SerializerMethodField,
|
||||
StringRelatedField,
|
||||
ReadOnlyField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
|
||||
from .models import Policy
|
||||
from agents.models import Agent
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from clients.models import Client, Site
|
||||
from clients.models import Client
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Policy
|
||||
|
||||
|
||||
class PolicySerializer(ModelSerializer):
|
||||
class Meta:
|
||||
@@ -24,15 +20,11 @@ class PolicySerializer(ModelSerializer):
|
||||
|
||||
class PolicyTableSerializer(ModelSerializer):
|
||||
|
||||
server_clients = ClientSerializer(many=True, read_only=True)
|
||||
server_sites = SiteSerializer(many=True, read_only=True)
|
||||
workstation_clients = ClientSerializer(many=True, read_only=True)
|
||||
workstation_sites = SiteSerializer(many=True, read_only=True)
|
||||
agents = AgentHostnameSerializer(many=True, read_only=True)
|
||||
default_server_policy = ReadOnlyField(source="is_default_server_policy")
|
||||
default_workstation_policy = ReadOnlyField(source="is_default_workstation_policy")
|
||||
agents_count = SerializerMethodField(read_only=True)
|
||||
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
|
||||
alert_template = ReadOnlyField(source="alert_template.id")
|
||||
|
||||
class Meta:
|
||||
model = Policy
|
||||
@@ -78,49 +70,16 @@ class PolicyCheckSerializer(ModelSerializer):
|
||||
"assignedtask",
|
||||
"text_alert",
|
||||
"email_alert",
|
||||
"dashboard_alert",
|
||||
)
|
||||
depth = 1
|
||||
|
||||
|
||||
class AutoTasksFieldSerializer(ModelSerializer):
|
||||
assigned_check = PolicyCheckSerializer(read_only=True)
|
||||
script = ReadOnlyField(source="script.id")
|
||||
|
||||
class Meta:
|
||||
model = AutomatedTask
|
||||
fields = ("id", "enabled", "name", "schedule", "assigned_check")
|
||||
depth = 1
|
||||
|
||||
|
||||
class AutoTaskPolicySerializer(ModelSerializer):
|
||||
|
||||
autotasks = AutoTasksFieldSerializer(many=True, read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Policy
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
"autotasks",
|
||||
)
|
||||
depth = 2
|
||||
|
||||
|
||||
class RelatedClientPolicySerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = ("workstation_policy", "server_policy")
|
||||
depth = 1
|
||||
|
||||
|
||||
class RelatedSitePolicySerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = ("workstation_policy", "server_policy")
|
||||
depth = 1
|
||||
|
||||
|
||||
class RelatedAgentPolicySerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Agent
|
||||
fields = ("policy",)
|
||||
fields = "__all__"
|
||||
depth = 1
|
||||
|
||||
@@ -1,70 +1,91 @@
|
||||
from automation.models import Policy
|
||||
from checks.models import Check
|
||||
from agents.models import Agent
|
||||
|
||||
from automation.models import Policy
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_checks_from_policies_task(
|
||||
###
|
||||
# copies the policy checks to all affected agents
|
||||
#
|
||||
# clear: clears all policy checks first
|
||||
# create_tasks: also create tasks after checks are generated
|
||||
###
|
||||
policypk,
|
||||
clear=False,
|
||||
create_tasks=False,
|
||||
):
|
||||
# generates policy checks on agents affected by a policy and optionally generate automated tasks
|
||||
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
|
||||
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
for agent in policy.related_agents():
|
||||
agent.generate_checks_from_policies(clear=clear)
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies(
|
||||
clear=clear,
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents().only("pk")
|
||||
|
||||
for agent in agents:
|
||||
agent.generate_checks_from_policies()
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_checks_by_location_task(
|
||||
location, mon_type, clear=False, create_tasks=False
|
||||
):
|
||||
# generates policy checks on a list of agents and optionally generate automated tasks
|
||||
def generate_agent_checks_task(agentpks, create_tasks=False):
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
# generates policy checks on agent servers or workstations within a certain client or site and optionally generate automated tasks
|
||||
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):
|
||||
|
||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
||||
agent.generate_checks_from_policies(clear=clear)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_all_agent_checks_task(mon_type, clear=False, create_tasks=False):
|
||||
# generates policy checks on all agent servers or workstations and optionally generate automated tasks
|
||||
def generate_all_agent_checks_task(mon_type, create_tasks=False):
|
||||
for agent in Agent.objects.filter(monitoring_type=mon_type):
|
||||
agent.generate_checks_from_policies(clear=clear)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
if create_tasks:
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
# deletes a policy managed check from all agents
|
||||
def delete_policy_check_task(checkpk):
|
||||
|
||||
Check.objects.filter(parent_check=checkpk).delete()
|
||||
|
||||
|
||||
@app.task
|
||||
# updates policy managed check fields on agents
|
||||
def update_policy_check_fields_task(checkpk):
|
||||
|
||||
check = Check.objects.get(pk=checkpk)
|
||||
|
||||
Check.objects.filter(parent_check=checkpk).update(
|
||||
threshold=check.threshold,
|
||||
warning_threshold=check.warning_threshold,
|
||||
error_threshold=check.error_threshold,
|
||||
alert_severity=check.alert_severity,
|
||||
name=check.name,
|
||||
disk=check.disk,
|
||||
fails_b4_alert=check.fails_b4_alert,
|
||||
ip=check.ip,
|
||||
script=check.script,
|
||||
script_args=check.script_args,
|
||||
info_return_codes=check.info_return_codes,
|
||||
warning_return_codes=check.warning_return_codes,
|
||||
timeout=check.timeout,
|
||||
pass_if_start_pending=check.pass_if_start_pending,
|
||||
pass_if_svc_not_exist=check.pass_if_svc_not_exist,
|
||||
@@ -79,28 +100,37 @@ def update_policy_check_fields_task(checkpk):
|
||||
search_last_days=check.search_last_days,
|
||||
email_alert=check.email_alert,
|
||||
text_alert=check.text_alert,
|
||||
dashboard_alert=check.dashboard_alert,
|
||||
)
|
||||
|
||||
|
||||
@app.task
|
||||
def generate_agent_tasks_from_policies_task(policypk, clear=False):
|
||||
# generates policy tasks on agents affected by a policy
|
||||
def generate_agent_tasks_from_policies_task(policypk):
|
||||
|
||||
policy = Policy.objects.get(pk=policypk)
|
||||
for agent in policy.related_agents():
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
|
||||
if policy.is_default_server_policy and policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
|
||||
elif policy.is_default_server_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
elif policy.is_default_workstation_policy:
|
||||
agents = Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk", "monitoring_type"
|
||||
)
|
||||
else:
|
||||
agents = policy.related_agents().only("pk")
|
||||
|
||||
@app.task
|
||||
def generate_agent_tasks_by_location_task(location, mon_type, clear=False):
|
||||
|
||||
for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
|
||||
agent.generate_tasks_from_policies(clear=clear)
|
||||
for agent in agents:
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
|
||||
@app.task
|
||||
def delete_policy_autotask_task(taskpk):
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
from autotasks.models import AutomatedTask
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
||||
delete_win_task_schedule.delay(task.pk)
|
||||
@@ -115,13 +145,23 @@ def run_win_policy_autotask_task(task_pks):
|
||||
|
||||
|
||||
@app.task
|
||||
def update_policy_task_fields_task(taskpk, enabled):
|
||||
from autotasks.models import AutomatedTask
|
||||
def update_policy_task_fields_task(taskpk, update_agent=False):
|
||||
from autotasks.tasks import enable_or_disable_win_task
|
||||
|
||||
tasks = AutomatedTask.objects.filter(parent_task=taskpk)
|
||||
task = AutomatedTask.objects.get(pk=taskpk)
|
||||
|
||||
tasks.update(enabled=enabled)
|
||||
AutomatedTask.objects.filter(parent_task=taskpk).update(
|
||||
alert_severity=task.alert_severity,
|
||||
email_alert=task.email_alert,
|
||||
text_alert=task.text_alert,
|
||||
dashboard_alert=task.dashboard_alert,
|
||||
script=task.script,
|
||||
script_args=task.script_args,
|
||||
name=task.name,
|
||||
timeout=task.timeout,
|
||||
enabled=task.enabled,
|
||||
)
|
||||
|
||||
for autotask in tasks:
|
||||
enable_or_disable_win_task(autotask.pk, enabled)
|
||||
if update_agent:
|
||||
for task in AutomatedTask.objects.filter(parent_task=taskpk):
|
||||
enable_or_disable_win_task.delay(task.pk, task.enabled)
|
||||
|
||||
@@ -1,21 +1,20 @@
|
||||
from unittest.mock import patch
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from model_bakery import baker, seq
|
||||
from itertools import cycle
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker, seq
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
|
||||
from .serializers import (
|
||||
PolicyTableSerializer,
|
||||
PolicySerializer,
|
||||
PolicyTaskStatusSerializer,
|
||||
AutoTaskPolicySerializer,
|
||||
PolicyOverviewSerializer,
|
||||
PolicyCheckStatusSerializer,
|
||||
AutoTasksFieldSerializer,
|
||||
PolicyCheckSerializer,
|
||||
RelatedAgentPolicySerializer,
|
||||
RelatedSitePolicySerializer,
|
||||
RelatedClientPolicySerializer,
|
||||
PolicyCheckStatusSerializer,
|
||||
PolicyOverviewSerializer,
|
||||
PolicySerializer,
|
||||
PolicyTableSerializer,
|
||||
PolicyTaskStatusSerializer,
|
||||
)
|
||||
|
||||
|
||||
@@ -91,7 +90,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
||||
def test_update_policy(self, mock_checks_task):
|
||||
def test_update_policy(self, generate_agent_checks_from_policies_task):
|
||||
# returns 404 for invalid policy pk
|
||||
resp = self.client.put("/automation/policies/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
@@ -110,7 +109,7 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# only called if active or enforced are updated
|
||||
mock_checks_task.assert_not_called()
|
||||
generate_agent_checks_from_policies_task.assert_not_called()
|
||||
|
||||
data = {
|
||||
"name": "Test Policy Update",
|
||||
@@ -121,42 +120,43 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
resp = self.client.put(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
mock_checks_task.assert_called_with(
|
||||
policypk=policy.pk, clear=True, create_tasks=True
|
||||
generate_agent_checks_from_policies_task.assert_called_with(
|
||||
policypk=policy.pk, create_tasks=True
|
||||
)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
|
||||
@patch("automation.tasks.generate_agent_tasks_from_policies_task.delay")
|
||||
def test_delete_policy(self, mock_tasks_task, mock_checks_task):
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_delete_policy(self, generate_agent_checks_task):
|
||||
# returns 404 for invalid policy pk
|
||||
resp = self.client.delete("/automation/policies/500/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy")
|
||||
site = baker.make("clients.Site")
|
||||
agents = baker.make_recipe(
|
||||
"agents.agent", site=site, policy=policy, _quantity=3
|
||||
)
|
||||
url = f"/automation/policies/{policy.pk}/"
|
||||
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
mock_checks_task.assert_called_with(policypk=policy.pk, clear=True)
|
||||
mock_tasks_task.assert_called_with(policypk=policy.pk, clear=True)
|
||||
generate_agent_checks_task.assert_called_with(
|
||||
[agent.pk for agent in agents], create_tasks=True
|
||||
)
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_all_policy_tasks(self):
|
||||
# returns 404 for invalid policy pk
|
||||
resp = self.client.get("/automation/500/policyautomatedtasks/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# create policy with tasks
|
||||
policy = baker.make("automation.Policy")
|
||||
baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
url = f"/automation/{policy.pk}/policyautomatedtasks/"
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
serializer = AutoTaskPolicySerializer(policy)
|
||||
serializer = AutoTasksFieldSerializer(tasks, many=True)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
@@ -182,8 +182,9 @@ class TestPolicyViews(TacticalTestCase):
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_policy_check_status(self):
|
||||
# set data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
policy = baker.make("automation.Policy")
|
||||
policy_diskcheck = baker.make_recipe("checks.diskspace_check", policy=policy)
|
||||
managed_check = baker.make_recipe(
|
||||
@@ -248,274 +249,6 @@ class TestPolicyViews(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("agents.models.Agent.generate_checks_from_policies")
|
||||
@patch("automation.tasks.generate_agent_checks_by_location_task.delay")
|
||||
def test_update_policy_add(
|
||||
self,
|
||||
mock_checks_location_task,
|
||||
mock_checks_task,
|
||||
):
|
||||
url = f"/automation/related/"
|
||||
|
||||
# data setup
|
||||
policy = baker.make("automation.Policy")
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
|
||||
# test add client to policy data
|
||||
client_server_payload = {
|
||||
"type": "client",
|
||||
"pk": agent.client.pk,
|
||||
"server_policy": policy.pk,
|
||||
}
|
||||
client_workstation_payload = {
|
||||
"type": "client",
|
||||
"pk": agent.client.pk,
|
||||
"workstation_policy": policy.pk,
|
||||
}
|
||||
|
||||
# test add site to policy data
|
||||
site_server_payload = {
|
||||
"type": "site",
|
||||
"pk": agent.site.pk,
|
||||
"server_policy": policy.pk,
|
||||
}
|
||||
site_workstation_payload = {
|
||||
"type": "site",
|
||||
"pk": agent.site.pk,
|
||||
"workstation_policy": policy.pk,
|
||||
}
|
||||
|
||||
# test add agent to policy data
|
||||
agent_payload = {"type": "agent", "pk": agent.pk, "policy": policy.pk}
|
||||
|
||||
# test client server policy add
|
||||
resp = self.client.post(url, client_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test client workstation policy add
|
||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test site add server policy
|
||||
resp = self.client.post(url, site_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test site add workstation policy
|
||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test agent add
|
||||
resp = self.client.post(url, agent_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_task.assert_called_with(clear=True)
|
||||
mock_checks_task.reset_mock()
|
||||
|
||||
# Adding the same relations shouldn't trigger mocks
|
||||
resp = self.client.post(url, client_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
mock_checks_location_task.assert_not_called()
|
||||
|
||||
resp = self.client.post(url, site_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
mock_checks_location_task.assert_not_called()
|
||||
|
||||
resp = self.client.post(url, agent_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_task.assert_not_called()
|
||||
|
||||
# test remove client from policy data
|
||||
client_server_payload = {"type": "client", "pk": client.pk, "server_policy": 0}
|
||||
client_workstation_payload = {
|
||||
"type": "client",
|
||||
"pk": client.pk,
|
||||
"workstation_policy": 0,
|
||||
}
|
||||
|
||||
# test remove site from policy data
|
||||
site_server_payload = {"type": "site", "pk": site.pk, "server_policy": 0}
|
||||
site_workstation_payload = {
|
||||
"type": "site",
|
||||
"pk": site.pk,
|
||||
"workstation_policy": 0,
|
||||
}
|
||||
|
||||
# test remove agent from policy
|
||||
agent_payload = {"type": "agent", "pk": agent.pk, "policy": 0}
|
||||
|
||||
# test client server policy remove
|
||||
resp = self.client.post(url, client_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test client workstation policy remove
|
||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test site remove server policy
|
||||
resp = self.client.post(url, site_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test site remove workstation policy
|
||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# called because the relation changed
|
||||
mock_checks_location_task.assert_called_with(
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
mock_checks_location_task.reset_mock()
|
||||
|
||||
# test agent remove
|
||||
resp = self.client.post(url, agent_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
# called because the relation changed
|
||||
mock_checks_task.assert_called_with(clear=True)
|
||||
mock_checks_task.reset_mock()
|
||||
|
||||
# adding the same relations shouldn't trigger mocks
|
||||
resp = self.client.post(url, client_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
resp = self.client.post(url, client_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# shouldn't be called since nothing changed
|
||||
mock_checks_location_task.assert_not_called()
|
||||
|
||||
resp = self.client.post(url, site_server_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
resp = self.client.post(url, site_workstation_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# shouldn't be called since nothing changed
|
||||
mock_checks_location_task.assert_not_called()
|
||||
|
||||
resp = self.client.post(url, agent_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
# shouldn't be called since nothing changed
|
||||
mock_checks_task.assert_not_called()
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_relation_by_type(self):
|
||||
url = f"/automation/related/"
|
||||
|
||||
# data setup
|
||||
policy = baker.make("automation.Policy")
|
||||
client = baker.make("clients.Client", workstation_policy=policy)
|
||||
site = baker.make("clients.Site", server_policy=policy)
|
||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
||||
|
||||
client_payload = {"type": "client", "pk": client.pk}
|
||||
|
||||
# test add site to policy
|
||||
site_payload = {"type": "site", "pk": site.pk}
|
||||
|
||||
# test add agent to policy
|
||||
agent_payload = {"type": "agent", "pk": agent.pk}
|
||||
|
||||
# test client relation get
|
||||
serializer = RelatedClientPolicySerializer(client)
|
||||
resp = self.client.patch(url, client_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
# test site relation get
|
||||
serializer = RelatedSitePolicySerializer(site)
|
||||
resp = self.client.patch(url, site_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
# test agent relation get
|
||||
serializer = RelatedAgentPolicySerializer(agent)
|
||||
resp = self.client.patch(url, agent_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
|
||||
invalid_payload = {"type": "bad_type", "pk": 5}
|
||||
|
||||
resp = self.client.patch(url, invalid_payload, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_get_policy_task_status(self):
|
||||
|
||||
# policy with a task
|
||||
@@ -749,11 +482,10 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
checks = self.create_checks(policy=policy)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
|
||||
agent = baker.make_recipe("agents.agent", policy=policy)
|
||||
|
||||
# test policy assigned to agent
|
||||
generate_agent_checks_from_policies_task(policy.id, clear=True)
|
||||
generate_agent_checks_from_policies_task(policy.id)
|
||||
|
||||
# make sure all checks were created. should be 7
|
||||
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
|
||||
@@ -766,16 +498,19 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
if check.check_type == "diskspace":
|
||||
self.assertEqual(check.parent_check, checks[0].id)
|
||||
self.assertEqual(check.disk, checks[0].disk)
|
||||
self.assertEqual(check.threshold, checks[0].threshold)
|
||||
self.assertEqual(check.error_threshold, checks[0].error_threshold)
|
||||
self.assertEqual(check.warning_threshold, checks[0].warning_threshold)
|
||||
elif check.check_type == "ping":
|
||||
self.assertEqual(check.parent_check, checks[1].id)
|
||||
self.assertEqual(check.ip, checks[1].ip)
|
||||
elif check.check_type == "cpuload":
|
||||
self.assertEqual(check.parent_check, checks[2].id)
|
||||
self.assertEqual(check.threshold, checks[2].threshold)
|
||||
self.assertEqual(check.error_threshold, checks[0].error_threshold)
|
||||
self.assertEqual(check.warning_threshold, checks[0].warning_threshold)
|
||||
elif check.check_type == "memory":
|
||||
self.assertEqual(check.parent_check, checks[3].id)
|
||||
self.assertEqual(check.threshold, checks[3].threshold)
|
||||
self.assertEqual(check.error_threshold, checks[0].error_threshold)
|
||||
self.assertEqual(check.warning_threshold, checks[0].warning_threshold)
|
||||
elif check.check_type == "winsvc":
|
||||
self.assertEqual(check.parent_check, checks[4].id)
|
||||
self.assertEqual(check.svc_name, checks[4].svc_name)
|
||||
@@ -811,71 +546,246 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
7,
|
||||
)
|
||||
|
||||
def test_generating_agent_policy_checks_by_location(self):
|
||||
from .tasks import generate_agent_checks_by_location_task
|
||||
@patch("automation.tasks.generate_agent_checks_by_location_task.delay")
|
||||
def test_generating_agent_policy_checks_by_location(
|
||||
self, generate_agent_checks_by_location_task
|
||||
):
|
||||
from automation.tasks import (
|
||||
generate_agent_checks_by_location_task as generate_agent_checks,
|
||||
)
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
clients = baker.make(
|
||||
"clients.Client",
|
||||
_quantity=2,
|
||||
server_policy=policy,
|
||||
workstation_policy=policy,
|
||||
)
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
|
||||
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
|
||||
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
|
||||
agent1 = baker.make_recipe("agents.server_agent", site=sites[1])
|
||||
agent2 = baker.make_recipe("agents.workstation_agent", site=sites[3])
|
||||
|
||||
generate_agent_checks_by_location_task(
|
||||
{"site_id": sites[0].id},
|
||||
"server",
|
||||
clear=True,
|
||||
baker.make(
|
||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||
)
|
||||
|
||||
server_agent = baker.make_recipe("agents.server_agent")
|
||||
workstation_agent = baker.make_recipe("agents.workstation_agent")
|
||||
|
||||
# no checks should be preset on agents
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
|
||||
# set workstation policy on client and policy checks should be there
|
||||
workstation_agent.client.workstation_policy = policy
|
||||
workstation_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# server_agent should have policy checks and the other agents should not
|
||||
# make sure the checks were added
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
|
||||
# remove workstation policy from client
|
||||
workstation_agent.client.workstation_policy = None
|
||||
workstation_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": workstation_agent.client.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure the checks were removed
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
|
||||
# set server policy on client and policy checks should be there
|
||||
server_agent.client.server_policy = policy
|
||||
server_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure checks were added
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0)
|
||||
|
||||
generate_agent_checks_by_location_task(
|
||||
{"site__client_id": clients[0].id},
|
||||
"workstation",
|
||||
clear=True,
|
||||
# remove server policy from client
|
||||
server_agent.client.server_policy = None
|
||||
server_agent.client.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
# workstation_agent should now have policy checks and the other agents should not
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site__client_id": server_agent.client.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure checks were removed
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
|
||||
# set workstation policy on site and policy checks should be there
|
||||
workstation_agent.site.workstation_policy = policy
|
||||
workstation_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure checks were added on workstation
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 7
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).agentchecks.count(), 0)
|
||||
self.assertEqual(Agent.objects.get(pk=agent2.id).agentchecks.count(), 0)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
|
||||
def test_generating_policy_checks_for_all_agents(self):
|
||||
from .tasks import generate_all_agent_checks_task
|
||||
# remove workstation policy from site
|
||||
workstation_agent.site.workstation_policy = None
|
||||
workstation_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": workstation_agent.site.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure checks were removed
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
|
||||
# set server policy on site and policy checks should be there
|
||||
server_agent.site.server_policy = policy
|
||||
server_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure checks were added
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 7)
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
|
||||
# remove server policy from site
|
||||
server_agent.site.server_policy = None
|
||||
server_agent.site.save()
|
||||
|
||||
# should trigger task in save method on core
|
||||
generate_agent_checks_by_location_task.assert_called_with(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_by_location_task.reset_mock()
|
||||
|
||||
generate_agent_checks(
|
||||
location={"site_id": server_agent.site.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# make sure checks were removed
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).agentchecks.count(), 0)
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
|
||||
)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_generating_policy_checks_for_all_agents(
|
||||
self, generate_all_agent_checks_task
|
||||
):
|
||||
from core.models import CoreSettings
|
||||
|
||||
from .tasks import generate_all_agent_checks_task as generate_all_checks
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
|
||||
site = baker.make("clients.Site")
|
||||
server_agents = baker.make_recipe("agents.server_agent", site=site, _quantity=3)
|
||||
workstation_agents = baker.make_recipe(
|
||||
"agents.workstation_agent", site=site, _quantity=4
|
||||
)
|
||||
server_agents = baker.make_recipe("agents.server_agent", _quantity=3)
|
||||
workstation_agents = baker.make_recipe("agents.workstation_agent", _quantity=4)
|
||||
core = CoreSettings.objects.first()
|
||||
core.server_policy = policy
|
||||
core.workstation_policy = policy
|
||||
core.save()
|
||||
|
||||
generate_all_agent_checks_task("server", clear=True, create_tasks=True)
|
||||
generate_all_agent_checks_task.assert_called_with(
|
||||
mon_type="server", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_all_checks(mon_type="server", create_tasks=True)
|
||||
|
||||
# all servers should have 7 checks
|
||||
for agent in server_agents:
|
||||
@@ -884,24 +794,50 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
for agent in workstation_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||
|
||||
generate_all_agent_checks_task("workstation", clear=True, create_tasks=True)
|
||||
core.server_policy = None
|
||||
core.workstation_policy = policy
|
||||
core.save()
|
||||
|
||||
# all agents should have 7 checks now
|
||||
generate_all_agent_checks_task.assert_any_call(
|
||||
mon_type="workstation", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.assert_any_call(
|
||||
mon_type="server", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_all_checks(mon_type="server", create_tasks=True)
|
||||
generate_all_checks(mon_type="workstation", create_tasks=True)
|
||||
|
||||
# all workstations should have 7 checks
|
||||
for agent in server_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||
|
||||
for agent in workstation_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
|
||||
core.workstation_policy = None
|
||||
core.save()
|
||||
|
||||
generate_all_agent_checks_task.assert_called_with(
|
||||
mon_type="workstation", create_tasks=True
|
||||
)
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_all_checks(mon_type="workstation", create_tasks=True)
|
||||
|
||||
# nothing should have the checks
|
||||
for agent in server_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||
|
||||
for agent in workstation_agents:
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)
|
||||
|
||||
def test_delete_policy_check(self):
|
||||
from .tasks import delete_policy_check_task
|
||||
from .models import Policy
|
||||
from .tasks import delete_policy_check_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent.generate_checks_from_policies()
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
# make sure agent has 7 checks
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
@@ -920,13 +856,12 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
)
|
||||
|
||||
def update_policy_check_fields(self):
|
||||
from .tasks import update_policy_check_fields_task
|
||||
from .models import Policy
|
||||
from .tasks import update_policy_check_fields_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
self.create_checks(policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
agent.generate_checks_from_policies()
|
||||
|
||||
# make sure agent has 7 checks
|
||||
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)
|
||||
@@ -958,10 +893,9 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
tasks = baker.make(
|
||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||
)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
generate_agent_tasks_from_policies_task(policy.id, clear=True)
|
||||
generate_agent_tasks_from_policies_task(policy.id)
|
||||
|
||||
agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()
|
||||
|
||||
@@ -980,63 +914,19 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
self.assertEqual(task.parent_task, tasks[2].id)
|
||||
self.assertEqual(task.name, tasks[2].name)
|
||||
|
||||
def test_generate_agent_tasks_by_location(self):
|
||||
from .tasks import generate_agent_tasks_by_location_task
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make(
|
||||
"autotasks.AutomatedTask", policy=policy, name=seq("Task"), _quantity=3
|
||||
)
|
||||
clients = baker.make(
|
||||
"clients.Client",
|
||||
_quantity=2,
|
||||
server_policy=policy,
|
||||
workstation_policy=policy,
|
||||
)
|
||||
sites = baker.make("clients.Site", client=cycle(clients), _quantity=4)
|
||||
server_agent = baker.make_recipe("agents.server_agent", site=sites[0])
|
||||
workstation_agent = baker.make_recipe("agents.workstation_agent", site=sites[2])
|
||||
agent1 = baker.make_recipe("agents.agent", site=sites[1])
|
||||
agent2 = baker.make_recipe("agents.agent", site=sites[3])
|
||||
|
||||
generate_agent_tasks_by_location_task(
|
||||
{"site_id": sites[0].id}, "server", clear=True
|
||||
)
|
||||
|
||||
# all servers in site1 and site2 should have 3 tasks
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 0
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3)
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0)
|
||||
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
|
||||
|
||||
generate_agent_tasks_by_location_task(
|
||||
{"site__client_id": clients[0].id}, "workstation", clear=True
|
||||
)
|
||||
|
||||
# all workstations in Default1 should have 3 tasks
|
||||
self.assertEqual(
|
||||
Agent.objects.get(pk=workstation_agent.id).autotasks.count(), 3
|
||||
)
|
||||
self.assertEqual(Agent.objects.get(pk=server_agent.id).autotasks.count(), 3)
|
||||
self.assertEqual(Agent.objects.get(pk=agent1.id).autotasks.count(), 0)
|
||||
self.assertEqual(Agent.objects.get(pk=agent2.id).autotasks.count(), 0)
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
def test_delete_policy_tasks(self, delete_win_task_schedule):
|
||||
from .tasks import delete_policy_autotask_task
|
||||
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent.generate_tasks_from_policies()
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
delete_policy_autotask_task(tasks[0].id)
|
||||
|
||||
delete_win_task_schedule.assert_called_with(agent.autotasks.first().id)
|
||||
delete_win_task_schedule.assert_called_with(
|
||||
agent.autotasks.get(parent_task=tasks[0].id).id
|
||||
)
|
||||
|
||||
@patch("autotasks.tasks.run_win_task.delay")
|
||||
def test_run_policy_task(self, run_win_task):
|
||||
@@ -1051,22 +941,46 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
for task in tasks:
|
||||
run_win_task.assert_any_call(task.id)
|
||||
|
||||
def test_update_policy_tasks(self):
|
||||
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
|
||||
def test_update_policy_tasks(self, enable_or_disable_win_task):
|
||||
from .tasks import update_policy_task_fields_task
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
tasks = baker.make(
|
||||
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
|
||||
)
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.server_agent", site=site, policy=policy)
|
||||
agent.generate_tasks_from_policies()
|
||||
agent = baker.make_recipe("agents.server_agent", policy=policy)
|
||||
|
||||
tasks[0].enabled = False
|
||||
tasks[0].save()
|
||||
|
||||
update_policy_task_fields_task(tasks[0].id, enabled=False)
|
||||
update_policy_task_fields_task(tasks[0].id)
|
||||
enable_or_disable_win_task.assert_not_called()
|
||||
|
||||
self.assertFalse(AutomatedTask.objects.get(parent_task=tasks[0].id).enabled)
|
||||
self.assertFalse(agent.autotasks.get(parent_task=tasks[0].id).enabled)
|
||||
|
||||
update_policy_task_fields_task(tasks[0].id, update_agent=True)
|
||||
enable_or_disable_win_task.assert_called_with(
|
||||
agent.autotasks.get(parent_task=tasks[0].id).id, False
|
||||
)
|
||||
|
||||
@patch("agents.models.Agent.generate_tasks_from_policies")
|
||||
@patch("agents.models.Agent.generate_checks_from_policies")
|
||||
def test_generate_agent_checks_with_agentpks(self, generate_checks, generate_tasks):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
agents = baker.make_recipe("agents.agent", _quantity=5)
|
||||
|
||||
# reset because creating agents triggers it
|
||||
generate_checks.reset_mock()
|
||||
generate_tasks.reset_mock()
|
||||
|
||||
generate_agent_checks_task([agent.pk for agent in agents])
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
generate_tasks.assert_not_called()
|
||||
generate_checks.reset_mock()
|
||||
|
||||
generate_agent_checks_task([agent.pk for agent in agents], create_tasks=True)
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("policies/", views.GetAddPolicies.as_view()),
|
||||
path("policies/<int:pk>/related/", views.GetRelated.as_view()),
|
||||
path("related/", views.GetRelated.as_view()),
|
||||
path("policies/overview/", views.OverviewPolicy.as_view()),
|
||||
path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
|
||||
path("<int:pk>/policychecks/", views.PolicyCheck.as_view()),
|
||||
|
||||
@@ -1,39 +1,27 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from .models import Policy
|
||||
from agents.models import Agent
|
||||
from clients.models import Client, Site
|
||||
from checks.models import Check
|
||||
from autotasks.models import AutomatedTask
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from clients.models import Client
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
|
||||
from .models import Policy
|
||||
from .serializers import (
|
||||
AutoTasksFieldSerializer,
|
||||
PolicyCheckSerializer,
|
||||
PolicyCheckStatusSerializer,
|
||||
PolicyOverviewSerializer,
|
||||
PolicySerializer,
|
||||
PolicyTableSerializer,
|
||||
PolicyOverviewSerializer,
|
||||
PolicyCheckStatusSerializer,
|
||||
PolicyCheckSerializer,
|
||||
PolicyTaskStatusSerializer,
|
||||
AutoTaskPolicySerializer,
|
||||
RelatedClientPolicySerializer,
|
||||
RelatedSitePolicySerializer,
|
||||
RelatedAgentPolicySerializer,
|
||||
)
|
||||
|
||||
from .tasks import (
|
||||
generate_agent_checks_from_policies_task,
|
||||
generate_agent_checks_by_location_task,
|
||||
generate_agent_tasks_from_policies_task,
|
||||
run_win_policy_autotask_task,
|
||||
)
|
||||
from .tasks import run_win_policy_autotask_task
|
||||
|
||||
|
||||
class GetAddPolicies(APIView):
|
||||
@@ -72,30 +60,14 @@ class GetUpdateDeletePolicy(APIView):
|
||||
def put(self, request, pk):
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
|
||||
old_active = policy.active
|
||||
old_enforced = policy.enforced
|
||||
|
||||
serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
saved_policy = serializer.save()
|
||||
|
||||
# Generate agent checks only if active and enforced were changed
|
||||
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
|
||||
generate_agent_checks_from_policies_task.delay(
|
||||
policypk=policy.pk,
|
||||
clear=(not saved_policy.active or not saved_policy.enforced),
|
||||
create_tasks=(saved_policy.active != old_active),
|
||||
)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
|
||||
# delete all managed policy checks off of agents
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
|
||||
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
|
||||
policy.delete()
|
||||
get_object_or_404(Policy, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -104,8 +76,8 @@ class PolicyAutoTask(APIView):
|
||||
|
||||
# tasks associated with policy
|
||||
def get(self, request, pk):
|
||||
policy = get_object_or_404(Policy, pk=pk)
|
||||
return Response(AutoTaskPolicySerializer(policy).data)
|
||||
tasks = AutomatedTask.objects.filter(policy=pk)
|
||||
return Response(AutoTasksFieldSerializer(tasks, many=True).data)
|
||||
|
||||
# get status of all tasks
|
||||
def patch(self, request, task):
|
||||
@@ -184,213 +156,12 @@ class GetRelated(APIView):
|
||||
).data
|
||||
|
||||
response["agents"] = AgentHostnameSerializer(
|
||||
policy.related_agents(),
|
||||
policy.related_agents().only("pk", "hostname"),
|
||||
many=True,
|
||||
).data
|
||||
|
||||
return Response(response)
|
||||
|
||||
# update agents, clients, sites to policy
|
||||
def post(self, request):
|
||||
|
||||
related_type = request.data["type"]
|
||||
pk = request.data["pk"]
|
||||
|
||||
# workstation policy is set
|
||||
if (
|
||||
"workstation_policy" in request.data
|
||||
and request.data["workstation_policy"] != 0
|
||||
):
|
||||
policy = get_object_or_404(Policy, pk=request.data["workstation_policy"])
|
||||
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check and see if workstation policy changed and regenerate policies
|
||||
if (
|
||||
not client.workstation_policy
|
||||
or client.workstation_policy
|
||||
and client.workstation_policy.pk != policy.pk
|
||||
):
|
||||
client.workstation_policy = policy
|
||||
client.save()
|
||||
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
# Check and see if workstation policy changed and regenerate policies
|
||||
if (
|
||||
not site.workstation_policy
|
||||
or site.workstation_policy
|
||||
and site.workstation_policy.pk != policy.pk
|
||||
):
|
||||
site.workstation_policy = policy
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# server policy is set
|
||||
if "server_policy" in request.data and request.data["server_policy"] != 0:
|
||||
policy = get_object_or_404(Policy, pk=request.data["server_policy"])
|
||||
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check and see if server policy changed and regenerate policies
|
||||
if (
|
||||
not client.server_policy
|
||||
or client.server_policy
|
||||
and client.server_policy.pk != policy.pk
|
||||
):
|
||||
client.server_policy = policy
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
# Check and see if server policy changed and regenerate policies
|
||||
if (
|
||||
not site.server_policy
|
||||
or site.server_policy
|
||||
and site.server_policy.pk != policy.pk
|
||||
):
|
||||
site.server_policy = policy
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# If workstation policy was cleared
|
||||
if (
|
||||
"workstation_policy" in request.data
|
||||
and request.data["workstation_policy"] == 0
|
||||
):
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check if workstation policy is set and update it to None
|
||||
if client.workstation_policy:
|
||||
|
||||
client.workstation_policy = None
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
# Check if workstation policy is set and update it to None
|
||||
if site.workstation_policy:
|
||||
|
||||
site.workstation_policy = None
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.id},
|
||||
mon_type="workstation",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# server policy cleared
|
||||
if "server_policy" in request.data and request.data["server_policy"] == 0:
|
||||
|
||||
if related_type == "client":
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
# Check if server policy is set and update it to None
|
||||
if client.server_policy:
|
||||
|
||||
client.server_policy = None
|
||||
client.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": client.id},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if related_type == "site":
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
# Check if server policy is set and update it to None
|
||||
if site.server_policy:
|
||||
|
||||
site.server_policy = None
|
||||
site.save()
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": site.pk},
|
||||
mon_type="server",
|
||||
clear=True,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
# agent policies
|
||||
if related_type == "agent":
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
|
||||
if "policy" in request.data and request.data["policy"] != 0:
|
||||
policy = Policy.objects.get(pk=request.data["policy"])
|
||||
|
||||
# Check and see if policy changed and regenerate policies
|
||||
if not agent.policy or agent.policy and agent.policy.pk != policy.pk:
|
||||
agent.policy = policy
|
||||
agent.save()
|
||||
agent.generate_checks_from_policies(clear=True)
|
||||
agent.generate_tasks_from_policies(clear=True)
|
||||
else:
|
||||
if agent.policy:
|
||||
agent.policy = None
|
||||
agent.save()
|
||||
agent.generate_checks_from_policies(clear=True)
|
||||
agent.generate_tasks_from_policies(clear=True)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
# view to get policies set on client, site, and workstation
|
||||
def patch(self, request):
|
||||
related_type = request.data["type"]
|
||||
|
||||
# client, site, or agent pk
|
||||
pk = request.data["pk"]
|
||||
|
||||
if related_type == "agent":
|
||||
agent = Agent.objects.get(pk=pk)
|
||||
return Response(RelatedAgentPolicySerializer(agent).data)
|
||||
|
||||
if related_type == "site":
|
||||
site = Site.objects.get(pk=pk)
|
||||
return Response(RelatedSitePolicySerializer(site).data)
|
||||
|
||||
if related_type == "client":
|
||||
client = Client.objects.get(pk=pk)
|
||||
return Response(RelatedClientPolicySerializer(client).data)
|
||||
|
||||
content = {"error": "Data was submitted incorrectly"}
|
||||
return Response(content, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
class UpdatePatchPolicy(APIView):
|
||||
|
||||
@@ -422,11 +193,15 @@ class UpdatePatchPolicy(APIView):
|
||||
|
||||
agents = None
|
||||
if "client" in request.data:
|
||||
agents = Agent.objects.filter(site__client_id=request.data["client"])
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site__client_id=request.data["client"]
|
||||
)
|
||||
elif "site" in request.data:
|
||||
agents = Agent.objects.filter(site_id=request.data["site"])
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").filter(
|
||||
site_id=request.data["site"]
|
||||
)
|
||||
else:
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.prefetch_related("winupdatepolicy").only("pk")
|
||||
|
||||
for agent in agents:
|
||||
winupdatepolicy = agent.winupdatepolicy.get()
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from autotasks.tasks import remove_orphaned_win_tasks
|
||||
|
||||
@@ -7,7 +8,7 @@ class Command(BaseCommand):
|
||||
help = "Checks for orphaned tasks on all agents and removes them"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
agents = Agent.objects.all()
|
||||
agents = Agent.objects.only("pk", "last_seen", "overdue_time", "offline_time")
|
||||
online = [i for i in agents if i.status == "online"]
|
||||
for agent in online:
|
||||
remove_orphaned_win_tasks.delay(agent.pk)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Generated by Django 3.0.6 on 2020-05-31 01:23
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.3 on 2020-11-29 09:12
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0008_auto_20201030_1515"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="automatedtask",
|
||||
name="run_time_bit_weekdays",
|
||||
field=models.IntegerField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,34 @@
|
||||
from django.db import migrations
|
||||
|
||||
from tacticalrmm.utils import get_bit_days
|
||||
|
||||
DAYS_OF_WEEK = {
|
||||
0: "Monday",
|
||||
1: "Tuesday",
|
||||
2: "Wednesday",
|
||||
3: "Thursday",
|
||||
4: "Friday",
|
||||
5: "Saturday",
|
||||
6: "Sunday",
|
||||
}
|
||||
|
||||
|
||||
def migrate_days(apps, schema_editor):
|
||||
AutomatedTask = apps.get_model("autotasks", "AutomatedTask")
|
||||
for task in AutomatedTask.objects.exclude(run_time_days__isnull=True).exclude(
|
||||
run_time_days=[]
|
||||
):
|
||||
run_days = [DAYS_OF_WEEK.get(day) for day in task.run_time_days]
|
||||
task.run_time_bit_weekdays = get_bit_days(run_days)
|
||||
task.save(update_fields=["run_time_bit_weekdays"])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("autotasks", "0009_automatedtask_run_time_bit_weekdays"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_days),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-27 22:21
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0010_migrate_days_to_bitdays'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='alert_severity',
|
||||
field=models.CharField(blank=True, choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='None', max_length=30, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-28 04:17
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0011_automatedtask_alert_severity'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='email_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='text_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='text_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-29 03:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0012_auto_20210128_0417'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='alert_severity',
|
||||
field=models.CharField(choices=[('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=30),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-01-29 21:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0013_auto_20210129_0307'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='dashboard_alert',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-05 17:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0014_automatedtask_dashboard_alert'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_email_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_text_sent',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-05 21:17
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0015_auto_20210205_1728'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('passing', 'Passing'), ('failing', 'Failing'), ('pending', 'Pending')], default='pending', max_length=30),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,29 @@
|
||||
# Generated by Django 3.1.4 on 2021-02-10 15:12
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('autotasks', '0016_automatedtask_status'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='email_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_email_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='resolved_text_sent',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='automatedtask',
|
||||
name='text_sent',
|
||||
),
|
||||
]
|
||||
@@ -1,13 +1,20 @@
|
||||
import pytz
|
||||
import datetime as dt
|
||||
import random
|
||||
import string
|
||||
import datetime as dt
|
||||
|
||||
from django.db import models
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from automation.models import Policy
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from logs.models import BaseAuditModel
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
RUN_TIME_DAY_CHOICES = [
|
||||
(0, "Monday"),
|
||||
@@ -32,6 +39,12 @@ SYNC_STATUS_CHOICES = [
|
||||
("pendingdeletion", "Pending Deletion on Agent"),
|
||||
]
|
||||
|
||||
TASK_STATUS_CHOICES = [
|
||||
("passing", "Passing"),
|
||||
("failing", "Failing"),
|
||||
("pending", "Pending"),
|
||||
]
|
||||
|
||||
|
||||
class AutomatedTask(BaseAuditModel):
|
||||
agent = models.ForeignKey(
|
||||
@@ -42,7 +55,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
blank=True,
|
||||
)
|
||||
policy = models.ForeignKey(
|
||||
Policy,
|
||||
"automation.Policy",
|
||||
related_name="autotasks",
|
||||
null=True,
|
||||
blank=True,
|
||||
@@ -69,6 +82,8 @@ class AutomatedTask(BaseAuditModel):
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
name = models.CharField(max_length=255)
|
||||
run_time_bit_weekdays = models.IntegerField(null=True, blank=True)
|
||||
# run_time_days is deprecated, use bit weekdays
|
||||
run_time_days = ArrayField(
|
||||
models.IntegerField(choices=RUN_TIME_DAY_CHOICES, null=True, blank=True),
|
||||
null=True,
|
||||
@@ -91,9 +106,18 @@ class AutomatedTask(BaseAuditModel):
|
||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||
last_run = models.DateTimeField(null=True, blank=True)
|
||||
enabled = models.BooleanField(default=True)
|
||||
status = models.CharField(
|
||||
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
|
||||
)
|
||||
sync_status = models.CharField(
|
||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
|
||||
)
|
||||
alert_severity = models.CharField(
|
||||
max_length=30, choices=SEVERITY_CHOICES, default="info"
|
||||
)
|
||||
email_alert = models.BooleanField(default=False)
|
||||
text_alert = models.BooleanField(default=False)
|
||||
dashboard_alert = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
@@ -107,20 +131,11 @@ class AutomatedTask(BaseAuditModel):
|
||||
elif self.task_type == "runonce":
|
||||
return f'Run once on {self.run_time_date.strftime("%m/%d/%Y %I:%M%p")}'
|
||||
elif self.task_type == "scheduled":
|
||||
ret = []
|
||||
for i in self.run_time_days:
|
||||
for j in RUN_TIME_DAY_CHOICES:
|
||||
if i in j:
|
||||
ret.append(j[1][0:3])
|
||||
|
||||
run_time_nice = dt.datetime.strptime(
|
||||
self.run_time_minute, "%H:%M"
|
||||
).strftime("%I:%M %p")
|
||||
|
||||
if len(ret) == 7:
|
||||
return f"Every day at {run_time_nice}"
|
||||
else:
|
||||
days = ",".join(ret)
|
||||
days = bitdays_to_string(self.run_time_bit_weekdays)
|
||||
return f"{days} at {run_time_nice}"
|
||||
|
||||
@property
|
||||
@@ -147,28 +162,56 @@ class AutomatedTask(BaseAuditModel):
|
||||
def create_policy_task(self, agent=None, policy=None):
|
||||
from .tasks import create_win_task_schedule
|
||||
|
||||
# if policy is present, then this task is being copied to another policy
|
||||
# if agent is present, then this task is being created on an agent from a policy
|
||||
# exit if neither are set or if both are set
|
||||
if not agent and not policy or agent and policy:
|
||||
return
|
||||
|
||||
assigned_check = None
|
||||
|
||||
# get correct assigned check to task if set
|
||||
if agent and self.assigned_check:
|
||||
assigned_check = agent.agentchecks.get(parent_check=self.assigned_check.pk)
|
||||
# check if there is a matching check on the agent
|
||||
if agent.agentchecks.filter(parent_check=self.assigned_check.pk).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
parent_check=self.assigned_check.pk
|
||||
).first()
|
||||
# check was overriden by agent and we need to use that agents check
|
||||
else:
|
||||
if agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type, overriden_by_policy=True
|
||||
).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type,
|
||||
overriden_by_policy=True,
|
||||
).first()
|
||||
elif policy and self.assigned_check:
|
||||
assigned_check = policy.policychecks.get(name=self.assigned_check.name)
|
||||
if policy.policychecks.filter(name=self.assigned_check.name).exists():
|
||||
assigned_check = policy.policychecks.filter(
|
||||
name=self.assigned_check.name
|
||||
).first()
|
||||
else:
|
||||
assigned_check = policy.policychecks.filter(
|
||||
check_type=self.assigned_check.check_type
|
||||
).first()
|
||||
|
||||
task = AutomatedTask.objects.create(
|
||||
agent=agent,
|
||||
policy=policy,
|
||||
managed_by_policy=bool(agent),
|
||||
parent_task=(self.pk if agent else None),
|
||||
alert_severity=self.alert_severity,
|
||||
email_alert=self.email_alert,
|
||||
text_alert=self.text_alert,
|
||||
dashboard_alert=self.dashboard_alert,
|
||||
script=self.script,
|
||||
script_args=self.script_args,
|
||||
assigned_check=assigned_check,
|
||||
name=self.name,
|
||||
run_time_days=self.run_time_days,
|
||||
run_time_minute=self.run_time_minute,
|
||||
run_time_bit_weekdays=self.run_time_bit_weekdays,
|
||||
run_time_date=self.run_time_date,
|
||||
task_type=self.task_type,
|
||||
win_task_name=self.win_task_name,
|
||||
@@ -178,3 +221,215 @@ class AutomatedTask(BaseAuditModel):
|
||||
)
|
||||
|
||||
create_win_task_schedule.delay(task.pk)
|
||||
|
||||
def handle_alert(self) -> None:
|
||||
from alerts.models import Alert
|
||||
from autotasks.tasks import (
|
||||
handle_resolved_task_email_alert,
|
||||
handle_resolved_task_sms_alert,
|
||||
handle_task_email_alert,
|
||||
handle_task_sms_alert,
|
||||
)
|
||||
|
||||
self.status = "failing" if self.retcode != 0 else "passing"
|
||||
self.save()
|
||||
|
||||
# return if agent is in maintenance mode
|
||||
if self.agent.maintenance_mode:
|
||||
return
|
||||
|
||||
# see if agent has an alert template and use that
|
||||
alert_template = self.agent.get_alert_template()
|
||||
|
||||
# resolve alert if it exists
|
||||
if self.status == "passing":
|
||||
if Alert.objects.filter(assigned_task=self, resolved=False).exists():
|
||||
alert = Alert.objects.get(assigned_task=self, resolved=False)
|
||||
alert.resolve()
|
||||
|
||||
# check if resolved email should be send
|
||||
if (
|
||||
not alert.resolved_email_sent
|
||||
and self.email_alert
|
||||
or alert_template
|
||||
and alert_template.task_email_on_resolved
|
||||
):
|
||||
handle_resolved_task_email_alert.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved text should be sent
|
||||
if (
|
||||
not alert.resolved_sms_sent
|
||||
and self.text_alert
|
||||
or alert_template
|
||||
and alert_template.task_text_on_resolved
|
||||
):
|
||||
handle_resolved_task_sms_alert.delay(pk=alert.pk)
|
||||
|
||||
# check if resolved script should be run
|
||||
if (
|
||||
alert_template
|
||||
and alert_template.resolved_action
|
||||
and not alert.resolved_action_run
|
||||
):
|
||||
|
||||
r = self.agent.run_script(
|
||||
scriptpk=alert_template.resolved_action.pk,
|
||||
args=alert_template.resolved_action_args,
|
||||
timeout=alert_template.resolved_action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
if type(r) == dict:
|
||||
alert.resolved_action_retcode = r["retcode"]
|
||||
alert.resolved_action_stdout = r["stdout"]
|
||||
alert.resolved_action_stderr = r["stderr"]
|
||||
alert.resolved_action_execution_time = "{:.4f}".format(
|
||||
r["execution_time"]
|
||||
)
|
||||
alert.resolved_action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Resolved action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} resolved alert for task: {self.name}"
|
||||
)
|
||||
|
||||
# create alert if task is failing
|
||||
else:
|
||||
if not Alert.objects.filter(assigned_task=self, resolved=False).exists():
|
||||
alert = Alert.create_task_alert(self)
|
||||
else:
|
||||
alert = Alert.objects.get(assigned_task=self, resolved=False)
|
||||
|
||||
# check if alert severity changed on task and update the alert
|
||||
if self.alert_severity != alert.severity:
|
||||
alert.severity = self.alert_severity
|
||||
alert.save(update_fields=["severity"])
|
||||
|
||||
# create alert in dashboard if enabled
|
||||
if (
|
||||
self.dashboard_alert
|
||||
or alert_template
|
||||
and alert_template.task_always_alert
|
||||
):
|
||||
alert.hidden = False
|
||||
alert.save()
|
||||
|
||||
# send email if enabled
|
||||
if (
|
||||
not alert.email_sent
|
||||
and self.email_alert
|
||||
or alert_template
|
||||
and self.alert_severity in alert_template.task_email_alert_severity
|
||||
and alert_template.check_always_email
|
||||
):
|
||||
handle_task_email_alert.delay(
|
||||
pk=alert.pk,
|
||||
alert_template=alert_template.check_periodic_alert_days
|
||||
if alert_template
|
||||
else None,
|
||||
)
|
||||
|
||||
# send text if enabled
|
||||
if (
|
||||
not alert.sms_sent
|
||||
and self.text_alert
|
||||
or alert_template
|
||||
and self.alert_severity in alert_template.task_text_alert_severity
|
||||
and alert_template.check_always_text
|
||||
):
|
||||
handle_task_sms_alert.delay(
|
||||
pk=alert.pk,
|
||||
alert_template=alert_template.check_periodic_alert_days
|
||||
if alert_template
|
||||
else None,
|
||||
)
|
||||
|
||||
# check if any scripts should be run
|
||||
if alert_template and alert_template.action and not alert.action_run:
|
||||
r = self.agent.run_script(
|
||||
scriptpk=alert_template.action.pk,
|
||||
args=alert_template.action_args,
|
||||
timeout=alert_template.action_timeout,
|
||||
wait=True,
|
||||
full=True,
|
||||
run_on_any=True,
|
||||
)
|
||||
|
||||
# command was successful
|
||||
if type(r) == dict:
|
||||
alert.action_retcode = r["retcode"]
|
||||
alert.action_stdout = r["stdout"]
|
||||
alert.action_stderr = r["stderr"]
|
||||
alert.action_execution_time = "{:.4f}".format(r["execution_time"])
|
||||
alert.action_run = djangotime.now()
|
||||
alert.save()
|
||||
else:
|
||||
logger.error(
|
||||
f"Failure action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} failure alert for task: {self.name}"
|
||||
)
|
||||
|
||||
def send_email(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
alert_template = self.agent.get_alert_template()
|
||||
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template)
|
||||
|
||||
def send_sms(self):
|
||||
|
||||
from core.models import CoreSettings
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
alert_template = self.agent.get_alert_template()
|
||||
|
||||
if self.agent:
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Failed"
|
||||
else:
|
||||
subject = f"{self} Failed"
|
||||
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_sms(body, alert_template=alert_template)
|
||||
|
||||
def send_resolved_email(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
alert_template = self.agent.get_alert_template()
|
||||
|
||||
CORE = CoreSettings.objects.first()
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
|
||||
CORE.send_mail(subject, body, alert_template=alert_template)
|
||||
|
||||
def send_resolved_sms(self):
|
||||
from core.models import CoreSettings
|
||||
|
||||
alert_template = self.agent.get_alert_template()
|
||||
CORE = CoreSettings.objects.first()
|
||||
subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
|
||||
body = (
|
||||
subject
|
||||
+ f" - Return code: {self.retcode}\nStdout:{self.stdout}\nStderr: {self.stderr}"
|
||||
)
|
||||
CORE.send_sms(body, alert_template=alert_template)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user