Compare commits
391 Commits
[Commit list: 391 entries — author, SHA1, and date for each commit in this comparison.]
@@ -1,7 +1,6 @@
FROM python:3.9.2-slim

ENV TACTICAL_DIR /opt/tactical
ENV TACTICAL_GO_DIR /usr/local/rmmgo
ENV TACTICAL_READY_FILE ${TACTICAL_DIR}/tmp/tactical.ready
ENV WORKSPACE_DIR /workspace
ENV TACTICAL_USER tactical
@@ -9,14 +8,11 @@ ENV VIRTUAL_ENV ${WORKSPACE_DIR}/api/tacticalrmm/env
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

EXPOSE 8000
EXPOSE 8000 8383 8005

RUN groupadd -g 1000 tactical && \
useradd -u 1000 -g 1000 tactical

# Copy Go Files
COPY --from=golang:1.16 /usr/local/go ${TACTICAL_GO_DIR}/go

# Copy Dev python reqs
COPY ./requirements.txt /

@@ -2,6 +2,7 @@ version: '3.4'

services:
api-dev:
container_name: trmm-api-dev
image: api-dev
restart: always
build:
@@ -21,9 +22,10 @@ services:
- tactical-backend

app-dev:
image: node:12-alpine
container_name: trmm-app-dev
image: node:14-alpine
restart: always
command: /bin/sh -c "npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
command: /bin/sh -c "npm install npm@latest -g && npm install && npm run serve -- --host 0.0.0.0 --port ${APP_PORT}"
working_dir: /workspace/web
volumes:
- ..:/workspace:cached
@@ -36,6 +38,7 @@ services:

# nats
nats-dev:
container_name: trmm-nats-dev
image: ${IMAGE_REPO}tactical-nats:${VERSION}
restart: always
environment:
@@ -55,6 +58,7 @@ services:

# meshcentral container
meshcentral-dev:
container_name: trmm-meshcentral-dev
image: ${IMAGE_REPO}tactical-meshcentral:${VERSION}
restart: always
environment:
@@ -77,6 +81,7 @@ services:

# mongodb container for meshcentral
mongodb-dev:
container_name: trmm-mongodb-dev
image: mongo:4.4
restart: always
environment:
@@ -92,6 +97,7 @@ services:

# postgres database for api service
postgres-dev:
container_name: trmm-postgres-dev
image: postgres:13-alpine
restart: always
environment:
@@ -107,6 +113,7 @@ services:

# redis container for celery tasks
redis-dev:
container_name: trmm-redis-dev
restart: always
image: redis:6.0-alpine
networks:
@@ -115,6 +122,7 @@ services:
- tactical-redis

init-dev:
container_name: trmm-init-dev
image: api-dev
build:
context: .
@@ -143,6 +151,7 @@ services:

# container for celery worker service
celery-dev:
container_name: trmm-celery-dev
image: api-dev
build:
context: .
@@ -160,6 +169,7 @@ services:

# container for celery beat service
celerybeat-dev:
container_name: trmm-celerybeat-dev
image: api-dev
build:
context: .
@@ -175,8 +185,29 @@ services:
- postgres-dev
- redis-dev

nginx-dev:
# container for websockets communication
websockets-dev:
container_name: trmm-websockets-dev
image: api-dev
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-websockets-dev"]
restart: always
networks:
dev:
aliases:
- tactical-websockets
volumes:
- tactical-data-dev:/opt/tactical
- ..:/workspace:cached
depends_on:
- postgres-dev
- redis-dev

# container for tactical reverse proxy
nginx-dev:
container_name: trmm-nginx-dev
image: ${IMAGE_REPO}tactical-nginx:${VERSION}
restart: always
environment:
@@ -196,6 +227,21 @@ services:
volumes:
- tactical-data-dev:/opt/tactical

mkdocs-dev:
container_name: trmm-mkdocs-dev
image: api-dev
restart: always
build:
context: .
dockerfile: ./api.dockerfile
command: ["tactical-mkdocs-dev"]
ports:
- "8005:8005"
volumes:
- ..:/workspace:cached
networks:
- dev

volumes:
tactical-data-dev:
postgres-data-dev:

@@ -136,10 +136,11 @@ if [ "$1" = 'tactical-init-dev' ]; then
webenv="$(cat << EOF
PROD_URL = "${HTTP_PROTOCOL}://${API_HOST}"
DEV_URL = "${HTTP_PROTOCOL}://${API_HOST}"
APP_URL = https://${APP_HOST}
APP_URL = "https://${APP_HOST}"
DOCKER_BUILD = 1
EOF
)"
echo "${webenv}" | tee ${WORKSPACE_DIR}/web/.env > /dev/null
echo "${webenv}" | tee "${WORKSPACE_DIR}"/web/.env > /dev/null

# chown everything to tactical user
chown -R "${TACTICAL_USER}":"${TACTICAL_USER}" "${WORKSPACE_DIR}"
@@ -150,9 +151,6 @@ EOF
fi

if [ "$1" = 'tactical-api' ]; then
cp "${WORKSPACE_DIR}"/api/tacticalrmm/core/goinstaller/bin/goversioninfo /usr/local/bin/goversioninfo
chmod +x /usr/local/bin/goversioninfo

check_tactical_ready
"${VIRTUAL_ENV}"/bin/python manage.py runserver 0.0.0.0:"${API_PORT}"
fi
@@ -167,3 +165,13 @@ if [ "$1" = 'tactical-celerybeat-dev' ]; then
test -f "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid" && rm "${WORKSPACE_DIR}/api/tacticalrmm/celerybeat.pid"
"${VIRTUAL_ENV}"/bin/celery -A tacticalrmm beat -l debug
fi

if [ "$1" = 'tactical-websockets-dev' ]; then
check_tactical_ready
"${VIRTUAL_ENV}"/bin/daphne tacticalrmm.asgi:application --port 8383 -b 0.0.0.0
fi

if [ "$1" = 'tactical-mkdocs-dev' ]; then
cd "${WORKSPACE_DIR}/docs"
"${VIRTUAL_ENV}"/bin/mkdocs serve
fi

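The new tactical-websockets-dev entrypoint serves the Django project over ASGI with Daphne on port 8383, which is also why `channels` is added to the dev requirements and 8383 is now exposed in the dev dockerfile. As a rough sketch of the kind of ASGI module such a command expects (the consumer and the ws/ route below are illustrative assumptions, not the project's actual routing):

# minimal Channels asgi.py sketch; `daphne tacticalrmm.asgi:application` points at `application`
import os

from channels.auth import AuthMiddlewareStack
from channels.generic.websocket import AsyncJsonWebsocketConsumer
from channels.routing import ProtocolTypeRouter, URLRouter
from django.core.asgi import get_asgi_application
from django.urls import path

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tacticalrmm.settings")


class EchoConsumer(AsyncJsonWebsocketConsumer):
    # hypothetical consumer, standing in for the project's real dashboard consumers
    async def connect(self):
        await self.accept()

    async def receive_json(self, content, **kwargs):
        await self.send_json(content)


application = ProtocolTypeRouter(
    {
        "http": get_asgi_application(),
        "websocket": AuthMiddlewareStack(
            URLRouter([path("ws/echo/", EchoConsumer.as_asgi())])
        ),
    }
)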
@@ -1,6 +1,7 @@
# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file
asyncio-nats-client
celery
channels
Django
django-cors-headers
django-rest-knox
@@ -30,3 +31,5 @@ mkdocs-material
pymdown-extensions
Pygments
mypy
pysnooper
isort

.github/ISSUE_TEMPLATE/bug_report.md (new file, 40 lines)
@@ -0,0 +1,40 @@
---
name: Bug report
about: Create a bug report
title: ''
labels: ''
assignees: ''

---

**Server Info (please complete the following information):**
- OS: [e.g. Ubuntu 20.04, Debian 10]
- Browser: [e.g. chrome, safari]
- RMM Version (as shown in top left of web UI):

**Installation Method:**
- [ ] Standard
- [ ] Docker

**Agent Info (please complete the following information):**
- Agent version (as shown in the 'Summary' tab of the agent from web UI):
- Agent OS: [e.g. Win 10 v2004, Server 2012 R2]

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/feature_request.md (new file, 20 lines)
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
.github/workflows/deploy-docs.yml (2 changed lines)
@@ -2,7 +2,7 @@ name: Deploy Docs
on:
push:
branches:
- develop
- master

defaults:
run:

@@ -8,11 +8,9 @@
Tactical RMM is a remote monitoring & management tool for Windows computers, built with Django and Vue.\
It uses an [agent](https://github.com/wh1te909/rmmagent) written in golang and integrates with [MeshCentral](https://github.com/Ylianst/MeshCentral)

# [LIVE DEMO](https://rmm.xlawgaming.com/)
# [LIVE DEMO](https://rmm.tacticalrmm.io/)
Demo database resets every hour. Alot of features are disabled for obvious reasons due to the nature of this app.

*Tactical RMM is currently in alpha and subject to breaking changes. Use in production at your own risk.*

### [Discord Chat](https://discord.gg/upGTkWp)

### [Documentation](https://wh1te909.github.io/tacticalrmm/)

@@ -0,0 +1,18 @@
# Generated by Django 3.2 on 2021-04-11 01:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0013_user_client_tree_sort'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='client_tree_splitter',
            field=models.PositiveIntegerField(default=11),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.2 on 2021-04-11 03:03

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0014_user_client_tree_splitter'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='loading_bar_color',
            field=models.CharField(default='red', max_length=255),
        ),
    ]
@@ -36,6 +36,8 @@ class User(AbstractUser, BaseAuditModel):
client_tree_sort = models.CharField(
max_length=50, choices=CLIENT_TREE_SORT_CHOICES, default="alphafail"
)
client_tree_splitter = models.PositiveIntegerField(default=11)
loading_bar_color = models.CharField(max_length=255, default="red")

agent = models.OneToOneField(
"agents.Agent",

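The two AddField migrations shown earlier (the second depends on '0014_user_client_tree_splitter') add exactly these columns; Django generates them from this model change. A minimal sketch of producing such migrations programmatically, assuming DJANGO_SETTINGS_MODULE already points at the project settings:

# sketch: regenerate the accounts migrations from the model change
import django
from django.core.management import call_command

django.setup()  # requires DJANGO_SETTINGS_MODULE to be configured
call_command("makemigrations", "accounts", dry_run=True, verbosity=2)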
@@ -13,6 +13,8 @@ class UserUISerializer(ModelSerializer):
"agent_dblclick_action",
"default_agent_tbl_tab",
"client_tree_sort",
"client_tree_splitter",
"loading_bar_color",
]

@@ -278,6 +278,8 @@ class TestUserAction(TacticalTestCase):
"agent_dblclick_action": "editagent",
"default_agent_tbl_tab": "mixed",
"client_tree_sort": "alpha",
"client_tree_splitter": 14,
"loading_bar_color": "green",
}
r = self.client.patch(url, data, format="json")
self.assertEqual(r.status_code, 200)

@@ -1,7 +1,8 @@
from django.contrib import admin

from .models import Agent, Note, RecoveryAction
from .models import Agent, AgentCustomField, Note, RecoveryAction

admin.site.register(Agent)
admin.site.register(RecoveryAction)
admin.site.register(Note)
admin.site.register(AgentCustomField)

api/tacticalrmm/agents/migrations/0032_agentcustomfield.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# Generated by Django 3.1.7 on 2021-03-17 14:45

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0014_customfield'),
        ('agents', '0031_agent_alert_template'),
    ]

    operations = [
        migrations.CreateModel(
            name='AgentCustomField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('value', models.TextField(blank=True, null=True)),
                ('agent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='agents.agent')),
                ('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='agent_fields', to='core.customfield')),
            ],
        ),
    ]
@@ -0,0 +1,19 @@
# Generated by Django 3.1.7 on 2021-03-29 02:51

import django.contrib.postgres.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0032_agentcustomfield'),
    ]

    operations = [
        migrations.AddField(
            model_name='agentcustomfield',
            name='multiple_value',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-03-29 03:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0033_agentcustomfield_multiple_value'),
    ]

    operations = [
        migrations.AddField(
            model_name='agentcustomfield',
            name='checkbox_value',
            field=models.BooleanField(blank=True, default=False),
        ),
    ]
api/tacticalrmm/agents/migrations/0035_auto_20210329_1709.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 3.1.7 on 2021-03-29 17:09

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0034_agentcustomfield_checkbox_value'),
    ]

    operations = [
        migrations.RenameField(
            model_name='agentcustomfield',
            old_name='checkbox_value',
            new_name='bool_value',
        ),
        migrations.RenameField(
            model_name='agentcustomfield',
            old_name='value',
            new_name='string_value',
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-04-17 01:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agents', '0035_auto_20210329_1709'),
    ]

    operations = [
        migrations.AddField(
            model_name='agent',
            name='block_policy_inheritance',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -4,7 +4,7 @@ import re
import time
from collections import Counter
from distutils.version import LooseVersion
from typing import Any, Union
from typing import Any

import msgpack
import validators
@@ -13,12 +13,12 @@ from Crypto.Hash import SHA3_384
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.utils import timezone as djangotime
from loguru import logger
from nats.aio.client import Client as NATS
from nats.aio.errors import ErrTimeout
from packaging import version as pyver

from core.models import TZ_CHOICES, CoreSettings
from logs.models import BaseAuditModel
@@ -63,6 +63,7 @@ class Agent(BaseAuditModel):
max_length=255, choices=TZ_CHOICES, null=True, blank=True
)
maintenance_mode = models.BooleanField(default=False)
block_policy_inheritance = models.BooleanField(default=False)
alert_template = models.ForeignKey(
"alerts.AlertTemplate",
related_name="agents",
@@ -96,9 +97,9 @@ class Agent(BaseAuditModel):
# or if site has changed on agent and if so generate-policies
if (
not old_agent
or old_agent
and old_agent.policy != self.policy
or old_agent.site != self.site
or (old_agent and old_agent.policy != self.policy)
or (old_agent.site != self.site)
or (old_agent.block_policy_inheritance != self.block_policy_inheritance)
):
self.generate_checks_from_policies()
self.generate_tasks_from_policies()

@@ -110,14 +111,6 @@ class Agent(BaseAuditModel):
def client(self):
return self.site.client

@property
def has_nats(self):
return pyver.parse(self.version) >= pyver.parse("1.1.0")

@property
def has_gotasks(self):
return pyver.parse(self.version) >= pyver.parse("1.1.1")

@property
def timezone(self):
# return the default timezone unless the timezone is explicity set per agent

@@ -174,7 +167,7 @@ class Agent(BaseAuditModel):

@property
def checks(self):
total, passing, failing = 0, 0, 0
total, passing, failing, warning, info = 0, 0, 0, 0, 0

if self.agentchecks.exists(): # type: ignore
for i in self.agentchecks.all(): # type: ignore
@@ -182,13 +175,20 @@ class Agent(BaseAuditModel):
if i.status == "passing":
passing += 1
elif i.status == "failing":
failing += 1
if i.alert_severity == "error":
failing += 1
elif i.alert_severity == "warning":
warning += 1
elif i.alert_severity == "info":
info += 1

ret = {
"total": total,
"passing": passing,
"failing": failing,
"has_failing_checks": failing > 0,
"warning": warning,
"info": info,
"has_failing_checks": failing > 0 or warning > 0,
}
return ret

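The reworked checks property buckets failing checks by their alert_severity, and a warning alone is now enough to set has_failing_checks. An illustrative (invented) example of the dict it returns for an agent with four checks, one per outcome:

# illustrative output shape only; counts invented
example = {
    "total": 4,
    "passing": 1,
    "failing": 1,  # failing check with alert_severity == "error"
    "warning": 1,  # failing check with alert_severity == "warning"
    "info": 1,     # failing check with alert_severity == "info"
    "has_failing_checks": True,  # failing > 0 or warning > 0
}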
@@ -203,6 +203,27 @@ class Agent(BaseAuditModel):
except:
return ["unknown cpu model"]

@property
def graphics(self):
ret, mrda = [], []
try:
graphics = self.wmi_detail["graphics"]
for i in graphics:
caption = [x["Caption"] for x in i if "Caption" in x][0]
if "microsoft remote display adapter" in caption.lower():
mrda.append("yes")
continue

ret.append([x["Caption"] for x in i if "Caption" in x][0])

# only return this if no other graphics cards
if not ret and mrda:
return "Microsoft Remote Display Adapter"

return ", ".join(ret)
except:
return "Graphics info requires agent v1.4.14"

@property
def local_ips(self):
ret = []

@@ -304,10 +325,13 @@ class Agent(BaseAuditModel):
from scripts.models import Script

script = Script.objects.get(pk=scriptpk)

parsed_args = script.parse_script_args(self, script.shell, args)

data = {
"func": "runscriptfull" if full else "runscript",
"timeout": timeout,
"script_args": args,
"script_args": parsed_args,
"payload": {
"code": script.code,
"shell": script.shell,
@@ -327,7 +351,7 @@ class Agent(BaseAuditModel):
online = [
agent
for agent in Agent.objects.only(
"pk", "last_seen", "overdue_time", "offline_time"
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
if agent.status == "online"
]

@@ -398,21 +422,34 @@ class Agent(BaseAuditModel):

# check site policy if agent policy doesn't have one
elif site.server_policy and site.server_policy.winupdatepolicy.exists():
patch_policy = site.server_policy.winupdatepolicy.get()
# make sure agent isn;t blocking policy inheritance
if not self.block_policy_inheritance:
patch_policy = site.server_policy.winupdatepolicy.get()

# if site doesn't have a patch policy check the client
elif (
site.client.server_policy
and site.client.server_policy.winupdatepolicy.exists()
):
patch_policy = site.client.server_policy.winupdatepolicy.get()
# make sure agent and site are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
):
patch_policy = site.client.server_policy.winupdatepolicy.get()

# if patch policy still doesn't exist check default policy
elif (
core_settings.server_policy
and core_settings.server_policy.winupdatepolicy.exists()
):
patch_policy = core_settings.server_policy.winupdatepolicy.get()
# make sure agent site and client are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
and not site.client.block_policy_inheritance
):
patch_policy = core_settings.server_policy.winupdatepolicy.get()

elif self.monitoring_type == "workstation":
# check agent policy first which should override client or site policy
@@ -423,21 +460,36 @@ class Agent(BaseAuditModel):
site.workstation_policy
and site.workstation_policy.winupdatepolicy.exists()
):
patch_policy = site.workstation_policy.winupdatepolicy.get()
# make sure agent isn;t blocking policy inheritance
if not self.block_policy_inheritance:
patch_policy = site.workstation_policy.winupdatepolicy.get()

# if site doesn't have a patch policy check the client
elif (
site.client.workstation_policy
and site.client.workstation_policy.winupdatepolicy.exists()
):
patch_policy = site.client.workstation_policy.winupdatepolicy.get()
# make sure agent and site are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
):
patch_policy = site.client.workstation_policy.winupdatepolicy.get()

# if patch policy still doesn't exist check default policy
elif (
core_settings.workstation_policy
and core_settings.workstation_policy.winupdatepolicy.exists()
):
patch_policy = core_settings.workstation_policy.winupdatepolicy.get()
# make sure agent site and client are not blocking inheritance
if (
not self.block_policy_inheritance
and not site.block_policy_inheritance
and not site.client.block_policy_inheritance
):
patch_policy = (
core_settings.workstation_policy.winupdatepolicy.get()
)

# if policy still doesn't exist return the agent patch policy
if not patch_policy:

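Distilled, the server branch above resolves the Windows update policy in the order agent policy → site policy → client policy → default (core) policy, and a level is only inherited when nothing beneath it has block_policy_inheritance set. A compact sketch of that logic under those assumptions (a paraphrase, not the project's code; the agent-policy branch and the final fallback are inferred from the surrounding comments):

def resolve_server_patch_policy(agent, site, core_settings):
    # attribute names mirror the model fields shown in the hunks above
    client = site.client
    patch_policy = None
    if agent.policy and agent.policy.winupdatepolicy.exists():
        patch_policy = agent.policy.winupdatepolicy.get()
    elif site.server_policy and site.server_policy.winupdatepolicy.exists():
        if not agent.block_policy_inheritance:
            patch_policy = site.server_policy.winupdatepolicy.get()
    elif client.server_policy and client.server_policy.winupdatepolicy.exists():
        if not agent.block_policy_inheritance and not site.block_policy_inheritance:
            patch_policy = client.server_policy.winupdatepolicy.get()
    elif (
        core_settings.server_policy
        and core_settings.server_policy.winupdatepolicy.exists()
    ):
        if (
            not agent.block_policy_inheritance
            and not site.block_policy_inheritance
            and not client.block_policy_inheritance
        ):
            patch_policy = core_settings.server_policy.winupdatepolicy.get()
    # fallback per the comment in the hunk ("return the agent patch policy"); exact call inferred
    return patch_policy or agent.winupdatepolicy.get()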
@@ -504,6 +556,7 @@ class Agent(BaseAuditModel):
and site.server_policy
and site.server_policy.alert_template
and site.server_policy.alert_template.is_active
and not self.block_policy_inheritance
):
templates.append(site.server_policy.alert_template)
if (
@@ -511,6 +564,7 @@ class Agent(BaseAuditModel):
and site.workstation_policy
and site.workstation_policy.alert_template
and site.workstation_policy.alert_template.is_active
and not self.block_policy_inheritance
):
templates.append(site.workstation_policy.alert_template)

@@ -524,6 +578,8 @@ class Agent(BaseAuditModel):
and client.server_policy
and client.server_policy.alert_template
and client.server_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
):
templates.append(client.server_policy.alert_template)
if (
@@ -531,15 +587,28 @@ class Agent(BaseAuditModel):
and client.workstation_policy
and client.workstation_policy.alert_template
and client.workstation_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
):
templates.append(client.workstation_policy.alert_template)

# check if alert template is on client and return
if client.alert_template and client.alert_template.is_active:
if (
client.alert_template
and client.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
):
templates.append(client.alert_template)

# check if alert template is applied globally and return
if core.alert_template and core.alert_template.is_active:
if (
core.alert_template
and core.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.alert_template)

# if agent is a workstation, check if policy with alert template is assigned to the site, client, or core
@@ -548,6 +617,9 @@ class Agent(BaseAuditModel):
and core.server_policy
and core.server_policy.alert_template
and core.server_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.server_policy.alert_template)
if (
@@ -555,6 +627,9 @@ class Agent(BaseAuditModel):
and core.workstation_policy
and core.workstation_policy.alert_template
and core.workstation_policy.alert_template.is_active
and not self.block_policy_inheritance
and not site.block_policy_inheritance
and not client.block_policy_inheritance
):
templates.append(core.workstation_policy.alert_template)

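Every alert-template lookup above gains the same kind of guard. Pulled out on its own, the rule is that a template is only inherited while no level between the agent and the template's owner has block_policy_inheritance set. A small helper sketch (hypothetical, not project code) that captures it:

def inheritance_allowed(agent, site=None, client=None) -> bool:
    # True when nothing between the agent and the template's level blocks inheritance
    if agent.block_policy_inheritance:
        return False
    if site is not None and site.block_policy_inheritance:
        return False
    if client is not None and client.block_policy_inheritance:
        return False
    return True

# e.g. a site-policy template needs inheritance_allowed(agent),
# a client-level template needs inheritance_allowed(agent, site),
# and a global/core template needs inheritance_allowed(agent, site, client).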
@@ -656,7 +731,11 @@ class Agent(BaseAuditModel):
except ErrTimeout:
ret = "timeout"
else:
ret = msgpack.loads(msg.data) # type: ignore
try:
ret = msgpack.loads(msg.data) # type: ignore
except Exception as e:
logger.error(e)
ret = str(e)

await nc.close()
return ret

@@ -704,36 +783,6 @@ class Agent(BaseAuditModel):
except:
pass

# define how the agent should handle pending actions
def handle_pending_actions(self):
pending_actions = self.pendingactions.filter(status="pending") # type: ignore

for action in pending_actions:
if action.action_type == "taskaction":
from autotasks.tasks import (
create_win_task_schedule,
delete_win_task_schedule,
enable_or_disable_win_task,
)

task_id = action.details["task_id"]

if action.details["action"] == "taskcreate":
create_win_task_schedule.delay(task_id, pending_action=action.id)
elif action.details["action"] == "tasktoggle":
enable_or_disable_win_task.delay(
task_id, action.details["value"], pending_action=action.id
)
elif action.details["action"] == "taskdelete":
delete_win_task_schedule.delay(task_id, pending_action=action.id)

# for clearing duplicate pending actions on agent
def remove_matching_pending_task_actions(self, task_id):
# remove any other pending actions on agent with same task_id
for action in self.pendingactions.filter(action_type="taskaction").exclude(status="completed"): # type: ignore
if action.details["task_id"] == task_id:
action.delete()

def should_create_alert(self, alert_template=None):
return (
self.overdue_dashboard_alert

@@ -820,12 +869,6 @@ class RecoveryAction(models.Model):
def __str__(self):
return f"{self.agent.hostname} - {self.mode}"

def send(self):
ret = {"recovery": self.mode}
if self.mode == "command":
ret["cmd"] = self.command
return ret


class Note(models.Model):
agent = models.ForeignKey(
@@ -845,3 +888,38 @@ class Note(models.Model):

def __str__(self):
return self.agent.hostname


class AgentCustomField(models.Model):
agent = models.ForeignKey(
Agent,
related_name="custom_fields",
on_delete=models.CASCADE,
)

field = models.ForeignKey(
"core.CustomField",
related_name="agent_fields",
on_delete=models.CASCADE,
)

string_value = models.TextField(null=True, blank=True)
bool_value = models.BooleanField(blank=True, default=False)
multiple_value = ArrayField(
models.TextField(null=True, blank=True),
null=True,
blank=True,
default=list,
)

def __str__(self):
return self.field

@property
def value(self):
if self.field.type == "multiple":
return self.multiple_value
elif self.field.type == "checkbox":
return self.bool_value
else:
return self.string_value

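AgentCustomField stores one row per (agent, field) pair with three typed columns, and the value property picks the column matching the CustomField's type. A hedged usage sketch — the CustomField kwargs and values here are invented for illustration:

from agents.models import Agent, AgentCustomField
from core.models import CustomField

agent = Agent.objects.first()
field = CustomField.objects.create(model="agent", type="text", name="Asset Tag")  # kwargs assumed

acf = AgentCustomField.objects.create(agent=agent, field=field, string_value="A-1042")
assert acf.value == "A-1042"  # any type other than "multiple"/"checkbox" -> string_value

field.type = "checkbox"
assert AgentCustomField(agent=agent, field=field, bool_value=True).value is True
field.type = "multiple"
assert AgentCustomField(agent=agent, field=field, multiple_value=["a", "b"]).value == ["a", "b"]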
@@ -4,7 +4,7 @@ from rest_framework import serializers
from clients.serializers import ClientSerializer
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Agent, Note
from .models import Agent, AgentCustomField, Note


class AgentSerializer(serializers.ModelSerializer):
@@ -16,6 +16,7 @@ class AgentSerializer(serializers.ModelSerializer):
local_ips = serializers.ReadOnlyField()
make_model = serializers.ReadOnlyField()
physical_disks = serializers.ReadOnlyField()
graphics = serializers.ReadOnlyField()
checks = serializers.ReadOnlyField()
timezone = serializers.ReadOnlyField()
all_timezones = serializers.SerializerMethodField()
@@ -115,14 +116,35 @@ class AgentTableSerializer(serializers.ModelSerializer):
"logged_username",
"italic",
"policy",
"block_policy_inheritance",
]
depth = 2


class AgentCustomFieldSerializer(serializers.ModelSerializer):
class Meta:
model = AgentCustomField
fields = (
"id",
"field",
"agent",
"value",
"string_value",
"bool_value",
"multiple_value",
)
extra_kwargs = {
"string_value": {"write_only": True},
"bool_value": {"write_only": True},
"multiple_value": {"write_only": True},
}


class AgentEditSerializer(serializers.ModelSerializer):
winupdatepolicy = WinUpdatePolicySerializer(many=True, read_only=True)
all_timezones = serializers.SerializerMethodField()
client = ClientSerializer(read_only=True)
custom_fields = AgentCustomFieldSerializer(many=True, read_only=True)

def get_all_timezones(self, obj):
return pytz.all_timezones
@@ -146,6 +168,7 @@ class AgentEditSerializer(serializers.ModelSerializer):
"all_timezones",
"winupdatepolicy",
"policy",
"custom_fields",
]

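Because string_value, bool_value and multiple_value are write_only, clients submit whichever typed column matches the field while reads only carry the computed value. A hedged sketch of that round trip (ids invented; assumes a saved CustomField and Agent with those pks):

from agents.serializers import AgentCustomFieldSerializer

ser = AgentCustomFieldSerializer(data={"field": 3, "agent": 42, "string_value": "123"})
ser.is_valid(raise_exception=True)
obj = ser.save()

AgentCustomFieldSerializer(obj).data
# -> {"id": ..., "field": 3, "agent": 42, "value": "123"}   (typed columns omitted on read)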
@@ -1,6 +1,7 @@
import asyncio
import datetime as dt
import random
import urllib.parse
from time import sleep
from typing import Union

@@ -10,21 +11,21 @@ from loguru import logger
from packaging import version as pyver

from agents.models import Agent
from core.models import CoreSettings
from core.models import CodeSignToken, CoreSettings
from logs.models import PendingAction
from scripts.models import Script
from tacticalrmm.celery import app
from tacticalrmm.utils import run_nats_api_cmd

logger.configure(**settings.LOG_CONFIG)


def agent_update(pk: int) -> str:
def agent_update(pk: int, codesigntoken: str = None) -> str:
from agents.utils import get_exegen_url

agent = Agent.objects.get(pk=pk)

if pyver.parse(agent.version) <= pyver.parse("1.1.11"):
logger.warning(
f"{agent.hostname} v{agent.version} is running an unsupported version. Refusing to auto update."
)
if pyver.parse(agent.version) <= pyver.parse("1.3.0"):
return "not supported"

# skip if we can't determine the arch
@@ -34,18 +35,15 @@ def agent_update(pk: int) -> str:
)
return "noarch"

# removed sqlite in 1.4.0 to get rid of cgo dependency
# 1.3.0 has migration func to move from sqlite to win registry, so force an upgrade to 1.3.0 if old agent
if pyver.parse(agent.version) >= pyver.parse("1.3.0"):
version = settings.LATEST_AGENT_VER
url = agent.winagent_dl
inno = agent.win_inno_exe
version = settings.LATEST_AGENT_VER
inno = agent.win_inno_exe

if codesigntoken is not None and pyver.parse(version) >= pyver.parse("1.5.0"):
base_url = get_exegen_url() + "/api/v1/winagents/?"
params = {"version": version, "arch": agent.arch, "token": codesigntoken}
url = base_url + urllib.parse.urlencode(params)
else:
version = "1.3.0"
inno = (
"winagent-v1.3.0.exe" if agent.arch == "64" else "winagent-v1.3.0-x86.exe"
)
url = f"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/{inno}"
url = agent.winagent_dl

if agent.pendingactions.filter(
action_type="agentupdate", status="pending"

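When a code-signing token is available and the target version is 1.5.0 or newer, the download URL is simply the exe generator endpoint with version, arch, and token url-encoded onto it; otherwise the stock winagent_dl URL is used. A standalone sketch of that construction (the host below is an example, not a fixed value):

import urllib.parse

version, arch, codesigntoken = "1.5.0", "64", "testtoken123"
base_url = "https://exe.example.com" + "/api/v1/winagents/?"
params = {"version": version, "arch": arch, "token": codesigntoken}
url = base_url + urllib.parse.urlencode(params)
# -> https://exe.example.com/api/v1/winagents/?version=1.5.0&arch=64&token=testtoken123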
@@ -78,10 +76,15 @@ def agent_update(pk: int) -> str:

@app.task
def send_agent_update_task(pks: list[int]) -> None:
try:
codesigntoken = CodeSignToken.objects.first().token
except:
codesigntoken = None

chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
agent_update(pk)
agent_update(pk, codesigntoken)
sleep(0.05)
sleep(4)

@@ -92,6 +95,11 @@ def auto_self_agent_update_task() -> None:
if not core.agent_auto_update:
return

try:
codesigntoken = CodeSignToken.objects.first().token
except:
codesigntoken = None

q = Agent.objects.only("pk", "version")
pks: list[int] = [
i.pk
@@ -102,7 +110,7 @@ def auto_self_agent_update_task() -> None:
chunks = (pks[i : i + 30] for i in range(0, len(pks), 30))
for chunk in chunks:
for pk in chunk:
agent_update(pk)
agent_update(pk, codesigntoken)
sleep(0.05)
sleep(4)

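Both tasks resolve the code-signing token once, then fan the updates out in chunks of 30 with short sleeps, presumably to avoid bursting NATS and the agents all at once. The chunking pattern on its own (sketch):

from time import sleep

def chunked(pks, size=30):
    return (pks[i : i + size] for i in range(0, len(pks), size))

for chunk in chunked(list(range(1, 95))):
    for pk in chunk:
        # agent_update(pk, codesigntoken) would run here
        sleep(0.05)  # small gap between agents
    sleep(4)         # longer gap between chunks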
@@ -252,3 +260,21 @@ def run_script_email_results_task(
server.quit()
except Exception as e:
logger.error(e)


@app.task
def monitor_agents_task() -> None:
agents = Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
ids = [i.agent_id for i in agents if i.status != "online"]
run_nats_api_cmd("monitor", ids)


@app.task
def get_wmi_task() -> None:
agents = Agent.objects.only(
"pk", "agent_id", "last_seen", "overdue_time", "offline_time"
)
ids = [i.agent_id for i in agents if i.status == "online"]
run_nats_api_cmd("wmi", ids)

@@ -12,7 +12,7 @@ from tacticalrmm.test import TacticalTestCase
from winupdate.models import WinUpdatePolicy
from winupdate.serializers import WinUpdatePolicySerializer

from .models import Agent
from .models import Agent, AgentCustomField
from .serializers import AgentSerializer
from .tasks import auto_self_agent_update_task

@@ -198,11 +198,6 @@ class TestAgentViews(TacticalTestCase):

@patch("agents.models.Agent.nats_cmd")
def test_get_processes(self, mock_ret):
agent_old = baker.make_recipe("agents.online_agent", version="1.1.12")
url_old = f"/agents/{agent_old.pk}/getprocs/"
r = self.client.get(url_old)
self.assertEqual(r.status_code, 400)

agent = baker.make_recipe("agents.online_agent", version="1.2.0")
url = f"/agents/{agent.pk}/getprocs/"

@@ -340,6 +335,7 @@ class TestAgentViews(TacticalTestCase):
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
"deleteafter": True,
"trigger": "once",
"name": r.data["task_name"], # type: ignore
"year": 2025,
@@ -367,9 +363,8 @@ class TestAgentViews(TacticalTestCase):
self.check_not_authenticated("patch", url)

@patch("os.path.exists")
@patch("subprocess.run")
def test_install_agent(self, mock_subprocess, mock_file_exists):
url = f"/agents/installagent/"
def test_install_agent(self, mock_file_exists):
url = "/agents/installagent/"

site = baker.make("clients.Site")
data = {
@@ -377,38 +372,29 @@ class TestAgentViews(TacticalTestCase):
"site": site.id, # type: ignore
"arch": "64",
"expires": 23,
"installMethod": "exe",
"installMethod": "manual",
"api": "https://api.example.com",
"agenttype": "server",
"rdp": 1,
"ping": 0,
"power": 0,
"fileName": "rmm-client-site-server.exe",
}

mock_file_exists.return_value = False
mock_subprocess.return_value.returncode = 0
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 406)

mock_file_exists.return_value = True
mock_subprocess.return_value.returncode = 1
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 413)

mock_file_exists.return_value = True
mock_subprocess.return_value.returncode = 0
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 200)

data["arch"] = "32"
mock_subprocess.return_value.returncode = 0
mock_file_exists.return_value = False
r = self.client.post(url, data, format="json")
self.assertEqual(r.status_code, 415)

data["installMethod"] = "manual"
data["arch"] = "64"
mock_subprocess.return_value.returncode = 0
mock_file_exists.return_value = True
r = self.client.post(url, data, format="json")
self.assertIn("rdp", r.json()["cmd"])
@@ -419,6 +405,9 @@ class TestAgentViews(TacticalTestCase):
self.assertIn("power", r.json()["cmd"])
self.assertIn("ping", r.json()["cmd"])

data["installMethod"] = "powershell"
self.assertEqual(r.status_code, 200)

self.check_not_authenticated("post", url)

@patch("agents.models.Agent.nats_cmd")

@@ -538,6 +527,35 @@ class TestAgentViews(TacticalTestCase):
data = WinUpdatePolicySerializer(policy).data
self.assertEqual(data["run_time_days"], [2, 3, 6])

# test adding custom fields
field = baker.make("core.CustomField", model="agent", type="number")
edit = {
"id": self.agent.pk,
"site": site.id, # type: ignore
"description": "asjdk234andasd",
"custom_fields": [{"field": field.id, "string_value": "123"}], # type: ignore
}

r = self.client.patch(url, edit, format="json")
self.assertEqual(r.status_code, 200)
self.assertTrue(
AgentCustomField.objects.filter(agent=self.agent, field=field).exists()
)

# test edit custom field
edit = {
"id": self.agent.pk,
"site": site.id, # type: ignore
"description": "asjdk234andasd",
"custom_fields": [{"field": field.id, "string_value": "456"}], # type: ignore
}

r = self.client.patch(url, edit, format="json")
self.assertEqual(r.status_code, 200)
self.assertEqual(
AgentCustomField.objects.get(agent=agent, field=field).value,
"456",
)
self.check_not_authenticated("patch", url)

@patch("agents.models.Agent.get_login_token")

@@ -825,7 +843,7 @@ class TestAgentViewsNew(TacticalTestCase):
self.authenticate()
self.setup_coresettings()

def test_agent_counts(self):
""" def test_agent_counts(self):
url = "/agents/agent_counts/"

# create some data
@@ -852,7 +870,7 @@ class TestAgentViewsNew(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data, data) # type: ignore

self.check_not_authenticated("post", url)
self.check_not_authenticated("post", url) """

def test_agent_maintenance_mode(self):
url = "/agents/maintenance/"

@@ -896,8 +914,9 @@ class TestAgentTasks(TacticalTestCase):
self.authenticate()
self.setup_coresettings()

@patch("agents.utils.get_exegen_url")
@patch("agents.models.Agent.nats_cmd")
def test_agent_update(self, nats_cmd):
def test_agent_update(self, nats_cmd, get_exe):
from agents.tasks import agent_update

agent_noarch = baker.make_recipe(
@@ -908,63 +927,96 @@ class TestAgentTasks(TacticalTestCase):
r = agent_update(agent_noarch.pk)
self.assertEqual(r, "noarch")

agent_1111 = baker.make_recipe(
"agents.agent",
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.1.11",
)
r = agent_update(agent_1111.pk)
self.assertEqual(r, "not supported")

agent64_1112 = baker.make_recipe(
"agents.agent",
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.1.12",
)

r = agent_update(agent64_1112.pk)
self.assertEqual(r, "created")
action = PendingAction.objects.get(agent__pk=agent64_1112.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")
self.assertEqual(
action.details["url"],
"https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
)
self.assertEqual(action.details["inno"], "winagent-v1.3.0.exe")
self.assertEqual(action.details["version"], "1.3.0")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": "https://github.com/wh1te909/rmmagent/releases/download/v1.3.0/winagent-v1.3.0.exe",
"version": "1.3.0",
"inno": "winagent-v1.3.0.exe",
},
},
wait=False,
)

agent_64_130 = baker.make_recipe(
agent_130 = baker.make_recipe(
"agents.agent",
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.3.0",
)
nats_cmd.return_value = "ok"
r = agent_update(agent_64_130.pk)
r = agent_update(agent_130.pk)
self.assertEqual(r, "not supported")

# test __without__ code signing
agent64_nosign = baker.make_recipe(
"agents.agent",
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.4.14",
)

r = agent_update(agent64_nosign.pk, None)
self.assertEqual(r, "created")
action = PendingAction.objects.get(agent__pk=agent64_nosign.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")
self.assertEqual(
action.details["url"],
f"https://github.com/wh1te909/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
)
self.assertEqual(
action.details["inno"], f"winagent-v{settings.LATEST_AGENT_VER}.exe"
)
self.assertEqual(action.details["version"], settings.LATEST_AGENT_VER)
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": settings.DL_64,
"url": f"https://github.com/wh1te909/rmmagent/releases/download/v{settings.LATEST_AGENT_VER}/winagent-v{settings.LATEST_AGENT_VER}.exe",
"version": settings.LATEST_AGENT_VER,
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
},
},
wait=False,
)
action = PendingAction.objects.get(agent__pk=agent_64_130.pk)

# test __with__ code signing (64 bit)
codesign = baker.make("core.CodeSignToken", token="testtoken123")
agent64_sign = baker.make_recipe(
"agents.agent",
operating_system="Windows 10 Pro, 64 bit (build 19041.450)",
version="1.4.14",
)

nats_cmd.return_value = "ok"
get_exe.return_value = "https://exe.tacticalrmm.io"
r = agent_update(agent64_sign.pk, codesign.token) # type: ignore
self.assertEqual(r, "created")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=64&token=testtoken123", # type: ignore
"version": settings.LATEST_AGENT_VER,
"inno": f"winagent-v{settings.LATEST_AGENT_VER}.exe",
},
},
wait=False,
)
action = PendingAction.objects.get(agent__pk=agent64_sign.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")

# test __with__ code signing (32 bit)
agent32_sign = baker.make_recipe(
"agents.agent",
operating_system="Windows 10 Pro, 32 bit (build 19041.450)",
version="1.4.14",
)

nats_cmd.return_value = "ok"
get_exe.return_value = "https://exe.tacticalrmm.io"
r = agent_update(agent32_sign.pk, codesign.token) # type: ignore
self.assertEqual(r, "created")
nats_cmd.assert_called_with(
{
"func": "agentupdate",
"payload": {
"url": f"https://exe.tacticalrmm.io/api/v1/winagents/?version={settings.LATEST_AGENT_VER}&arch=32&token=testtoken123", # type: ignore
"version": settings.LATEST_AGENT_VER,
"inno": f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe",
},
},
wait=False,
)
action = PendingAction.objects.get(agent__pk=agent32_sign.pk)
self.assertEqual(action.action_type, "agentupdate")
self.assertEqual(action.status, "pending")

@@ -27,7 +27,6 @@ urlpatterns = [
path("<int:pk>/notes/", views.GetAddNotes.as_view()),
path("<int:pk>/note/", views.GetEditDeleteNote.as_view()),
path("bulk/", views.bulk),
path("agent_counts/", views.agent_counts),
path("maintenance/", views.agent_maintenance),
path("<int:pk>/wmi/", views.WMI.as_view()),
]

api/tacticalrmm/agents/utils.py (new file, 37 lines)
@@ -0,0 +1,37 @@
import random
import urllib.parse

import requests
from django.conf import settings


def get_exegen_url() -> str:
    urls: list[str] = settings.EXE_GEN_URLS
    for url in urls:
        try:
            r = requests.get(url, timeout=10)
        except:
            continue

        if r.status_code == 200:
            return url

    return random.choice(urls)


def get_winagent_url(arch: str) -> str:
    from core.models import CodeSignToken

    try:
        codetoken = CodeSignToken.objects.first().token
        base_url = get_exegen_url() + "/api/v1/winagents/?"
        params = {
            "version": settings.LATEST_AGENT_VER,
            "arch": arch,
            "token": codetoken,
        }
        dl_url = base_url + urllib.parse.urlencode(params)
    except:
        dl_url = settings.DL_64 if arch == "64" else settings.DL_32

    return dl_url
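get_winagent_url wraps the same idea for installer downloads: if a CodeSignToken row exists it builds a token-carrying generator URL via get_exegen_url, otherwise it falls back to the plain settings.DL_64 / settings.DL_32 download URLs. Usage sketch:

from agents.utils import get_winagent_url

url_64 = get_winagent_url("64")  # code-signed generator URL when a token exists
url_32 = get_winagent_url("32")  # otherwise settings.DL_64 / settings.DL_32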
@@ -18,17 +18,13 @@ from core.models import CoreSettings
from logs.models import AuditLog, PendingAction
from scripts.models import Script
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task
from tacticalrmm.utils import (
generate_installer_exe,
get_default_timezone,
notify_error,
reload_nats,
)
from tacticalrmm.utils import get_default_timezone, notify_error, reload_nats
from winupdate.serializers import WinUpdatePolicySerializer
from winupdate.tasks import bulk_check_for_updates_task, bulk_install_updates_task

from .models import Agent, Note, RecoveryAction
from .models import Agent, AgentCustomField, Note, RecoveryAction
from .serializers import (
AgentCustomFieldSerializer,
AgentEditSerializer,
AgentHostnameSerializer,
AgentOverdueActionSerializer,
@@ -44,7 +40,7 @@ logger.configure(**settings.LOG_CONFIG)

@api_view()
def get_agent_versions(request):
agents = Agent.objects.only("pk")
agents = Agent.objects.prefetch_related("site").only("pk", "hostname")
return Response(
{
"versions": [settings.LATEST_AGENT_VER],
@@ -69,10 +65,9 @@ def update_agents(request):
def ping(request, pk):
agent = get_object_or_404(Agent, pk=pk)
status = "offline"
if agent.has_nats:
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
if r == "pong":
status = "online"
r = asyncio.run(agent.nats_cmd({"func": "ping"}, timeout=5))
if r == "pong":
status = "online"

return Response({"name": agent.hostname, "status": status})

@@ -80,8 +75,7 @@ def ping(request, pk):
@api_view(["DELETE"])
def uninstall(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if agent.has_nats:
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))
asyncio.run(agent.nats_cmd({"func": "uninstall"}, wait=False))

name = agent.hostname
agent.delete()
@@ -89,7 +83,7 @@ def uninstall(request):
return Response(f"{name} will now be uninstalled.")


@api_view(["PATCH"])
@api_view(["PATCH", "PUT"])
def edit_agent(request):
agent = get_object_or_404(Agent, pk=request.data["id"])

@@ -105,6 +99,29 @@ def edit_agent(request):
p_serializer.is_valid(raise_exception=True)
p_serializer.save()

if "custom_fields" in request.data.keys():

for field in request.data["custom_fields"]:

custom_field = field
custom_field["agent"] = agent.id # type: ignore

if AgentCustomField.objects.filter(
field=field["field"], agent=agent.id # type: ignore
):
value = AgentCustomField.objects.get(
field=field["field"], agent=agent.id # type: ignore
)
serializer = AgentCustomFieldSerializer(
instance=value, data=custom_field
)
serializer.is_valid(raise_exception=True)
serializer.save()
else:
serializer = AgentCustomFieldSerializer(data=custom_field)
serializer.is_valid(raise_exception=True)
serializer.save()

return Response("ok")


@@ -147,9 +164,6 @@ def agent_detail(request, pk):
@api_view()
def get_processes(request, pk):
agent = get_object_or_404(Agent, pk=pk)
if pyver.parse(agent.version) < pyver.parse("1.2.0"):
return notify_error("Requires agent version 1.2.0 or greater")

r = asyncio.run(agent.nats_cmd(data={"func": "procs"}, timeout=5))
if r == "timeout":
return notify_error("Unable to contact the agent")
@@ -159,9 +173,6 @@ def get_processes(request, pk):
@api_view()
def kill_proc(request, pk, pid):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")

r = asyncio.run(
agent.nats_cmd({"func": "killproc", "procpid": int(pid)}, timeout=15)
)
@@ -177,8 +188,6 @@ def kill_proc(request, pk, pid):
@api_view()
def get_event_log(request, pk, logtype, days):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
timeout = 180 if logtype == "Security" else 30
data = {
"func": "eventlog",
@@ -198,8 +207,6 @@ def get_event_log(request, pk, logtype, days):
@api_view(["POST"])
def send_raw_cmd(request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")
timeout = int(request.data["timeout"])
data = {
"func": "rawcmd",
@@ -296,9 +303,6 @@ class Reboot(APIView):
# reboot now
def post(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")

r = asyncio.run(agent.nats_cmd({"func": "rebootnow"}, timeout=10))
if r != "ok":
return notify_error("Unable to contact the agent")
@@ -308,8 +312,6 @@ class Reboot(APIView):
# reboot later
def patch(self, request):
agent = get_object_or_404(Agent, pk=request.data["pk"])
if not agent.has_gotasks:
return notify_error("Requires agent version 1.1.1 or greater")

try:
obj = dt.datetime.strptime(request.data["datetime"], "%Y-%m-%d %H:%M")
@@ -324,6 +326,7 @@ class Reboot(APIView):
"func": "schedtask",
"schedtaskpayload": {
"type": "schedreboot",
"deleteafter": True,
"trigger": "once",
"name": task_name,
"year": int(dt.datetime.strftime(obj, "%Y")),
@@ -334,9 +337,6 @@ class Reboot(APIView):
},
}

if pyver.parse(agent.version) >= pyver.parse("1.1.2"):
nats_data["schedtaskpayload"]["deleteafter"] = True

r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
if r != "ok":
return notify_error(r)
@@ -355,6 +355,8 @@ class Reboot(APIView):
def install_agent(request):
from knox.models import AuthToken

from agents.utils import get_winagent_url

client_id = request.data["client"]
site_id = request.data["site"]
version = settings.LATEST_AGENT_VER
@@ -375,26 +377,26 @@ def install_agent(request):
inno = (
f"winagent-v{version}.exe" if arch == "64" else f"winagent-v{version}-x86.exe"
)
download_url = settings.DL_64 if arch == "64" else settings.DL_32
download_url = get_winagent_url(arch)

_, token = AuthToken.objects.create(
user=request.user, expiry=dt.timedelta(hours=request.data["expires"])
)

if request.data["installMethod"] == "exe":
return generate_installer_exe(
file_name="rmm-installer.exe",
goarch="amd64" if arch == "64" else "386",
inno=inno,
api=request.data["api"],
client_id=client_id,
site_id=site_id,
atype=request.data["agenttype"],
from tacticalrmm.utils import generate_winagent_exe

return generate_winagent_exe(
client=client_id,
site=site_id,
agent_type=request.data["agenttype"],
rdp=request.data["rdp"],
ping=request.data["ping"],
power=request.data["power"],
download_url=download_url,
arch=arch,
token=token,
api=request.data["api"],
file_name=request.data["fileName"],
)

elif request.data["installMethod"] == "manual":
@@ -561,9 +563,6 @@ def run_script(request):
@api_view()
def recover_mesh(request, pk):
agent = get_object_or_404(Agent, pk=pk)
if not agent.has_nats:
return notify_error("Requires agent version 1.1.0 or greater")

data = {"func": "recover", "payload": {"mode": "mesh"}}
r = asyncio.run(agent.nats_cmd(data, timeout=45))
if r != "ok":
@@ -674,49 +673,6 @@ def bulk(request):
return notify_error("Something went wrong")


@api_view(["POST"])
def agent_counts(request):

server_offline_count = len(
[
agent
for agent in Agent.objects.filter(monitoring_type="server").only(
"pk",
"last_seen",
"overdue_time",
"offline_time",
)
if not agent.status == "online"
]
)

workstation_offline_count = len(
[
agent
for agent in Agent.objects.filter(monitoring_type="workstation").only(
"pk",
"last_seen",
"overdue_time",
"offline_time",
)
if not agent.status == "online"
]
)

return Response(
{
"total_server_count": Agent.objects.filter(
monitoring_type="server"
).count(),
"total_server_offline_count": server_offline_count,
"total_workstation_count": Agent.objects.filter(
monitoring_type="workstation"
).count(),
"total_workstation_offline_count": workstation_offline_count,
}
)


@api_view(["POST"])
def agent_maintenance(request):
if request.data["type"] == "Client":
@@ -743,9 +699,6 @@ def agent_maintenance(request):
class WMI(APIView):
def get(self, request, pk):
agent = get_object_or_404(Agent, pk=pk)
if pyver.parse(agent.version) < pyver.parse("1.1.2"):
return notify_error("Requires agent version 1.1.2 or greater")

r = asyncio.run(agent.nats_cmd({"func": "sysinfo"}, timeout=20))
if r != "ok":
return notify_error("Unable to contact the agent")

@@ -1,5 +1,6 @@
from __future__ import annotations

import re
from typing import TYPE_CHECKING, Union

from django.conf import settings
@@ -297,7 +298,7 @@ class Alert(models.Model):
if alert_template and alert_template.action and not alert.action_run:
r = agent.run_script(
scriptpk=alert_template.action.pk,
args=alert_template.action_args,
args=alert.parse_script_args(alert_template.action_args),
timeout=alert_template.action_timeout,
wait=True,
full=True,
@@ -406,7 +407,7 @@ class Alert(models.Model):
):
r = agent.run_script(
scriptpk=alert_template.resolved_action.pk,
args=alert_template.resolved_action_args,
args=alert.parse_script_args(alert_template.resolved_action_args),
timeout=alert_template.resolved_action_timeout,
wait=True,
full=True,
@@ -428,6 +429,36 @@ class Alert(models.Model):
f"Resolved action: {alert_template.action.name} failed to run on any agent for {agent.hostname} resolved alert"
)

def parse_script_args(self, args: list[str]):

if not args:
return []

temp_args = list()
# pattern to match for injection
pattern = re.compile(".*\\{\\{alert\\.(.*)\\}\\}.*")

for arg in args:
match = pattern.match(arg)
if match:
name = match.group(1)

if hasattr(self, name):
value = getattr(self, name)
else:
continue

try:
temp_args.append(re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)) # type: ignore
except Exception as e:
logger.error(e)
continue

else:
temp_args.append(arg)

return temp_args


class AlertTemplate(models.Model):
name = models.CharField(max_length=100)

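For context, a minimal sketch of what parse_script_args does with an alert template's script arguments (the attribute name and values below are made up for illustration and are not part of the diff):

# hypothetical illustration
# given an Alert instance whose .client attribute is "Acme"
args = ["-ClientName {{alert.client}}", "-Verbose"]
# parse_script_args(args) would return:
#   ["-ClientName 'Acme'", "-Verbose"]
# a placeholder that names an attribute the alert does not have is dropped from the list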
@@ -5,6 +5,7 @@ from unittest.mock import patch
from django.conf import settings
from django.utils import timezone as djangotime
from model_bakery import baker
from autotasks.models import AutomatedTask

from tacticalrmm.test import TacticalTestCase

@@ -112,6 +113,23 @@ class TestAPIv3(TacticalTestCase):
{"agent": self.agent.pk, "check_interval": 15},
)

def test_run_checks(self):
# force run all checks regardless of interval
agent = baker.make_recipe("agents.online_agent")
baker.make_recipe("checks.ping_check", agent=agent)
baker.make_recipe("checks.diskspace_check", agent=agent)
baker.make_recipe("checks.cpuload_check", agent=agent)
baker.make_recipe("checks.memory_check", agent=agent)
baker.make_recipe("checks.eventlog_check", agent=agent)
for _ in range(10):
baker.make_recipe("checks.script_check", agent=agent)

url = f"/api/v3/{agent.agent_id}/runchecks/"
r = self.client.get(url)
self.assertEqual(r.json()["agent"], agent.pk)
self.assertIsInstance(r.json()["check_interval"], int)
self.assertEqual(len(r.json()["checks"]), 15)

def test_checkin_patch(self):
from logs.models import PendingAction

@@ -186,3 +204,138 @@ class TestAPIv3(TacticalTestCase):
self.assertEqual(r.status_code, 200)
self.assertEqual(r.json(), {"mode": "rpc", "shellcmd": ""})
reload_nats.assert_called_once()

def test_task_runner_get(self):
from autotasks.serializers import TaskGOGetSerializer

r = self.client.get("/api/v3/500/asdf9df9dfdf/taskrunner/")
self.assertEqual(r.status_code, 404)

# setup data
agent = baker.make_recipe("agents.agent")
task = baker.make("autotasks.AutomatedTask", agent=agent)

url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore

r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(TaskGOGetSerializer(task).data, r.data) # type: ignore

def test_task_runner_results(self):
from agents.models import AgentCustomField

r = self.client.patch("/api/v3/500/asdf9df9dfdf/taskrunner/")
self.assertEqual(r.status_code, 404)

# setup data
agent = baker.make_recipe("agents.agent")
task = baker.make("autotasks.AutomatedTask", agent=agent)

url = f"/api/v3/{task.pk}/{agent.agent_id}/taskrunner/" # type: ignore

# test passing task
data = {
"stdout": "test test \ntestest stdgsd\n",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "passing") # type: ignore

# test failing task
data = {
"stdout": "test test \ntestest stdgsd\n",
"stderr": "",
"retcode": 1,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore

# test collector task
text = baker.make("core.CustomField", model="agent", type="text", name="Test")
boolean = baker.make(
"core.CustomField", model="agent", type="checkbox", name="Test1"
)
multiple = baker.make(
"core.CustomField", model="agent", type="multiple", name="Test2"
)

# test text fields
task.custom_field = text # type: ignore
task.save() # type: ignore

# test failing with stderr
data = {
"stdout": "test test \nthe last line",
"stderr": "This is an error",
"retcode": 1,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertTrue(AutomatedTask.objects.get(pk=task.pk).status == "failing") # type: ignore

# test saving to text field
data = {
"stdout": "test test \nthe last line",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=text, agent=task.agent).value, "the last line") # type: ignore

# test saving to checkbox field
task.custom_field = boolean # type: ignore
task.save() # type: ignore

data = {
"stdout": "1",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertTrue(AgentCustomField.objects.get(field=boolean, agent=task.agent).value) # type: ignore

# test saving to multiple field with commas
task.custom_field = multiple # type: ignore
task.save() # type: ignore

data = {
"stdout": "this,is,an,array",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this", "is", "an", "array"]) # type: ignore

# test multiple with a single value
data = {
"stdout": "this",
"stderr": "",
"retcode": 0,
"execution_time": 3.560,
}

r = self.client.patch(url, data)
self.assertEqual(r.status_code, 200)
self.assertEqual(AutomatedTask.objects.get(pk=task.pk).status, "passing") # type: ignore
self.assertEqual(AgentCustomField.objects.get(field=multiple, agent=task.agent).value, ["this"]) # type: ignore

@@ -5,6 +5,7 @@ from . import views
urlpatterns = [
path("checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/runchecks/", views.RunChecks.as_view()),
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
path("meshexe/", views.MeshExe.as_view()),

@@ -15,7 +15,7 @@ from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.models import User
from agents.models import Agent
from agents.models import Agent, AgentCustomField
from agents.serializers import WinAgentSerializer
from autotasks.models import AutomatedTask
from autotasks.serializers import TaskGOGetSerializer, TaskRunnerPatchSerializer
@@ -65,9 +65,17 @@ class CheckIn(APIView):
if Alert.objects.filter(agent=agent, resolved=False).exists():
Alert.handle_alert_resolve(agent)

# get any pending actions
if agent.pendingactions.filter(status="pending").exists(): # type: ignore
agent.handle_pending_actions()
# sync scheduled tasks
if agent.autotasks.exclude(sync_status="synced").exists(): # type: ignore
tasks = agent.autotasks.exclude(sync_status="synced") # type: ignore

for task in tasks:
if task.sync_status == "pendingdeletion":
task.delete_task_on_agent()
elif task.sync_status == "initial":
task.modify_task_on_agent()
elif task.sync_status == "notsynced":
task.create_task_on_agent()

return Response("ok")

@@ -260,6 +268,21 @@ class SupersededWinUpdate(APIView):
return Response("ok")


class RunChecks(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]

def get(self, request, agentid):
agent = get_object_or_404(Agent, agent_id=agentid)
checks = Check.objects.filter(agent__pk=agent.pk, overriden_by_policy=False)
ret = {
"agent": agent.pk,
"check_interval": agent.check_interval,
"checks": CheckRunnerGetSerializer(checks, many=True).data,
}
return Response(ret)


class CheckRunner(APIView):
authentication_classes = [TokenAuthentication]
permission_classes = [IsAuthenticated]
@@ -336,11 +359,42 @@ class TaskRunner(APIView):
instance=task, data=request.data, partial=True
)
serializer.is_valid(raise_exception=True)
serializer.save(last_run=djangotime.now())
new_task = serializer.save(last_run=djangotime.now())

status = "failing" if task.retcode != 0 else "passing"
# check if task is a collector and update the custom field
if task.custom_field:
if not task.stderr:

if AgentCustomField.objects.filter(
field=task.custom_field, agent=task.agent
).exists():
agent_field = AgentCustomField.objects.get(
field=task.custom_field, agent=task.agent
)
else:
agent_field = AgentCustomField.objects.create(
field=task.custom_field, agent=task.agent
)

# get last line of stdout
value = new_task.stdout.split("\n")[-1].strip()

if task.custom_field.type in ["text", "number", "single", "datetime"]:
agent_field.string_value = value
agent_field.save()
elif task.custom_field.type == "multiple":
agent_field.multiple_value = value.split(",")
agent_field.save()
elif task.custom_field.type == "checkbox":
agent_field.bool_value = bool(value)
agent_field.save()

status = "passing"
else:
status = "failing"
else:
status = "failing" if task.retcode != 0 else "passing"

new_task: AutomatedTask = AutomatedTask.objects.get(pk=task.pk)
new_task.status = status
new_task.save()

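In plain terms: when a task has a custom_field and produced no stderr, the last line of stdout becomes the field value. A hedged summary of the mapping above (the sample stdout values are illustrative only):

# illustration only, based on the branch above
# stdout "the last line"     -> text/number/single/datetime field: saved to string_value
# stdout "this,is,an,array"  -> multiple field: saved as ["this", "is", "an", "array"]
# stdout "1"                 -> checkbox field: bool("1") is True, saved to bool_value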
@@ -378,7 +432,7 @@ class SysInfo(APIView):


class MeshExe(APIView):
""" Sends the mesh exe to the installer """
"""Sends the mesh exe to the installer"""

def post(self, request):
exe = "meshagent.exe" if request.data["arch"] == "64" else "meshagent-x86.exe"

@@ -1,7 +1,6 @@
from django.db import models

from agents.models import Agent
from core.models import CoreSettings
from django.db import models
from logs.models import BaseAuditModel


@@ -29,7 +28,8 @@ class Policy(BaseAuditModel):

def save(self, *args, **kwargs):
from alerts.tasks import cache_agents_alert_template
from automation.tasks import generate_agent_checks_from_policies_task

from automation.tasks import generate_agent_checks_task

# get old policy if exists
old_policy = type(self).objects.get(pk=self.pk) if self.pk else None
@@ -38,8 +38,8 @@ class Policy(BaseAuditModel):
# generate agent checks only if active and enforced were changed
if old_policy:
if old_policy.active != self.active or old_policy.enforced != self.enforced:
generate_agent_checks_from_policies_task.delay(
policypk=self.pk,
generate_agent_checks_task.delay(
policy=self.pk,
create_tasks=True,
)

@@ -52,7 +52,10 @@ class Policy(BaseAuditModel):
agents = list(self.related_agents().only("pk").values_list("pk", flat=True))
super(BaseAuditModel, self).delete(*args, **kwargs)

generate_agent_checks_task.delay(agents, create_tasks=True)
generate_agent_checks_task.delay(agents=agents, create_tasks=True)

def __str__(self):
return self.name

@property
def is_default_server_policy(self):
@@ -62,9 +65,6 @@ class Policy(BaseAuditModel):
def is_default_workstation_policy(self):
return self.default_workstation_policy.exists() # type: ignore

def __str__(self):
return self.name

def is_agent_excluded(self, agent):
return (
agent in self.excluded_agents.all()

@@ -94,20 +94,29 @@ class Policy(BaseAuditModel):

filtered_agents_pks = Policy.objects.none()

filtered_agents_pks |= Agent.objects.filter(
site__in=[
site
for site in explicit_sites
if site.client not in explicit_clients
and site.client not in self.excluded_clients.all()
],
monitoring_type=mon_type,
).values_list("pk", flat=True)
filtered_agents_pks |= (
Agent.objects.exclude(block_policy_inheritance=True)
.filter(
site__in=[
site
for site in explicit_sites
if site.client not in explicit_clients
and site.client not in self.excluded_clients.all()
],
monitoring_type=mon_type,
)
.values_list("pk", flat=True)
)

filtered_agents_pks |= Agent.objects.filter(
site__client__in=[client for client in explicit_clients],
monitoring_type=mon_type,
).values_list("pk", flat=True)
filtered_agents_pks |= (
Agent.objects.exclude(block_policy_inheritance=True)
.exclude(site__block_policy_inheritance=True)
.filter(
site__client__in=[client for client in explicit_clients],
monitoring_type=mon_type,
)
.values_list("pk", flat=True)
)

return Agent.objects.filter(
models.Q(pk__in=filtered_agents_pks)

@@ -123,9 +132,6 @@ class Policy(BaseAuditModel):

@staticmethod
def cascade_policy_tasks(agent):
from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule
from logs.models import PendingAction

# List of all tasks to be applied
tasks = list()
@@ -154,6 +160,17 @@ class Policy(BaseAuditModel):
client_policy = client.workstation_policy
site_policy = site.workstation_policy

# check if client/site/agent is blocking inheritance and blank out policies
if agent.block_policy_inheritance:
site_policy = None
client_policy = None
default_policy = None
elif site.block_policy_inheritance:
client_policy = None
default_policy = None
elif client.block_policy_inheritance:
default_policy = None

if (
agent_policy
and agent_policy.active
@@ -200,26 +217,16 @@ class Policy(BaseAuditModel):
if taskpk not in added_task_pks
]
):
delete_win_task_schedule.delay(task.pk)
if task.sync_status == "initial":
task.delete()
else:
task.sync_status = "pendingdeletion"
task.save()

# handle matching tasks that haven't synced to agent yet or pending deletion due to agent being offline
for action in agent.pendingactions.filter(action_type="taskaction").exclude(
status="completed"
):
task = AutomatedTask.objects.get(pk=action.details["task_id"])
if (
task.parent_task in agent_tasks_parent_pks
and task.parent_task in added_task_pks
):
agent.remove_matching_pending_task_actions(task.id)

PendingAction(
agent=agent,
action_type="taskaction",
details={"action": "taskcreate", "task_id": task.id},
).save()
task.sync_status = "notsynced"
task.save(update_fields=["sync_status"])
# change tasks from pendingdeletion to notsynced if policy was added or changed
agent.autotasks.filter(sync_status="pendingdeletion").filter(
parent_task__in=[taskpk for taskpk in added_task_pks]
).update(sync_status="notsynced")

return [task for task in tasks if task.pk not in agent_tasks_parent_pks]

@@ -251,6 +258,17 @@ class Policy(BaseAuditModel):
client_policy = client.workstation_policy
site_policy = site.workstation_policy

# check if client/site/agent is blocking inheritance and blank out policies
if agent.block_policy_inheritance:
site_policy = None
client_policy = None
default_policy = None
elif site.block_policy_inheritance:
client_policy = None
default_policy = None
elif client.block_policy_inheritance:
default_policy = None

# Used to hold the policies that will be applied and the order in which they are applied
# Enforced policies are applied first
enforced_checks = list()

@@ -1,169 +1,143 @@
from agents.models import Agent
from automation.models import Policy
from autotasks.models import AutomatedTask
from checks.models import Check
from typing import Any, Dict, List, Union

from tacticalrmm.celery import app


@app.task
# generates policy checks on agents affected by a policy and optionally generate automated tasks
def generate_agent_checks_from_policies_task(policypk, create_tasks=False):
def generate_agent_checks_task(
policy: int = None,
site: int = None,
client: int = None,
agents: List[int] = list(),
all: bool = False,
create_tasks: bool = False,
) -> Union[str, None]:
from agents.models import Agent

policy = Policy.objects.get(pk=policypk)
from automation.models import Policy

if policy.is_default_server_policy and policy.is_default_workstation_policy:
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
elif policy.is_default_server_policy:
agents = Agent.objects.filter(monitoring_type="server").only(
"pk", "monitoring_type"
)
elif policy.is_default_workstation_policy:
agents = Agent.objects.filter(monitoring_type="workstation").only(
p = Policy.objects.get(pk=policy) if policy else None

# generate checks on all agents if all is specified or if policy is default server/workstation policy
if (p and p.is_default_server_policy and p.is_default_workstation_policy) or all:
a = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")

# generate checks on all servers if policy is a default servers policy
elif p and p.is_default_server_policy:
a = Agent.objects.filter(monitoring_type="server").only("pk", "monitoring_type")

# generate checks on all workstations if policy is a default workstations policy
elif p and p.is_default_workstation_policy:
a = Agent.objects.filter(monitoring_type="workstation").only(
"pk", "monitoring_type"
)

# generate checks on a list of supplied agents
elif agents:
a = Agent.objects.filter(pk__in=agents)

# generate checks on agents affected by supplied policy
elif policy:
a = p.related_agents().only("pk")

# generate checks that has specified site
elif site:
a = Agent.objects.filter(site_id=site)

# generate checks that has specified client
elif client:
a = Agent.objects.filter(site__client_id=client)
else:
agents = policy.related_agents().only("pk")
a = []

for agent in agents:
for agent in a:
agent.generate_checks_from_policies()
if create_tasks:
agent.generate_tasks_from_policies()


@app.task
# generates policy checks on a list of agents and optionally generate automated tasks
def generate_agent_checks_task(agentpks, create_tasks=False):
for agent in Agent.objects.filter(pk__in=agentpks):
agent.generate_checks_from_policies()

if create_tasks:
agent.generate_tasks_from_policies()


@app.task
# generates policy checks on agent servers or workstations within a certain client or site and optionally generate automated tasks
def generate_agent_checks_by_location_task(location, mon_type, create_tasks=False):

for agent in Agent.objects.filter(**location).filter(monitoring_type=mon_type):
agent.generate_checks_from_policies()

if create_tasks:
agent.generate_tasks_from_policies()


@app.task
# generates policy checks on all agent servers or workstations and optionally generate automated tasks
def generate_all_agent_checks_task(mon_type, create_tasks=False):
for agent in Agent.objects.filter(monitoring_type=mon_type):
agent.generate_checks_from_policies()

if create_tasks:
agent.generate_tasks_from_policies()


@app.task
# deletes a policy managed check from all agents
def delete_policy_check_task(checkpk):

Check.objects.filter(parent_check=checkpk).delete()
return "ok"


@app.task
# updates policy managed check fields on agents
def update_policy_check_fields_task(checkpk):
def update_policy_check_fields_task(check: int) -> str:
from checks.models import Check

check = Check.objects.get(pk=checkpk)
c: Check = Check.objects.get(pk=check)
update_fields: Dict[Any, Any] = {}

Check.objects.filter(parent_check=checkpk).update(
warning_threshold=check.warning_threshold,
error_threshold=check.error_threshold,
alert_severity=check.alert_severity,
name=check.name,
run_interval=check.run_interval,
disk=check.disk,
fails_b4_alert=check.fails_b4_alert,
ip=check.ip,
script=check.script,
script_args=check.script_args,
info_return_codes=check.info_return_codes,
warning_return_codes=check.warning_return_codes,
timeout=check.timeout,
pass_if_start_pending=check.pass_if_start_pending,
pass_if_svc_not_exist=check.pass_if_svc_not_exist,
restart_if_stopped=check.restart_if_stopped,
log_name=check.log_name,
event_id=check.event_id,
event_id_is_wildcard=check.event_id_is_wildcard,
event_type=check.event_type,
event_source=check.event_source,
event_message=check.event_message,
fail_when=check.fail_when,
search_last_days=check.search_last_days,
number_of_events_b4_alert=check.number_of_events_b4_alert,
email_alert=check.email_alert,
text_alert=check.text_alert,
dashboard_alert=check.dashboard_alert,
)
for field in c.policy_fields_to_copy:
update_fields[field] = getattr(c, field)

Check.objects.filter(parent_check=check).update(**update_fields)

return "ok"


@app.task
# generates policy tasks on agents affected by a policy
def generate_agent_tasks_from_policies_task(policypk):
def generate_agent_autotasks_task(policy: int = None) -> str:
from agents.models import Agent

policy = Policy.objects.get(pk=policypk)
from automation.models import Policy

if policy.is_default_server_policy and policy.is_default_workstation_policy:
p: Policy = Policy.objects.get(pk=policy)

if p and p.is_default_server_policy and p.is_default_workstation_policy:
agents = Agent.objects.prefetch_related("policy").only("pk", "monitoring_type")
elif policy.is_default_server_policy:
elif p and p.is_default_server_policy:
agents = Agent.objects.filter(monitoring_type="server").only(
"pk", "monitoring_type"
)
elif policy.is_default_workstation_policy:
elif p and p.is_default_workstation_policy:
agents = Agent.objects.filter(monitoring_type="workstation").only(
"pk", "monitoring_type"
)
else:
agents = policy.related_agents().only("pk")
agents = p.related_agents().only("pk")

for agent in agents:
agent.generate_tasks_from_policies()

return "ok"


@app.task
def delete_policy_autotask_task(taskpk):
def delete_policy_autotasks_task(task: int) -> str:
from autotasks.models import AutomatedTask
from autotasks.tasks import delete_win_task_schedule

for task in AutomatedTask.objects.filter(parent_task=taskpk):
delete_win_task_schedule.delay(task.pk)
for t in AutomatedTask.objects.filter(parent_task=task):
t.delete_task_on_agent()

return "ok"


@app.task
def run_win_policy_autotask_task(task_pks):
from autotasks.tasks import run_win_task
def run_win_policy_autotasks_task(task: int) -> str:
from autotasks.models import AutomatedTask

for task in task_pks:
run_win_task.delay(task)
for t in AutomatedTask.objects.filter(parent_task=task):
t.run_win_task()

return "ok"


@app.task
def update_policy_task_fields_task(taskpk, update_agent=False):
from autotasks.tasks import enable_or_disable_win_task
def update_policy_autotasks_fields_task(task: int, update_agent: bool = False) -> str:
from autotasks.models import AutomatedTask

task = AutomatedTask.objects.get(pk=taskpk)
t = AutomatedTask.objects.get(pk=task)
update_fields: Dict[str, Any] = {}

AutomatedTask.objects.filter(parent_task=taskpk).update(
alert_severity=task.alert_severity,
email_alert=task.email_alert,
text_alert=task.text_alert,
dashboard_alert=task.dashboard_alert,
script=task.script,
script_args=task.script_args,
name=task.name,
timeout=task.timeout,
enabled=task.enabled,
)
for field in t.policy_fields_to_copy:
update_fields[field] = getattr(t, field)

AutomatedTask.objects.filter(parent_task=task).update(**update_fields)

if update_agent:
for task in AutomatedTask.objects.filter(parent_task=taskpk):
enable_or_disable_win_task.delay(task.pk, task.enabled)
for t in AutomatedTask.objects.filter(parent_task=task).exclude(
sync_status="initial"
):
t.modify_task_on_agent()

return "ok"

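The call shapes the tests below exercise against the consolidated task, summarized as a sketch (the pk values are placeholders, not part of the diff):

# illustrative placeholders
generate_agent_checks_task.delay(policy=policy.pk, create_tasks=True)   # agents affected by one policy
generate_agent_checks_task.delay(agents=[a.pk for a in agents], create_tasks=True)
generate_agent_checks_task.delay(site=site.pk, create_tasks=True)
generate_agent_checks_task.delay(client=client.pk, create_tasks=True)
generate_agent_checks_task.delay(all=True, create_tasks=True)           # every agent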
@@ -52,7 +52,8 @@ class TestPolicyViews(TacticalTestCase):

self.check_not_authenticated("get", url)

def test_add_policy(self):
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
def test_add_policy(self, create_task):
url = "/automation/policies/"

data = {
@@ -90,8 +91,8 @@ class TestPolicyViews(TacticalTestCase):

self.check_not_authenticated("post", url)

@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
def test_update_policy(self, generate_agent_checks_from_policies_task):
@patch("automation.tasks.generate_agent_checks_task.delay")
def test_update_policy(self, generate_agent_checks_task):
# returns 404 for invalid policy pk
resp = self.client.put("/automation/policies/500/", format="json")
self.assertEqual(resp.status_code, 404)
@@ -110,7 +111,7 @@ class TestPolicyViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200)

# only called if active or enforced are updated
generate_agent_checks_from_policies_task.assert_not_called()
generate_agent_checks_task.assert_not_called()

data = {
"name": "Test Policy Update",
@@ -121,8 +122,8 @@ class TestPolicyViews(TacticalTestCase):

resp = self.client.put(url, data, format="json")
self.assertEqual(resp.status_code, 200)
generate_agent_checks_from_policies_task.assert_called_with(
policypk=policy.pk, create_tasks=True # type: ignore
generate_agent_checks_task.assert_called_with(
policy=policy.pk, create_tasks=True # type: ignore
)

self.check_not_authenticated("put", url)
@@ -145,7 +146,7 @@ class TestPolicyViews(TacticalTestCase):
self.assertEqual(resp.status_code, 200)

generate_agent_checks_task.assert_called_with(
[agent.pk for agent in agents], create_tasks=True
agents=[agent.pk for agent in agents], create_tasks=True
)

self.check_not_authenticated("delete", url)
@@ -271,7 +272,7 @@ class TestPolicyViews(TacticalTestCase):

self.check_not_authenticated("patch", url)

@patch("automation.tasks.run_win_policy_autotask_task.delay")
@patch("automation.tasks.run_win_policy_autotasks_task.delay")
def test_run_win_task(self, mock_task):

# create managed policy tasks
@@ -281,11 +282,12 @@ class TestPolicyViews(TacticalTestCase):
parent_task=1,
_quantity=6,
)

url = "/automation/runwintask/1/"
resp = self.client.put(url, format="json")
self.assertEqual(resp.status_code, 200)

mock_task.assert_called_once_with([task.pk for task in tasks]) # type: ignore
mock_task.assert_called() # type: ignore

self.check_not_authenticated("put", url)

@@ -426,7 +428,7 @@ class TestPolicyViews(TacticalTestCase):

self.check_not_authenticated("delete", url)

@patch("automation.tasks.generate_agent_checks_from_policies_task.delay")
@patch("automation.tasks.generate_agent_checks_task.delay")
def test_sync_policy(self, generate_checks):
url = "/automation/sync/"

@@ -441,7 +443,7 @@ class TestPolicyViews(TacticalTestCase):

resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
generate_checks.assert_called_with(policy.pk, create_tasks=True) # type: ignore
generate_checks.assert_called_with(policy=policy.pk, create_tasks=True) # type: ignore

self.check_not_authenticated("post", url)

@@ -497,7 +499,7 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEquals(len(resp.data["agents"]), 10) # type: ignore

def test_generating_agent_policy_checks(self):
from .tasks import generate_agent_checks_from_policies_task
from .tasks import generate_agent_checks_task

# setup data
policy = baker.make("automation.Policy", active=True)
@@ -505,7 +507,7 @@ class TestPolicyTasks(TacticalTestCase):
agent = baker.make_recipe("agents.agent", policy=policy)

# test policy assigned to agent
generate_agent_checks_from_policies_task(policy.id) # type: ignore
generate_agent_checks_task(policy=policy.id) # type: ignore

# make sure all checks were created. should be 7
agent_checks = Agent.objects.get(pk=agent.id).agentchecks.all()
@@ -545,7 +547,7 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEqual(check.event_type, checks[6].event_type)

def test_generating_agent_policy_checks_with_enforced(self):
from .tasks import generate_agent_checks_from_policies_task
from .tasks import generate_agent_checks_task

# setup data
policy = baker.make("automation.Policy", active=True, enforced=True)
@@ -555,7 +557,7 @@ class TestPolicyTasks(TacticalTestCase):
agent = baker.make_recipe("agents.agent", site=site, policy=policy)
self.create_checks(agent=agent, script=script)

generate_agent_checks_from_policies_task(policy.id, create_tasks=True) # type: ignore
generate_agent_checks_task(policy=policy.id, create_tasks=True) # type: ignore

# make sure each agent check says overriden_by_policy
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 14)
@@ -566,13 +568,12 @@ class TestPolicyTasks(TacticalTestCase):
7,
)

@patch("automation.tasks.generate_agent_checks_by_location_task.delay")
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("automation.tasks.generate_agent_checks_task.delay")
def test_generating_agent_policy_checks_by_location(
self, generate_agent_checks_by_location_task
self, generate_agent_checks_mock, create_task
):
from automation.tasks import (
generate_agent_checks_by_location_task as generate_agent_checks,
)
from automation.tasks import generate_agent_checks_task

# setup data
policy = baker.make("automation.Policy", active=True)
@@ -596,16 +597,14 @@ class TestPolicyTasks(TacticalTestCase):
workstation_agent.client.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site__client_id": workstation_agent.client.pk},
mon_type="workstation",
generate_agent_checks_mock.assert_called_with(
client=workstation_agent.client.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site__client_id": workstation_agent.client.pk},
mon_type="workstation",
generate_agent_checks_task(
client=workstation_agent.client.pk,
create_tasks=True,
)

@@ -620,16 +619,14 @@ class TestPolicyTasks(TacticalTestCase):
workstation_agent.client.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site__client_id": workstation_agent.client.pk},
mon_type="workstation",
generate_agent_checks_mock.assert_called_with(
client=workstation_agent.client.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site__client_id": workstation_agent.client.pk},
mon_type="workstation",
generate_agent_checks_task(
client=workstation_agent.client.pk,
create_tasks=True,
)

@@ -644,16 +641,14 @@ class TestPolicyTasks(TacticalTestCase):
server_agent.client.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site__client_id": server_agent.client.pk},
mon_type="server",
generate_agent_checks_mock.assert_called_with(
client=server_agent.client.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site__client_id": server_agent.client.pk},
mon_type="server",
generate_agent_checks_task(
client=server_agent.client.pk,
create_tasks=True,
)

@@ -668,16 +663,14 @@ class TestPolicyTasks(TacticalTestCase):
server_agent.client.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site__client_id": server_agent.client.pk},
mon_type="server",
generate_agent_checks_mock.assert_called_with(
client=server_agent.client.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site__client_id": server_agent.client.pk},
mon_type="server",
generate_agent_checks_task(
client=server_agent.client.pk,
create_tasks=True,
)

@@ -692,16 +685,14 @@ class TestPolicyTasks(TacticalTestCase):
workstation_agent.site.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site_id": workstation_agent.site.pk},
mon_type="workstation",
generate_agent_checks_mock.assert_called_with(
site=workstation_agent.site.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site_id": workstation_agent.site.pk},
mon_type="workstation",
generate_agent_checks_task(
site=workstation_agent.site.pk,
create_tasks=True,
)

@@ -716,16 +707,14 @@ class TestPolicyTasks(TacticalTestCase):
workstation_agent.site.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site_id": workstation_agent.site.pk},
mon_type="workstation",
generate_agent_checks_mock.assert_called_with(
site=workstation_agent.site.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site_id": workstation_agent.site.pk},
mon_type="workstation",
generate_agent_checks_task(
site=workstation_agent.site.pk,
create_tasks=True,
)

@@ -740,16 +729,14 @@ class TestPolicyTasks(TacticalTestCase):
server_agent.site.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site_id": server_agent.site.pk},
mon_type="server",
generate_agent_checks_mock.assert_called_with(
site=server_agent.site.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site_id": server_agent.site.pk},
mon_type="server",
generate_agent_checks_task(
site=server_agent.site.pk,
create_tasks=True,
)

@@ -764,16 +751,14 @@ class TestPolicyTasks(TacticalTestCase):
server_agent.site.save()

# should trigger task in save method on core
generate_agent_checks_by_location_task.assert_called_with(
location={"site_id": server_agent.site.pk},
mon_type="server",
generate_agent_checks_mock.assert_called_with(
site=server_agent.site.pk,
create_tasks=True,
)
generate_agent_checks_by_location_task.reset_mock()
generate_agent_checks_mock.reset_mock()

generate_agent_checks(
location={"site_id": server_agent.site.pk},
mon_type="server",
generate_agent_checks_task(
site=server_agent.site.pk,
create_tasks=True,
)

@@ -783,13 +768,10 @@ class TestPolicyTasks(TacticalTestCase):
Agent.objects.get(pk=workstation_agent.id).agentchecks.count(), 0
)

@patch("automation.tasks.generate_all_agent_checks_task.delay")
def test_generating_policy_checks_for_all_agents(
self, generate_all_agent_checks_task
):
@patch("automation.tasks.generate_agent_checks_task.delay")
def test_generating_policy_checks_for_all_agents(self, generate_agent_checks_mock):
from core.models import CoreSettings

from .tasks import generate_all_agent_checks_task as generate_all_checks
from .tasks import generate_agent_checks_task

# setup data
policy = baker.make("automation.Policy", active=True)
@@ -801,11 +783,9 @@ class TestPolicyTasks(TacticalTestCase):
core.server_policy = policy
core.save()

generate_all_agent_checks_task.assert_called_with(
mon_type="server", create_tasks=True
)
generate_all_agent_checks_task.reset_mock()
generate_all_checks(mon_type="server", create_tasks=True)
generate_agent_checks_mock.assert_called_with(all=True, create_tasks=True)
generate_agent_checks_mock.reset_mock()
generate_agent_checks_task(all=True, create_tasks=True)

# all servers should have 7 checks
for agent in server_agents:
@@ -818,15 +798,9 @@ class TestPolicyTasks(TacticalTestCase):
core.workstation_policy = policy
core.save()

generate_all_agent_checks_task.assert_any_call(
mon_type="workstation", create_tasks=True
)
generate_all_agent_checks_task.assert_any_call(
mon_type="server", create_tasks=True
)
generate_all_agent_checks_task.reset_mock()
generate_all_checks(mon_type="server", create_tasks=True)
generate_all_checks(mon_type="workstation", create_tasks=True)
generate_agent_checks_mock.assert_any_call(all=True, create_tasks=True)
generate_agent_checks_mock.reset_mock()
generate_agent_checks_task(all=True, create_tasks=True)

# all workstations should have 7 checks
for agent in server_agents:
@@ -838,11 +812,9 @@ class TestPolicyTasks(TacticalTestCase):
core.workstation_policy = None
core.save()

generate_all_agent_checks_task.assert_called_with(
mon_type="workstation", create_tasks=True
)
generate_all_agent_checks_task.reset_mock()
generate_all_checks(mon_type="workstation", create_tasks=True)
generate_agent_checks_mock.assert_called_with(all=True, create_tasks=True)
generate_agent_checks_mock.reset_mock()
generate_agent_checks_task(all=True, create_tasks=True)

# nothing should have the checks
for agent in server_agents:

@@ -851,31 +823,8 @@ class TestPolicyTasks(TacticalTestCase):
for agent in workstation_agents:
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 0)

def test_delete_policy_check(self):
from .models import Policy
from .tasks import delete_policy_check_task

policy = baker.make("automation.Policy", active=True)
self.create_checks(policy=policy)
agent = baker.make_recipe("agents.server_agent", policy=policy)

# make sure agent has 7 checks
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 7)

# pick a policy check and delete it from the agent
policy_check_id = Policy.objects.get(pk=policy.id).policychecks.first().id # type: ignore

delete_policy_check_task(policy_check_id)

# make sure policy check doesn't exist on agent
self.assertEqual(Agent.objects.get(pk=agent.id).agentchecks.count(), 6)
self.assertFalse(
Agent.objects.get(pk=agent.id)
.agentchecks.filter(parent_check=policy_check_id)
.exists()
)

def update_policy_check_fields(self):
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
def update_policy_check_fields(self, create_task):
from .models import Policy
from .tasks import update_policy_check_fields_task

@@ -905,8 +854,9 @@ class TestPolicyTasks(TacticalTestCase):
"12.12.12.12",
)

def test_generate_agent_tasks(self):
from .tasks import generate_agent_tasks_from_policies_task
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
def test_generate_agent_tasks(self, create_task):
from .tasks import generate_agent_autotasks_task

# create test data
policy = baker.make("automation.Policy", active=True)
@@ -915,7 +865,7 @@ class TestPolicyTasks(TacticalTestCase):
)
agent = baker.make_recipe("agents.server_agent", policy=policy)

generate_agent_tasks_from_policies_task(policy.id) # type: ignore
generate_agent_autotasks_task(policy=policy.id) # type: ignore

agent_tasks = Agent.objects.get(pk=agent.id).autotasks.all()

@@ -934,56 +884,61 @@ class TestPolicyTasks(TacticalTestCase):
self.assertEqual(task.parent_task, tasks[2].id) # type: ignore
self.assertEqual(task.name, tasks[2].name) # type: ignore

@patch("autotasks.tasks.delete_win_task_schedule.delay")
def test_delete_policy_tasks(self, delete_win_task_schedule):
from .tasks import delete_policy_autotask_task
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("autotasks.models.AutomatedTask.delete_task_on_agent")
def test_delete_policy_tasks(self, delete_task_on_agent, create_task):
from .tasks import delete_policy_autotasks_task

policy = baker.make("automation.Policy", active=True)
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
agent = baker.make_recipe("agents.server_agent", policy=policy)
baker.make_recipe("agents.server_agent", policy=policy)

delete_policy_autotask_task(tasks[0].id) # type: ignore
delete_policy_autotasks_task(task=tasks[0].id) # type: ignore

delete_win_task_schedule.assert_called_with(
agent.autotasks.get(parent_task=tasks[0].id).id # type: ignore
)
delete_task_on_agent.assert_called()

@patch("autotasks.tasks.run_win_task.delay")
def test_run_policy_task(self, run_win_task):
from .tasks import run_win_policy_autotask_task
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("autotasks.models.AutomatedTask.run_win_task")
def test_run_policy_task(self, run_win_task, create_task):
from .tasks import run_win_policy_autotasks_task

tasks = baker.make("autotasks.AutomatedTask", _quantity=3)
policy = baker.make("automation.Policy", active=True)
tasks = baker.make("autotasks.AutomatedTask", policy=policy, _quantity=3)
baker.make_recipe("agents.server_agent", policy=policy)

run_win_policy_autotask_task([task.id for task in tasks]) # type: ignore
run_win_policy_autotasks_task(task=tasks[0].id) # type: ignore

run_win_task.side_effect = [task.id for task in tasks] # type: ignore
self.assertEqual(run_win_task.call_count, 3)
for task in tasks: # type: ignore
run_win_task.assert_any_call(task.id) # type: ignore
run_win_task.assert_called_once()

@patch("autotasks.tasks.enable_or_disable_win_task.delay")
def test_update_policy_tasks(self, enable_or_disable_win_task):
from .tasks import update_policy_task_fields_task
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
@patch("autotasks.models.AutomatedTask.modify_task_on_agent")
def test_update_policy_tasks(self, modify_task_on_agent, create_task):
from .tasks import update_policy_autotasks_fields_task

# setup data
policy = baker.make("automation.Policy", active=True)
tasks = baker.make(
"autotasks.AutomatedTask", enabled=True, policy=policy, _quantity=3
"autotasks.AutomatedTask",
enabled=True,
policy=policy,
_quantity=3,
)
agent = baker.make_recipe("agents.server_agent", policy=policy)

tasks[0].enabled = False # type: ignore
tasks[0].save() # type: ignore

update_policy_task_fields_task(tasks[0].id) # type: ignore
enable_or_disable_win_task.assert_not_called()
update_policy_autotasks_fields_task(task=tasks[0].id) # type: ignore
modify_task_on_agent.assert_not_called()

self.assertFalse(agent.autotasks.get(parent_task=tasks[0].id).enabled) # type: ignore

update_policy_task_fields_task(tasks[0].id, update_agent=True) # type: ignore
enable_or_disable_win_task.assert_called_with(
agent.autotasks.get(parent_task=tasks[0].id).id, False # type: ignore
|
||||
)
|
||||
update_policy_autotasks_fields_task(task=tasks[0].id, update_agent=True) # type: ignore
|
||||
modify_task_on_agent.assert_not_called()
|
||||
|
||||
agent.autotasks.update(sync_status="synced")
|
||||
update_policy_autotasks_fields_task(task=tasks[0].id, update_agent=True) # type: ignore
|
||||
modify_task_on_agent.assert_called_once()
|
||||
|
||||
@patch("agents.models.Agent.generate_tasks_from_policies")
|
||||
@patch("agents.models.Agent.generate_checks_from_policies")
|
||||
@@ -996,17 +951,19 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
generate_checks.reset_mock()
|
||||
generate_tasks.reset_mock()
|
||||
|
||||
generate_agent_checks_task([agent.pk for agent in agents])
|
||||
generate_agent_checks_task(agents=[agent.pk for agent in agents])
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
generate_tasks.assert_not_called()
|
||||
generate_checks.reset_mock()
|
||||
|
||||
generate_agent_checks_task([agent.pk for agent in agents], create_tasks=True)
|
||||
generate_agent_checks_task(
|
||||
agents=[agent.pk for agent in agents], create_tasks=True
|
||||
)
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
self.assertEquals(generate_checks.call_count, 5)
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
def test_policy_exclusions(self, delete_task):
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_policy_exclusions(self, create_task):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
@@ -1028,8 +985,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
# delete agent tasks
|
||||
agent.autotasks.all().delete()
|
||||
@@ -1051,8 +1006,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
# delete agent tasks and reset
|
||||
agent.autotasks.all().delete()
|
||||
@@ -1074,8 +1027,6 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
# delete agent tasks and reset
|
||||
agent.autotasks.all().delete()
|
||||
@@ -1103,11 +1054,82 @@ class TestPolicyTasks(TacticalTestCase):
|
||||
|
||||
self.assertEqual(policy.related_agents().count(), 0) # type: ignore
|
||||
self.assertEqual(agent.agentchecks.count(), 0) # type: ignore
|
||||
delete_task.assert_called()
|
||||
delete_task.reset_mock()
|
||||
|
||||
def test_removing_duplicate_pending_task_actions(self):
|
||||
pass
|
||||
@patch("autotasks.models.AutomatedTask.create_task_on_agent")
|
||||
def test_policy_inheritance_blocking(self, create_task):
|
||||
# setup data
|
||||
policy = baker.make("automation.Policy", active=True)
|
||||
baker.make_recipe("checks.memory_check", policy=policy)
|
||||
baker.make("autotasks.AutomatedTask", policy=policy)
|
||||
agent = baker.make_recipe("agents.agent", monitoring_type="server")
|
||||
|
||||
def test_creating_checks_with_assigned_tasks(self):
|
||||
pass
|
||||
core = CoreSettings.objects.first()
|
||||
core.server_policy = policy
|
||||
core.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from default policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
# test client blocking inheritance
|
||||
agent.site.client.block_policy_inheritance = True
|
||||
agent.site.client.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
self.assertFalse(agent.autotasks.all())
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
agent.site.client.server_policy = policy
|
||||
agent.site.client.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from client policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
# test site blocking inheritance
|
||||
agent.site.block_policy_inheritance = True
|
||||
agent.site.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
self.assertFalse(agent.autotasks.all())
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
agent.site.server_policy = policy
|
||||
agent.site.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from site policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
# test agent blocking inheritance
|
||||
agent.block_policy_inheritance = True
|
||||
agent.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
self.assertFalse(agent.autotasks.all())
|
||||
self.assertFalse(agent.agentchecks.all())
|
||||
|
||||
agent.policy = policy
|
||||
agent.save()
|
||||
|
||||
agent.generate_checks_from_policies()
|
||||
agent.generate_tasks_from_policies()
|
||||
|
||||
# should get policies from agent policy
|
||||
self.assertTrue(agent.autotasks.all())
|
||||
self.assertTrue(agent.agentchecks.all())
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from agents.serializers import AgentHostnameSerializer
|
||||
from autotasks.models import AutomatedTask
|
||||
from checks.models import Check
|
||||
from clients.models import Client
|
||||
from clients.serializers import ClientSerializer, SiteSerializer
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from tacticalrmm.utils import notify_error
|
||||
from winupdate.models import WinUpdatePolicy
|
||||
from winupdate.serializers import WinUpdatePolicySerializer
|
||||
@@ -22,7 +21,6 @@ from .serializers import (
|
||||
PolicyTableSerializer,
|
||||
PolicyTaskStatusSerializer,
|
||||
)
|
||||
from .tasks import run_win_policy_autotask_task
|
||||
|
||||
|
||||
class GetAddPolicies(APIView):
|
||||
@@ -76,10 +74,10 @@ class GetUpdateDeletePolicy(APIView):
class PolicySync(APIView):
def post(self, request):
if "policy" in request.data.keys():
from automation.tasks import generate_agent_checks_from_policies_task
from automation.tasks import generate_agent_checks_task

generate_agent_checks_from_policies_task.delay(
request.data["policy"], create_tasks=True
generate_agent_checks_task.delay(
policy=request.data["policy"], create_tasks=True
)
return Response("ok")

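This PolicySync change reflects the broader consolidation in this diff: the per-scope Celery tasks (generate_agent_checks_from_policies_task and similar) are folded into a single generate_agent_checks_task that takes keyword arguments for whichever scope applies. A minimal sketch of the call shapes, mirroring calls that appear elsewhere in this diff (the surrounding variables are placeholders):

    from automation.tasks import generate_agent_checks_task

    # regenerate checks (and optionally tasks) for every agent under one policy
    generate_agent_checks_task.delay(policy=policy.pk, create_tasks=True)

    # or for an explicit list of agents
    generate_agent_checks_task.delay(agents=[agent.pk for agent in agents])

    # or for every agent
    generate_agent_checks_task.delay(all=True, create_tasks=True)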
@@ -101,8 +99,9 @@ class PolicyAutoTask(APIView):
|
||||
|
||||
# bulk run win tasks associated with policy
|
||||
def put(self, request, task):
|
||||
tasks = AutomatedTask.objects.filter(parent_task=task)
|
||||
run_win_policy_autotask_task.delay([task.id for task in tasks])
|
||||
from .tasks import run_win_policy_autotasks_task
|
||||
|
||||
run_win_policy_autotasks_task.delay(task=task)
|
||||
return Response("Affected agent tasks will run shortly")
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-04 00:32
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0019_globalkvstore'),
|
||||
('scripts', '0007_script_args'),
|
||||
('autotasks', '0018_automatedtask_run_asap_after_missed'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='custom_field',
|
||||
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autotask', to='core.customfield'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='automatedtask',
|
||||
name='retvalue',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='script',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autoscript', to='scripts.script'),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
# Generated by Django 3.1.7 on 2021-04-21 02:26

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('autotasks', '0019_auto_20210404_0032'),
    ]

    operations = [
        migrations.AlterField(
            model_name='automatedtask',
            name='sync_status',
            field=models.CharField(choices=[('synced', 'Synced With Agent'), ('notsynced', 'Waiting On Agent Checkin'), ('pendingdeletion', 'Pending Deletion on Agent'), ('initial', 'Initial Task Sync')], default='initial', max_length=100),
        ),
    ]
@@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-27 14:11
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0021_customfield_hide_in_ui'),
|
||||
('autotasks', '0020_auto_20210421_0226'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automatedtask',
|
||||
name='custom_field',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='autotasks', to='core.customfield'),
|
||||
),
|
||||
]
|
||||
@@ -1,16 +1,19 @@
|
||||
import asyncio
|
||||
import datetime as dt
|
||||
import random
|
||||
import string
|
||||
from typing import List
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.db.models.fields import DateTimeField
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from django.utils import timezone as djangotime
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
from tacticalrmm.utils import bitdays_to_string
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
@@ -36,6 +39,7 @@ SYNC_STATUS_CHOICES = [
|
||||
("synced", "Synced With Agent"),
|
||||
("notsynced", "Waiting On Agent Checkin"),
|
||||
("pendingdeletion", "Pending Deletion on Agent"),
|
||||
("initial", "Initial Task Sync"),
|
||||
]
|
||||
|
||||
TASK_STATUS_CHOICES = [
|
||||
@@ -60,12 +64,19 @@ class AutomatedTask(BaseAuditModel):
|
||||
blank=True,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
custom_field = models.ForeignKey(
|
||||
"core.CustomField",
|
||||
related_name="autotasks",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="autoscript",
|
||||
on_delete=models.CASCADE,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
@@ -100,6 +111,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
parent_task = models.PositiveIntegerField(null=True, blank=True)
|
||||
win_task_name = models.CharField(max_length=255, null=True, blank=True)
|
||||
timeout = models.PositiveIntegerField(default=120)
|
||||
retvalue = models.TextField(null=True, blank=True)
|
||||
retcode = models.IntegerField(null=True, blank=True)
|
||||
stdout = models.TextField(null=True, blank=True)
|
||||
stderr = models.TextField(null=True, blank=True)
|
||||
@@ -110,7 +122,7 @@ class AutomatedTask(BaseAuditModel):
|
||||
max_length=30, choices=TASK_STATUS_CHOICES, default="pending"
|
||||
)
|
||||
sync_status = models.CharField(
|
||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="notsynced"
|
||||
max_length=100, choices=SYNC_STATUS_CHOICES, default="initial"
|
||||
)
|
||||
alert_severity = models.CharField(
|
||||
max_length=30, choices=SEVERITY_CHOICES, default="info"
|
||||
@@ -147,6 +159,31 @@ class AutomatedTask(BaseAuditModel):
|
||||
|
||||
return self.last_run
|
||||
|
||||
# These fields will be duplicated on the agent tasks that are managed by a policy
@property
def policy_fields_to_copy(self) -> List[str]:
return [
"alert_severity",
"email_alert",
"text_alert",
"dashboard_alert",
"script",
"script_args",
"assigned_check",
"name",
"run_time_days",
"run_time_minute",
"run_time_bit_weekdays",
"run_time_date",
"task_type",
"win_task_name",
"timeout",
"enabled",
"remove_if_not_scheduled",
"run_asap_after_missed",
"custom_field",
]

@staticmethod
|
||||
def generate_task_name():
|
||||
chars = string.ascii_letters
|
||||
@@ -160,7 +197,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
return TaskSerializer(task).data
|
||||
|
||||
def create_policy_task(self, agent=None, policy=None):
|
||||
from .tasks import create_win_task_schedule
|
||||
|
||||
# if policy is present, then this task is being copied to another policy
|
||||
# if agent is present, then this task is being created on an agent from a policy
|
||||
@@ -177,15 +213,6 @@ class AutomatedTask(BaseAuditModel):
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
parent_check=self.assigned_check.pk
|
||||
).first()
|
||||
# check was overriden by agent and we need to use that agents check
|
||||
else:
|
||||
if agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type, overriden_by_policy=True
|
||||
).exists():
|
||||
assigned_check = agent.agentchecks.filter(
|
||||
check_type=self.assigned_check.check_type,
|
||||
overriden_by_policy=True,
|
||||
).first()
|
||||
elif policy and self.assigned_check:
|
||||
if policy.policychecks.filter(name=self.assigned_check.name).exists():
|
||||
assigned_check = policy.policychecks.filter(
|
||||
@@ -201,27 +228,175 @@ class AutomatedTask(BaseAuditModel):
|
||||
policy=policy,
|
||||
managed_by_policy=bool(agent),
|
||||
parent_task=(self.pk if agent else None),
|
||||
alert_severity=self.alert_severity,
|
||||
email_alert=self.email_alert,
|
||||
text_alert=self.text_alert,
|
||||
dashboard_alert=self.dashboard_alert,
|
||||
script=self.script,
|
||||
script_args=self.script_args,
|
||||
assigned_check=assigned_check,
|
||||
name=self.name,
|
||||
run_time_days=self.run_time_days,
|
||||
run_time_minute=self.run_time_minute,
|
||||
run_time_bit_weekdays=self.run_time_bit_weekdays,
|
||||
run_time_date=self.run_time_date,
|
||||
task_type=self.task_type,
|
||||
win_task_name=self.win_task_name,
|
||||
timeout=self.timeout,
|
||||
enabled=self.enabled,
|
||||
remove_if_not_scheduled=self.remove_if_not_scheduled,
|
||||
run_asap_after_missed=self.run_asap_after_missed,
|
||||
)
|
||||
|
||||
create_win_task_schedule.delay(task.pk)
|
||||
for field in self.policy_fields_to_copy:
|
||||
setattr(task, field, getattr(self, field))
|
||||
|
||||
task.save()
|
||||
|
||||
task.create_task_on_agent()
|
||||
|
||||
def create_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
if self.task_type == "scheduled":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": self.run_time_bit_weekdays,
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"hour": dt.datetime.strptime(self.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(self.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif self.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(agent.timezone)
|
||||
task_time_utc = self.run_time_date.replace(tzinfo=agent_tz).astimezone(
|
||||
pytz.utc
|
||||
)
|
||||
now = djangotime.now()
|
||||
if task_time_utc < now:
|
||||
self.run_time_date = now.astimezone(agent_tz).replace(
|
||||
tzinfo=pytz.utc
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
self.save(update_fields=["run_time_date"])
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
"year": int(dt.datetime.strftime(self.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(self.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(self.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(self.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(self.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if self.run_asap_after_missed and pyver.parse(agent.version) >= pyver.parse(
|
||||
"1.4.7"
|
||||
):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
|
||||
if self.remove_if_not_scheduled:
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif self.task_type == "checkfailure" or self.task_type == "manual":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": self.pk,
|
||||
"name": self.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "initial"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to create scheduled task {self.name} on {agent.hostname}. It will be created when the agent checks in."
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully created")
|
||||
|
||||
return "ok"
|
||||
|
||||
def modify_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": self.win_task_name,
|
||||
"enabled": self.enabled,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=5))
|
||||
|
||||
if r != "ok":
|
||||
self.sync_status = "notsynced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"Unable to modify scheduled task {self.name} on {agent.hostname}. It will try again on next agent checkin"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.sync_status = "synced"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.info(f"{agent.hostname} task {self.name} was successfully modified")
|
||||
|
||||
return "ok"
|
||||
|
||||
def delete_task_on_agent(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": self.win_task_name},
|
||||
}
|
||||
r = asyncio.run(agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
self.sync_status = "pendingdeletion"
|
||||
self.save(update_fields=["sync_status"])
|
||||
logger.warning(
|
||||
f"{agent.hostname} task {self.name} was successfully modified"
|
||||
)
|
||||
return "timeout"
|
||||
else:
|
||||
self.delete()
|
||||
logger.info(f"{agent.hostname} task {self.name} was deleted")
|
||||
|
||||
return "ok"
|
||||
|
||||
def run_win_task(self):
|
||||
from agents.models import Agent
|
||||
|
||||
agent = (
|
||||
Agent.objects.filter(pk=self.agent.pk)
|
||||
.only("pk", "version", "hostname", "agent_id")
|
||||
.first()
|
||||
)
|
||||
|
||||
asyncio.run(agent.nats_cmd({"func": "runtask", "taskpk": self.pk}, wait=False))
|
||||
return "ok"
|
||||
|
||||
def should_create_alert(self, alert_template=None):
|
||||
return (
|
||||
|
||||
@@ -4,207 +4,46 @@ import random
|
||||
from time import sleep
|
||||
from typing import Union
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from packaging import version as pyver
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.celery import app
|
||||
|
||||
from .models import AutomatedTask
|
||||
from autotasks.models import AutomatedTask
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@app.task
|
||||
def create_win_task_schedule(pk, pending_action=False):
|
||||
def create_win_task_schedule(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
if task.task_type == "scheduled":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "weekly",
|
||||
"weekdays": task.run_time_bit_weekdays,
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"hour": dt.datetime.strptime(task.run_time_minute, "%H:%M").hour,
|
||||
"min": dt.datetime.strptime(task.run_time_minute, "%H:%M").minute,
|
||||
},
|
||||
}
|
||||
|
||||
elif task.task_type == "runonce":
|
||||
# check if scheduled time is in the past
|
||||
agent_tz = pytz.timezone(task.agent.timezone)
|
||||
task_time_utc = task.run_time_date.replace(tzinfo=agent_tz).astimezone(pytz.utc)
|
||||
now = djangotime.now()
|
||||
if task_time_utc < now:
|
||||
task.run_time_date = now.astimezone(agent_tz).replace(
|
||||
tzinfo=pytz.utc
|
||||
) + djangotime.timedelta(minutes=5)
|
||||
task.save(update_fields=["run_time_date"])
|
||||
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "once",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
"year": int(dt.datetime.strftime(task.run_time_date, "%Y")),
|
||||
"month": dt.datetime.strftime(task.run_time_date, "%B"),
|
||||
"day": int(dt.datetime.strftime(task.run_time_date, "%d")),
|
||||
"hour": int(dt.datetime.strftime(task.run_time_date, "%H")),
|
||||
"min": int(dt.datetime.strftime(task.run_time_date, "%M")),
|
||||
},
|
||||
}
|
||||
|
||||
if task.run_asap_after_missed and pyver.parse(
|
||||
task.agent.version
|
||||
) >= pyver.parse("1.4.7"):
|
||||
nats_data["schedtaskpayload"]["run_asap_after_missed"] = True
|
||||
|
||||
if task.remove_if_not_scheduled:
|
||||
nats_data["schedtaskpayload"]["deleteafter"] = True
|
||||
|
||||
elif task.task_type == "checkfailure" or task.task_type == "manual":
|
||||
nats_data = {
|
||||
"func": "schedtask",
|
||||
"schedtaskpayload": {
|
||||
"type": "rmm",
|
||||
"trigger": "manual",
|
||||
"pk": task.pk,
|
||||
"name": task.win_task_name,
|
||||
},
|
||||
}
|
||||
else:
|
||||
return "error"
|
||||
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskcreate", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.error(
|
||||
f"Unable to create scheduled task {task.win_task_name} on {task.agent.hostname}. It will be created when the agent checks in."
|
||||
)
|
||||
return
|
||||
|
||||
# clear pending action since it was successful
|
||||
if pending_action:
|
||||
pendingaction = PendingAction.objects.get(pk=pending_action)
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
task.sync_status = "synced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
logger.info(f"{task.agent.hostname} task {task.name} was successfully created")
|
||||
task.create_task_on_agent()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def enable_or_disable_win_task(pk, action, pending_action=False):
|
||||
def enable_or_disable_win_task(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
nats_data = {
|
||||
"func": "enableschedtask",
|
||||
"schedtaskpayload": {
|
||||
"name": task.win_task_name,
|
||||
"enabled": action,
|
||||
},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data))
|
||||
|
||||
if r != "ok":
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
details={
|
||||
"action": "tasktoggle",
|
||||
"value": action,
|
||||
"task_id": task.id,
|
||||
},
|
||||
).save()
|
||||
task.sync_status = "notsynced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
return
|
||||
|
||||
# clear pending action since it was successful
|
||||
if pending_action:
|
||||
pendingaction = PendingAction.objects.get(pk=pending_action)
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
task.sync_status = "synced"
|
||||
task.save(update_fields=["sync_status"])
|
||||
task.modify_task_on_agent()
|
||||
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
|
||||
def delete_win_task_schedule(pk, pending_action=False):
|
||||
def delete_win_task_schedule(pk):
|
||||
task = AutomatedTask.objects.get(pk=pk)
|
||||
|
||||
nats_data = {
|
||||
"func": "delschedtask",
|
||||
"schedtaskpayload": {"name": task.win_task_name},
|
||||
}
|
||||
r = asyncio.run(task.agent.nats_cmd(nats_data, timeout=10))
|
||||
|
||||
if r != "ok" and "The system cannot find the file specified" not in r:
|
||||
# don't create pending action if this task was initiated by a pending action
|
||||
if not pending_action:
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
PendingAction(
|
||||
agent=task.agent,
|
||||
action_type="taskaction",
|
||||
details={"action": "taskdelete", "task_id": task.id},
|
||||
).save()
|
||||
task.sync_status = "pendingdeletion"
|
||||
task.save(update_fields=["sync_status"])
|
||||
|
||||
return "timeout"
|
||||
|
||||
# complete pending action since it was successful
|
||||
if pending_action:
|
||||
pendingaction = PendingAction.objects.get(pk=pending_action)
|
||||
pendingaction.status = "completed"
|
||||
pendingaction.save(update_fields=["status"])
|
||||
|
||||
# complete any other pending actions on agent with same task_id
|
||||
task.agent.remove_matching_pending_task_actions(task.id)
|
||||
|
||||
task.delete()
|
||||
task.delete_task_on_agent()
|
||||
return "ok"
|
||||
|
||||
|
||||
@app.task
def run_win_task(pk):
task = AutomatedTask.objects.get(pk=pk)
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
task.run_win_task()
return "ok"


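Taken together, the autotasks Celery tasks above become thin wrappers: each one now just loads the AutomatedTask row and delegates to the matching model method (create_task_on_agent, modify_task_on_agent, delete_task_on_agent, run_win_task), so the NATS payloads and sync_status handling live in one place on the model. A sketch of that wrapper shape, using a hypothetical task name for illustration:

    from tacticalrmm.celery import app
    from autotasks.models import AutomatedTask

    @app.task
    def some_autotask_wrapper(pk):
        # load the row and delegate; the model method builds the NATS payload
        # and updates sync_status itself
        task = AutomatedTask.objects.get(pk=pk)
        return task.create_task_on_agent()  # or the modify/delete/run variant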
@@ -4,7 +4,6 @@ from unittest.mock import call, patch
|
||||
from django.utils import timezone as djangotime
|
||||
from model_bakery import baker
|
||||
|
||||
from logs.models import PendingAction
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import AutomatedTask
|
||||
@@ -17,10 +16,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
@patch("automation.tasks.generate_agent_tasks_from_policies_task.delay")
|
||||
@patch("automation.tasks.generate_agent_autotasks_task.delay")
|
||||
@patch("autotasks.tasks.create_win_task_schedule.delay")
|
||||
def test_add_autotask(
|
||||
self, create_win_task_schedule, generate_agent_tasks_from_policies_task
|
||||
self, create_win_task_schedule, generate_agent_autotasks_task
|
||||
):
|
||||
url = "/tasks/automatedtasks/"
|
||||
|
||||
@@ -29,7 +28,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
policy = baker.make("automation.Policy")
|
||||
check = baker.make_recipe("checks.diskspace_check", agent=agent)
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
|
||||
# test script set to invalid pk
|
||||
data = {"autotask": {"script": 500}}
|
||||
@@ -52,15 +50,6 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test old agent version
|
||||
data = {
|
||||
"autotask": {"script": script.id},
|
||||
"agent": old_agent.id,
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
|
||||
# test add task to agent
|
||||
data = {
|
||||
"autotask": {
|
||||
@@ -94,13 +83,13 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
"task_type": "manual",
|
||||
"assigned_check": None,
|
||||
},
|
||||
"policy": policy.id,
|
||||
"policy": policy.id, # type: ignore
|
||||
}
|
||||
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
|
||||
generate_agent_tasks_from_policies_task.assert_called_with(policy.id)
|
||||
generate_agent_autotasks_task.assert_called_with(policy=policy.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
@@ -116,14 +105,14 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
serializer = AutoTaskSerializer(agent)
|
||||
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, serializer.data)
|
||||
self.assertEqual(resp.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
|
||||
@patch("automation.tasks.update_policy_task_fields_task.delay")
|
||||
@patch("automation.tasks.update_policy_autotasks_fields_task.delay")
|
||||
def test_update_autotask(
|
||||
self, update_policy_task_fields_task, enable_or_disable_win_task
|
||||
self, update_policy_autotasks_fields_task, enable_or_disable_win_task
|
||||
):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
@@ -135,32 +124,32 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
resp = self.client.patch("/tasks/500/automatedtasks/", format="json")
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/" # type: ignore
|
||||
|
||||
# test editing agent task
|
||||
data = {"enableordisable": False}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
enable_or_disable_win_task.assert_called_with(pk=agent_task.id, action=False)
|
||||
enable_or_disable_win_task.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/" # type: ignore
|
||||
|
||||
# test editing policy task
|
||||
data = {"enableordisable": True}
|
||||
|
||||
resp = self.client.patch(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
update_policy_task_fields_task.assert_called_with(
|
||||
policy_task.id, update_agent=True
|
||||
update_policy_autotasks_fields_task.assert_called_with(
|
||||
task=policy_task.id, update_agent=True # type: ignore
|
||||
)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@patch("autotasks.tasks.delete_win_task_schedule.delay")
|
||||
@patch("automation.tasks.delete_policy_autotask_task.delay")
|
||||
@patch("automation.tasks.delete_policy_autotasks_task.delay")
|
||||
def test_delete_autotask(
|
||||
self, delete_policy_autotask_task, delete_win_task_schedule
|
||||
self, delete_policy_autotasks_task, delete_win_task_schedule
|
||||
):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent")
|
||||
@@ -173,21 +162,21 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test delete agent task
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{agent_task.id}/automatedtasks/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
delete_win_task_schedule.assert_called_with(pk=agent_task.id)
|
||||
delete_win_task_schedule.assert_called_with(pk=agent_task.id) # type: ignore
|
||||
|
||||
# test delete policy task
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/"
|
||||
url = f"/tasks/{policy_task.id}/automatedtasks/" # type: ignore
|
||||
resp = self.client.delete(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
delete_policy_autotask_task.assert_called_with(policy_task.id)
|
||||
delete_policy_autotasks_task.assert_called_with(task=policy_task.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_autotask(self, nats_cmd):
|
||||
@patch("autotasks.tasks.run_win_task.delay")
|
||||
def test_run_autotask(self, run_win_task):
|
||||
# setup data
|
||||
agent = baker.make_recipe("agents.agent", version="1.1.0")
|
||||
task = baker.make("autotasks.AutomatedTask", agent=agent)
|
||||
@@ -197,18 +186,10 @@ class TestAutotaskViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
# test run agent task
|
||||
url = f"/tasks/runwintask/{task.id}/"
|
||||
url = f"/tasks/runwintask/{task.id}/" # type: ignore
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
|
||||
nats_cmd.reset_mock()
|
||||
|
||||
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
|
||||
url = f"/tasks/runwintask/{task2.id}/"
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
nats_cmd.assert_not_called()
|
||||
run_win_task.assert_called()
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -301,9 +282,9 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
run_time_bit_weekdays=127,
|
||||
run_time_minute="21:55",
|
||||
)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
self.assertEqual(self.task1.sync_status, "initial")
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
|
||||
self.assertEqual(nats_cmd.call_count, 1)
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
@@ -318,29 +299,16 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"min": 55,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "synced")
|
||||
|
||||
nats_cmd.return_value = "timeout"
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task1.pk).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
|
||||
self.assertEqual(self.task1.sync_status, "notsynced")
|
||||
|
||||
# test pending action
|
||||
self.pending_action = PendingAction.objects.create(
|
||||
agent=self.agent, action_type="taskaction"
|
||||
)
|
||||
self.assertEqual(self.pending_action.status, "pending")
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(
|
||||
pk=self.task1.pk, pending_action=self.pending_action.pk
|
||||
).apply()
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
self.pending_action = PendingAction.objects.get(pk=self.pending_action.pk)
|
||||
self.assertEqual(self.pending_action.status, "completed")
|
||||
self.assertEqual(self.task1.sync_status, "initial")
|
||||
|
||||
# test runonce with future date
|
||||
nats_cmd.reset_mock()
|
||||
@@ -354,7 +322,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task2.pk).apply()
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
@@ -370,7 +338,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -386,7 +354,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
run_time_date=run_time_date,
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task3.pk).apply()
|
||||
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -402,7 +370,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
assigned_check=self.check,
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task4.pk).apply()
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
@@ -413,7 +381,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -427,7 +395,7 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
task_type="manual",
|
||||
)
|
||||
nats_cmd.return_value = "ok"
|
||||
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
|
||||
ret = create_win_task_schedule.s(pk=self.task5.pk).apply()
|
||||
nats_cmd.assert_called_with(
|
||||
{
|
||||
"func": "schedtask",
|
||||
@@ -438,6 +406,6 @@ class TestAutoTaskCeleryTasks(TacticalTestCase):
|
||||
"name": task_name,
|
||||
},
|
||||
},
|
||||
timeout=10,
|
||||
timeout=5,
|
||||
)
|
||||
self.assertEqual(ret.status, "SUCCESS")
|
||||
|
||||
@@ -1,28 +1,22 @@
|
||||
import asyncio
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from checks.models import Check
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.utils import get_bit_days, get_default_timezone, notify_error
|
||||
|
||||
from .models import AutomatedTask
|
||||
from .serializers import AutoTaskSerializer, TaskSerializer
|
||||
from .tasks import (
|
||||
create_win_task_schedule,
|
||||
delete_win_task_schedule,
|
||||
enable_or_disable_win_task,
|
||||
)
|
||||
|
||||
|
||||
class AddAutoTask(APIView):
|
||||
def post(self, request):
|
||||
from automation.models import Policy
|
||||
from automation.tasks import generate_agent_tasks_from_policies_task
|
||||
from automation.tasks import generate_agent_autotasks_task
|
||||
|
||||
from autotasks.tasks import create_win_task_schedule
|
||||
|
||||
data = request.data
|
||||
script = get_object_or_404(Script, pk=data["autotask"]["script"])
|
||||
@@ -34,9 +28,6 @@ class AddAutoTask(APIView):
|
||||
parent = {"policy": policy}
|
||||
else:
|
||||
agent = get_object_or_404(Agent, pk=data["agent"])
|
||||
if not agent.has_gotasks:
|
||||
return notify_error("Requires agent version 1.1.1 or greater")
|
||||
|
||||
parent = {"agent": agent}
|
||||
|
||||
check = None
|
||||
@@ -50,7 +41,7 @@ class AddAutoTask(APIView):
|
||||
del data["autotask"]["run_time_days"]
|
||||
serializer = TaskSerializer(data=data["autotask"], partial=True, context=parent)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save(
|
||||
task = serializer.save(
|
||||
**parent,
|
||||
script=script,
|
||||
win_task_name=AutomatedTask.generate_task_name(),
|
||||
@@ -58,11 +49,11 @@ class AddAutoTask(APIView):
|
||||
run_time_bit_weekdays=bit_weekdays,
|
||||
)
|
||||
|
||||
if not "policy" in data:
|
||||
create_win_task_schedule.delay(pk=obj.pk)
|
||||
if task.agent:
|
||||
create_win_task_schedule.delay(pk=task.pk)
|
||||
|
||||
if "policy" in data:
|
||||
generate_agent_tasks_from_policies_task.delay(data["policy"])
|
||||
elif task.policy:
|
||||
generate_agent_autotasks_task.delay(policy=task.policy.pk)
|
||||
|
||||
return Response("Task will be created shortly!")
|
||||
|
||||
@@ -78,7 +69,7 @@ class AutoTask(APIView):
|
||||
return Response(AutoTaskSerializer(agent, context=ctx).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
@@ -87,39 +78,44 @@ class AutoTask(APIView):
|
||||
serializer.save()
|
||||
|
||||
if task.policy:
|
||||
update_policy_task_fields_task.delay(task.pk)
|
||||
update_policy_autotasks_fields_task.delay(task=task.pk)
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import update_policy_task_fields_task
|
||||
from automation.tasks import update_policy_autotasks_fields_task
|
||||
from autotasks.tasks import enable_or_disable_win_task
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if "enableordisable" in request.data:
|
||||
action = request.data["enableordisable"]
|
||||
|
||||
if not task.policy:
|
||||
enable_or_disable_win_task.delay(pk=task.pk, action=action)
|
||||
|
||||
else:
|
||||
update_policy_task_fields_task.delay(task.pk, update_agent=True)
|
||||
|
||||
task.enabled = action
|
||||
task.save(update_fields=["enabled"])
|
||||
action = "enabled" if action else "disabled"
|
||||
|
||||
if task.policy:
|
||||
update_policy_autotasks_fields_task.delay(
|
||||
task=task.pk, update_agent=True
|
||||
)
|
||||
elif task.agent:
|
||||
enable_or_disable_win_task.delay(pk=task.pk)
|
||||
|
||||
return Response(f"Task will be {action} shortly")
|
||||
|
||||
else:
|
||||
return notify_error("The request was invalid")
|
||||
|
||||
def delete(self, request, pk):
|
||||
from automation.tasks import delete_policy_autotask_task
|
||||
from automation.tasks import delete_policy_autotasks_task
|
||||
from autotasks.tasks import delete_win_task_schedule
|
||||
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
|
||||
if not task.policy:
|
||||
if task.agent:
|
||||
delete_win_task_schedule.delay(pk=task.pk)
|
||||
|
||||
if task.policy:
|
||||
delete_policy_autotask_task.delay(task.pk)
|
||||
elif task.policy:
|
||||
delete_policy_autotasks_task.delay(task=task.pk)
|
||||
task.delete()
|
||||
|
||||
return Response(f"{task.name} will be deleted shortly")
|
||||
@@ -127,9 +123,8 @@ class AutoTask(APIView):
|
||||
|
||||
@api_view()
|
||||
def run_task(request, pk):
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
if not task.agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
from autotasks.tasks import run_win_task
|
||||
|
||||
asyncio.run(task.agent.nats_cmd({"func": "runtask", "taskpk": task.pk}, wait=False))
|
||||
task = get_object_or_404(AutomatedTask, pk=pk)
|
||||
run_win_task.delay(pk=pk)
|
||||
return Response(f"{task.name} will now be run on {task.agent.hostname}")
|
||||
|
||||
@@ -4,17 +4,17 @@ import os
|
||||
import string
|
||||
from statistics import mean
|
||||
from typing import Any
|
||||
from packaging import version as pyver
|
||||
|
||||
import pytz
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from core.models import CoreSettings
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from alerts.models import SEVERITY_CHOICES
|
||||
from core.models import CoreSettings
|
||||
from logs.models import BaseAuditModel
|
||||
from loguru import logger
|
||||
|
||||
from .utils import bytes2human
|
||||
|
||||
@@ -263,6 +263,42 @@ class Check(BaseAuditModel):
|
||||
"modified_time",
|
||||
]
|
||||
|
||||
@property
|
||||
def policy_fields_to_copy(self) -> list[str]:
|
||||
return [
|
||||
"warning_threshold",
|
||||
"error_threshold",
|
||||
"alert_severity",
|
||||
"name",
|
||||
"run_interval",
|
||||
"disk",
|
||||
"fails_b4_alert",
|
||||
"ip",
|
||||
"script",
|
||||
"script_args",
|
||||
"info_return_codes",
|
||||
"warning_return_codes",
|
||||
"timeout",
|
||||
"svc_name",
|
||||
"svc_display_name",
|
||||
"svc_policy_mode",
|
||||
"pass_if_start_pending",
|
||||
"pass_if_svc_not_exist",
|
||||
"restart_if_stopped",
|
||||
"log_name",
|
||||
"event_id",
|
||||
"event_id_is_wildcard",
|
||||
"event_type",
|
||||
"event_source",
|
||||
"event_message",
|
||||
"fail_when",
|
||||
"search_last_days",
|
||||
"number_of_events_b4_alert",
|
||||
"email_alert",
|
||||
"text_alert",
|
||||
"dashboard_alert",
|
||||
]
|
||||
|
||||
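Since create_policy_check further down copies fields by iterating this list with setattr/getattr, one safeguard worth having is a check that every listed name is a real model field; otherwise setattr would set a plain attribute that never reaches the database. A hypothetical guard test along those lines (not part of this diff):

    from checks.models import Check

    def test_policy_fields_to_copy_are_model_fields():
        model_fields = {f.name for f in Check._meta.get_fields()}
        for field in Check().policy_fields_to_copy:
            # every copied name must exist as a field on Check
            assert field in model_fields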
def should_create_alert(self, alert_template=None):
|
||||
|
||||
return (
|
||||
@@ -386,16 +422,20 @@ class Check(BaseAuditModel):
|
||||
|
||||
# ping checks
|
||||
elif self.check_type == "ping":
|
||||
success = ["Reply", "bytes", "time", "TTL"]
|
||||
output = data["output"]
|
||||
|
||||
if data["has_stdout"]:
|
||||
if all(x in output for x in success):
|
||||
self.status = "passing"
|
||||
else:
|
||||
if pyver.parse(self.agent.version) <= pyver.parse("1.5.2"):
|
||||
# DEPRECATED
|
||||
success = ["Reply", "bytes", "time", "TTL"]
|
||||
if data["has_stdout"]:
|
||||
if all(x in output for x in success):
|
||||
self.status = "passing"
|
||||
else:
|
||||
self.status = "failing"
|
||||
elif data["has_stderr"]:
|
||||
self.status = "failing"
|
||||
elif data["has_stderr"]:
|
||||
self.status = "failing"
|
||||
else:
|
||||
self.status = data["status"]
|
||||
|
||||
self.more_info = output
|
||||
self.save(update_fields=["more_info"])
|
||||
@@ -551,49 +591,23 @@ class Check(BaseAuditModel):
|
||||
|
||||
def create_policy_check(self, agent=None, policy=None):
|
||||
|
||||
if not agent and not policy or agent and policy:
|
||||
if (not agent and not policy) or (agent and policy):
|
||||
return
|
||||
|
||||
Check.objects.create(
|
||||
check = Check.objects.create(
|
||||
agent=agent,
|
||||
policy=policy,
|
||||
managed_by_policy=bool(agent),
|
||||
parent_check=(self.pk if agent else None),
|
||||
name=self.name,
|
||||
alert_severity=self.alert_severity,
|
||||
check_type=self.check_type,
|
||||
email_alert=self.email_alert,
|
||||
dashboard_alert=self.dashboard_alert,
|
||||
text_alert=self.text_alert,
|
||||
fails_b4_alert=self.fails_b4_alert,
|
||||
extra_details=self.extra_details,
|
||||
run_interval=self.run_interval,
|
||||
error_threshold=self.error_threshold,
|
||||
warning_threshold=self.warning_threshold,
|
||||
disk=self.disk,
|
||||
ip=self.ip,
|
||||
script=self.script,
|
||||
script_args=self.script_args,
|
||||
timeout=self.timeout,
|
||||
info_return_codes=self.info_return_codes,
|
||||
warning_return_codes=self.warning_return_codes,
|
||||
svc_name=self.svc_name,
|
||||
svc_display_name=self.svc_display_name,
|
||||
pass_if_start_pending=self.pass_if_start_pending,
|
||||
pass_if_svc_not_exist=self.pass_if_svc_not_exist,
|
||||
restart_if_stopped=self.restart_if_stopped,
|
||||
svc_policy_mode=self.svc_policy_mode,
|
||||
log_name=self.log_name,
|
||||
event_id=self.event_id,
|
||||
event_id_is_wildcard=self.event_id_is_wildcard,
|
||||
event_type=self.event_type,
|
||||
event_source=self.event_source,
|
||||
event_message=self.event_message,
|
||||
fail_when=self.fail_when,
|
||||
search_last_days=self.search_last_days,
|
||||
number_of_events_b4_alert=self.number_of_events_b4_alert,
|
||||
)
|
||||
|
||||
for field in self.policy_fields_to_copy:
|
||||
setattr(check, field, getattr(self, field))
|
||||
|
||||
check.save()
|
||||
|
||||
def is_duplicate(self, check):
|
||||
if self.check_type == "diskspace":
|
||||
return self.disk == check.disk
|
||||
@@ -633,12 +647,15 @@ class Check(BaseAuditModel):
|
||||
if self.error_threshold:
|
||||
text += f" Error Threshold: {self.error_threshold}%"
|
||||
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
try:
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
except:
|
||||
body = subject + f" - Disk {self.disk} does not exist"
|
||||
|
||||
elif self.check_type == "script":
|
||||
|
||||
@@ -667,16 +684,7 @@ class Check(BaseAuditModel):
|
||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||
|
||||
elif self.check_type == "winsvc":
|
||||
|
||||
try:
|
||||
status = list(
|
||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
||||
)[0]["status"]
|
||||
# catch services that don't exist if policy check
|
||||
except:
|
||||
status = "Unknown"
|
||||
|
||||
body = subject + f" - Status: {status.upper()}"
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
|
||||
elif self.check_type == "eventlog":
|
||||
|
||||
@@ -719,11 +727,15 @@ class Check(BaseAuditModel):
|
||||
if self.error_threshold:
|
||||
text += f" Error Threshold: {self.error_threshold}%"
|
||||
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
try:
|
||||
percent_used = [
|
||||
d["percent"] for d in self.agent.disks if d["device"] == self.disk
|
||||
][0]
|
||||
percent_free = 100 - percent_used
|
||||
body = subject + f" - Free: {percent_free}%, {text}"
|
||||
except:
|
||||
body = subject + f" - Disk {self.disk} does not exist"
|
||||
|
||||
elif self.check_type == "script":
|
||||
body = subject + f" - Return code: {self.retcode}"
|
||||
elif self.check_type == "ping":
|
||||
@@ -741,10 +753,7 @@ class Check(BaseAuditModel):
|
||||
elif self.check_type == "memory":
|
||||
body = subject + f" - Average memory usage: {avg}%, {text}"
|
||||
elif self.check_type == "winsvc":
|
||||
status = list(
|
||||
filter(lambda x: x["name"] == self.svc_name, self.agent.services)
|
||||
)[0]["status"]
|
||||
body = subject + f" - Status: {status.upper()}"
|
||||
body = subject + f" - Status: {self.more_info}"
|
||||
elif self.check_type == "eventlog":
|
||||
body = subject
|
||||
|
||||
|
||||
@@ -14,6 +14,22 @@ class TestCheckViews(TacticalTestCase):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_delete_agent_check(self):
# setup data
agent = baker.make_recipe("agents.agent")
check = baker.make_recipe("checks.diskspace_check", agent=agent)

resp = self.client.delete("/checks/500/check/", format="json")
self.assertEqual(resp.status_code, 404)

url = f"/checks/{check.pk}/check/"

resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
self.assertFalse(agent.agentchecks.all())

self.check_not_authenticated("delete", url)

def test_get_disk_check(self):
|
||||
# setup data
|
||||
disk_check = baker.make_recipe("checks.diskspace_check")
|
||||
@@ -310,14 +326,8 @@ class TestCheckViews(TacticalTestCase):
|
||||
@patch("agents.models.Agent.nats_cmd")
|
||||
def test_run_checks(self, nats_cmd):
|
||||
agent = baker.make_recipe("agents.agent", version="1.4.1")
|
||||
agent_old = baker.make_recipe("agents.agent", version="1.0.2")
|
||||
agent_b4_141 = baker.make_recipe("agents.agent", version="1.4.0")
|
||||
|
||||
url = f"/checks/runchecks/{agent_old.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.json(), "Requires agent version 1.1.0 or greater")
|
||||
|
||||
url = f"/checks/runchecks/{agent_b4_141.pk}/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import asyncio
|
||||
from datetime import datetime as dt
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
@@ -8,14 +10,6 @@ from packaging import version as pyver
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from automation.models import Policy
|
||||
from automation.tasks import (
|
||||
delete_policy_check_task,
|
||||
generate_agent_checks_from_policies_task,
|
||||
update_policy_check_fields_task,
|
||||
)
|
||||
from scripts.models import Script
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
@@ -25,6 +19,8 @@ from .serializers import CheckHistorySerializer, CheckSerializer
|
||||
|
||||
class AddCheck(APIView):
|
||||
def post(self, request):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
policy = None
|
||||
agent = None
|
||||
|
||||
@@ -53,28 +49,30 @@ class AddCheck(APIView):
|
||||
data=request.data["check"], partial=True, context=parent
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save(**parent, script=script)
|
||||
new_check = serializer.save(**parent, script=script)
|
||||
|
||||
# Generate policy Checks
|
||||
if policy:
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy.pk)
|
||||
generate_agent_checks_task.delay(policy=policy.pk)
|
||||
elif agent:
|
||||
checks = agent.agentchecks.filter( # type: ignore
|
||||
check_type=obj.check_type, managed_by_policy=True
|
||||
check_type=new_check.check_type, managed_by_policy=True
|
||||
)
|
||||
|
||||
# Should only be one
|
||||
duplicate_check = [check for check in checks if check.is_duplicate(obj)]
|
||||
duplicate_check = [
|
||||
check for check in checks if check.is_duplicate(new_check)
|
||||
]
|
||||
|
||||
if duplicate_check:
|
||||
policy = Check.objects.get(pk=duplicate_check[0].parent_check).policy
|
||||
if policy.enforced:
|
||||
obj.overriden_by_policy = True
|
||||
obj.save()
|
||||
new_check.overriden_by_policy = True
|
||||
new_check.save()
|
||||
else:
|
||||
duplicate_check[0].delete()
|
||||
|
||||
return Response(f"{obj.readable_desc} was added!")
|
||||
return Response(f"{new_check.readable_desc} was added!")
|
||||
|
||||
|
||||
class GetUpdateDeleteCheck(APIView):
|
||||
@@ -83,10 +81,17 @@ class GetUpdateDeleteCheck(APIView):
|
||||
return Response(CheckSerializer(check).data)
|
||||
|
||||
def patch(self, request, pk):
|
||||
from automation.tasks import (
|
||||
update_policy_check_fields_task,
|
||||
)
|
||||
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
# remove fields that should not be changed when editing a check from the frontend
|
||||
if "check_alert" not in request.data.keys():
|
||||
if (
|
||||
"check_alert" not in request.data.keys()
|
||||
and "check_reset" not in request.data.keys()
|
||||
):
|
||||
[request.data.pop(i) for i in check.non_editable_fields]
|
||||
|
||||
# set event id to 0 if wildcard because it needs to be an integer field for db
|
||||
@@ -102,31 +107,31 @@ class GetUpdateDeleteCheck(APIView):
|
||||
|
||||
serializer = CheckSerializer(instance=check, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
obj = serializer.save()
|
||||
check = serializer.save()
|
||||
|
||||
# resolve any alerts that are open
|
||||
if "check_reset" in request.data.keys():
|
||||
if check.alert.filter(resolved=False).exists():
|
||||
check.alert.get(resolved=False).resolve()
|
||||
|
||||
# Update policy check fields
|
||||
if check.policy:
|
||||
update_policy_check_fields_task(checkpk=pk)
|
||||
update_policy_check_fields_task.delay(check=check.pk)
|
||||
|
||||
return Response(f"{obj.readable_desc} was edited!")
|
||||
return Response(f"{check.readable_desc} was edited!")
|
||||
|
||||
def delete(self, request, pk):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
check = get_object_or_404(Check, pk=pk)
|
||||
|
||||
check_pk = check.pk
|
||||
policy_pk = None
|
||||
if check.policy:
|
||||
policy_pk = check.policy.pk
|
||||
|
||||
check.delete()
|
||||
|
||||
# Policy check deleted
|
||||
if check.policy:
|
||||
delete_policy_check_task.delay(checkpk=check_pk)
|
||||
Check.objects.filter(parent_check=check.pk).delete()
|
||||
|
||||
# Re-evaluate agent checks is policy was enforced
|
||||
if check.policy.enforced:
|
||||
generate_agent_checks_from_policies_task.delay(policypk=policy_pk)
|
||||
generate_agent_checks_task.delay(policy=check.policy)
|
||||
|
||||
# Agent check deleted
|
||||
elif check.agent:
|
||||
@@ -161,8 +166,6 @@ class CheckHistory(APIView):
|
||||
@api_view()
|
||||
def run_checks(request, pk):
|
||||
agent = get_object_or_404(Agent, pk=pk)
|
||||
if not agent.has_nats:
|
||||
return notify_error("Requires agent version 1.1.0 or greater")
|
||||
|
||||
if pyver.parse(agent.version) >= pyver.parse("1.4.1"):
|
||||
r = asyncio.run(agent.nats_cmd({"func": "runchecks"}, timeout=15))
|
||||
|
||||
@@ -1,7 +1,9 @@
from django.contrib import admin

from .models import Client, Deployment, Site
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField

admin.site.register(Client)
admin.site.register(Site)
admin.site.register(Deployment)
admin.site.register(ClientCustomField)
admin.site.register(SiteCustomField)
@@ -0,0 +1,33 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-17 14:45
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0014_customfield'),
|
||||
('clients', '0009_auto_20210212_1408'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='SiteCustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('value', models.TextField(blank=True, null=True)),
|
||||
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_fields', to='core.customfield')),
|
||||
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='clients.site')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ClientCustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('value', models.TextField(blank=True, null=True)),
|
||||
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='custom_fields', to='clients.client')),
|
||||
('field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='client_fields', to='core.customfield')),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-21 15:11
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0010_clientcustomfield_sitecustomfield'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='site',
|
||||
unique_together={('client', 'name')},
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-26 06:52
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0011_auto_20210321_1511'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='deployment',
|
||||
name='created',
|
||||
field=models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 02:51
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0012_deployment_created'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='clientcustomfield',
|
||||
name='multiple_value',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitecustomfield',
|
||||
name='multiple_value',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 03:01
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0013_auto_20210329_0251'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='clientcustomfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitecustomfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(blank=True, default=False),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 17:09
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0014_auto_20210329_0301'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='clientcustomfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='bool_value',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='clientcustomfield',
|
||||
old_name='value',
|
||||
new_name='string_value',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='sitecustomfield',
|
||||
name='value',
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 18:27
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0015_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='sitecustomfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='bool_value',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='sitecustomfield',
|
||||
name='string_value',
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-17 01:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('clients', '0016_auto_20210329_1827'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='client',
|
||||
name='block_policy_inheritance',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='site',
|
||||
name='block_policy_inheritance',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,5 +1,6 @@
|
||||
import uuid
|
||||
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
|
||||
from agents.models import Agent
|
||||
@@ -8,6 +9,7 @@ from logs.models import BaseAuditModel
|
||||
|
||||
class Client(BaseAuditModel):
|
||||
name = models.CharField(max_length=255, unique=True)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="workstation_clients",
|
||||
@@ -33,30 +35,29 @@ class Client(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kw):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_by_location_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_client = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(BaseAuditModel, self).save(*args, **kw)
|
||||
|
||||
# check if server polcies have changed and initiate task to reapply policies if so
|
||||
if old_client and old_client.server_policy != self.server_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": self.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_client:
|
||||
if (
|
||||
(old_client.server_policy != self.server_policy)
|
||||
or (old_client.workstation_policy != self.workstation_policy)
|
||||
or (
|
||||
old_client.block_policy_inheritance != self.block_policy_inheritance
|
||||
)
|
||||
):
|
||||
|
||||
# check if workstation polcies have changed and initiate task to reapply policies if so
|
||||
if old_client and old_client.workstation_policy != self.workstation_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site__client_id": self.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_task.delay(
|
||||
client=self.pk,
|
||||
create_tasks=True,
|
||||
)
|
||||
|
||||
if old_client and old_client.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_client.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
@@ -64,6 +65,10 @@ class Client(BaseAuditModel):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def agent_count(self) -> int:
|
||||
return Agent.objects.filter(site__client=self).count()
|
||||
|
||||
@property
|
||||
def has_maintenanace_mode_agents(self):
|
||||
return (
|
||||
@@ -85,16 +90,24 @@ class Client(BaseAuditModel):
|
||||
.prefetch_related("agentchecks")
|
||||
)
|
||||
|
||||
failing = 0
|
||||
data = {"error": False, "warning": False}
|
||||
|
||||
for agent in agents:
|
||||
if agent.checks["has_failing_checks"]:
|
||||
failing += 1
|
||||
|
||||
if agent.checks["warning"]:
|
||||
data["warning"] = True
|
||||
|
||||
if agent.checks["failing"]:
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
failing += 1
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
return failing > 0
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def serialize(client):
|
||||
@@ -107,6 +120,7 @@ class Client(BaseAuditModel):
|
||||
class Site(BaseAuditModel):
|
||||
client = models.ForeignKey(Client, related_name="sites", on_delete=models.CASCADE)
|
||||
name = models.CharField(max_length=255)
|
||||
block_policy_inheritance = models.BooleanField(default=False)
|
||||
workstation_policy = models.ForeignKey(
|
||||
"automation.Policy",
|
||||
related_name="workstation_sites",
|
||||
@@ -132,37 +146,36 @@ class Site(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kw):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_agent_checks_by_location_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
# get old client if exists
|
||||
old_site = type(self).objects.get(pk=self.pk) if self.pk else None
|
||||
super(Site, self).save(*args, **kw)
|
||||
|
||||
# check if server polcies have changed and initiate task to reapply policies if so
|
||||
if old_site and old_site.server_policy != self.server_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": self.pk},
|
||||
mon_type="server",
|
||||
create_tasks=True,
|
||||
)
|
||||
# check if polcies have changed and initiate task to reapply policies if so
|
||||
if old_site:
|
||||
if (
|
||||
(old_site.server_policy != self.server_policy)
|
||||
or (old_site.workstation_policy != self.workstation_policy)
|
||||
or (old_site.block_policy_inheritance != self.block_policy_inheritance)
|
||||
):
|
||||
|
||||
# check if workstation polcies have changed and initiate task to reapply policies if so
|
||||
if old_site and old_site.workstation_policy != self.workstation_policy:
|
||||
generate_agent_checks_by_location_task.delay(
|
||||
location={"site_id": self.pk},
|
||||
mon_type="workstation",
|
||||
create_tasks=True,
|
||||
)
|
||||
generate_agent_checks_task.delay(site=self.pk, create_tasks=True)
|
||||
|
||||
if old_site and old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
if old_site.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
|
||||
class Meta:
|
||||
ordering = ("name",)
|
||||
unique_together = (("client", "name"),)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def agent_count(self) -> int:
|
||||
return Agent.objects.filter(site=self).count()
|
||||
|
||||
@property
|
||||
def has_maintenanace_mode_agents(self):
|
||||
return Agent.objects.filter(site=self, maintenance_mode=True).count() > 0
|
||||
@@ -182,16 +195,24 @@ class Site(BaseAuditModel):
|
||||
.prefetch_related("agentchecks")
|
||||
)
|
||||
|
||||
failing = 0
|
||||
data = {"error": False, "warning": False}
|
||||
|
||||
for agent in agents:
|
||||
|
||||
if agent.checks["has_failing_checks"]:
|
||||
failing += 1
|
||||
if agent.checks["warning"]:
|
||||
data["warning"] = True
|
||||
|
||||
if agent.checks["failing"]:
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
if agent.overdue_email_alert or agent.overdue_text_alert:
|
||||
if agent.status == "overdue":
|
||||
failing += 1
|
||||
data["error"] = True
|
||||
break
|
||||
|
||||
return failing > 0
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def serialize(site):
|
||||
@@ -225,6 +246,7 @@ class Deployment(models.Model):
|
||||
)
|
||||
arch = models.CharField(max_length=255, choices=ARCH_CHOICES, default="64")
|
||||
expiry = models.DateTimeField(null=True, blank=True)
|
||||
created = models.DateTimeField(auto_now_add=True, null=True, blank=True)
|
||||
auth_token = models.ForeignKey(
|
||||
"knox.AuthToken", related_name="deploytokens", on_delete=models.CASCADE
|
||||
)
|
||||
@@ -233,3 +255,73 @@ class Deployment(models.Model):
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.client} - {self.site} - {self.mon_type}"

class ClientCustomField(models.Model):
    client = models.ForeignKey(
        Client,
        related_name="custom_fields",
        on_delete=models.CASCADE,
    )

    field = models.ForeignKey(
        "core.CustomField",
        related_name="client_fields",
        on_delete=models.CASCADE,
    )

    string_value = models.TextField(null=True, blank=True)
    bool_value = models.BooleanField(blank=True, default=False)
    multiple_value = ArrayField(
        models.TextField(null=True, blank=True),
        null=True,
        blank=True,
        default=list,
    )

    def __str__(self):
        return self.field.name

    @property
    def value(self):
        if self.field.type == "multiple":
            return self.multiple_value
        elif self.field.type == "checkbox":
            return self.bool_value
        else:
            return self.string_value


class SiteCustomField(models.Model):
    site = models.ForeignKey(
        Site,
        related_name="custom_fields",
        on_delete=models.CASCADE,
    )

    field = models.ForeignKey(
        "core.CustomField",
        related_name="site_fields",
        on_delete=models.CASCADE,
    )

    string_value = models.TextField(null=True, blank=True)
    bool_value = models.BooleanField(blank=True, default=False)
    multiple_value = ArrayField(
        models.TextField(null=True, blank=True),
        null=True,
        blank=True,
        default=list,
    )

    def __str__(self):
        return self.field.name

    @property
    def value(self):
        if self.field.type == "multiple":
            return self.multiple_value
        elif self.field.type == "checkbox":
            return self.bool_value
        else:
            return self.string_value
@@ -1,42 +1,93 @@
|
||||
from rest_framework.serializers import ModelSerializer, ReadOnlyField, ValidationError
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
|
||||
|
||||
class SiteCustomFieldSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = SiteCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"site",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class SiteSerializer(ModelSerializer):
|
||||
client_name = ReadOnlyField(source="client.name")
|
||||
custom_fields = SiteCustomFieldSerializer(many=True, read_only=True)
|
||||
agent_count = ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
"server_policy",
|
||||
"workstation_policy",
|
||||
"alert_template",
|
||||
"client_name",
|
||||
"client",
|
||||
"custom_fields",
|
||||
"agent_count",
|
||||
"block_policy_inheritance",
|
||||
)
|
||||
|
||||
def validate(self, val):
|
||||
if "name" in val.keys() and "|" in val["name"]:
|
||||
raise ValidationError("Site name cannot contain the | character")
|
||||
|
||||
if self.context:
|
||||
client = Client.objects.get(pk=self.context["clientpk"])
|
||||
if Site.objects.filter(client=client, name=val["name"]).exists():
|
||||
raise ValidationError(f"Site {val['name']} already exists")
|
||||
|
||||
return val
|
||||
|
||||
|
||||
class ClientCustomFieldSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = ClientCustomField
|
||||
fields = (
|
||||
"id",
|
||||
"field",
|
||||
"client",
|
||||
"value",
|
||||
"string_value",
|
||||
"bool_value",
|
||||
"multiple_value",
|
||||
)
|
||||
extra_kwargs = {
|
||||
"string_value": {"write_only": True},
|
||||
"bool_value": {"write_only": True},
|
||||
"multiple_value": {"write_only": True},
|
||||
}
|
||||
|
||||
|
||||
class ClientSerializer(ModelSerializer):
|
||||
sites = SiteSerializer(many=True, read_only=True)
|
||||
custom_fields = ClientCustomFieldSerializer(many=True, read_only=True)
|
||||
agent_count = ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
fields = (
|
||||
"id",
|
||||
"name",
|
||||
"server_policy",
|
||||
"workstation_policy",
|
||||
"alert_template",
|
||||
"block_policy_inheritance",
|
||||
"sites",
|
||||
"custom_fields",
|
||||
"agent_count",
|
||||
)
|
||||
|
||||
def validate(self, val):
|
||||
|
||||
if "site" in self.context:
|
||||
if "|" in self.context["site"]:
|
||||
raise ValidationError("Site name cannot contain the | character")
|
||||
if len(self.context["site"]) > 255:
|
||||
raise ValidationError("Site name too long")
|
||||
|
||||
if "name" in val.keys() and "|" in val["name"]:
|
||||
raise ValidationError("Client name cannot contain the | character")
|
||||
|
||||
@@ -50,7 +101,6 @@ class SiteTreeSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Site
|
||||
fields = "__all__"
|
||||
ordering = ("failing_checks",)
|
||||
|
||||
|
||||
class ClientTreeSerializer(ModelSerializer):
|
||||
@@ -61,7 +111,6 @@ class ClientTreeSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Client
|
||||
fields = "__all__"
|
||||
ordering = ("failing_checks",)
|
||||
|
||||
|
||||
class DeploymentSerializer(ModelSerializer):
|
||||
@@ -83,4 +132,5 @@ class DeploymentSerializer(ModelSerializer):
|
||||
"arch",
|
||||
"expiry",
|
||||
"install_flags",
|
||||
"created",
|
||||
]
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
from .serializers import (
|
||||
ClientSerializer,
|
||||
ClientTreeSerializer,
|
||||
@@ -28,18 +29,29 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientSerializer(clients, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_client(self):
|
||||
url = "/clients/clients/"
|
||||
payload = {"client": "Company 1", "site": "Site 1"}
|
||||
|
||||
# test successfull add client
|
||||
payload = {
|
||||
"client": {"name": "Client1"},
|
||||
"site": {"name": "Site1"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
payload["client"] = "Company1|askd"
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
# test add client with | in name
|
||||
payload = {
|
||||
"client": {"name": "Client2|d"},
|
||||
"site": {"name": "Site1"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = ClientSerializer(data=payload["client"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Client name cannot contain the | character"
|
||||
):
|
||||
@@ -48,19 +60,22 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
payload = {"client": "Company 156", "site": "Site2|a34"}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Site name cannot contain the | character"
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
# test add client with | in Site name
|
||||
payload = {
|
||||
"client": {"name": "Client2"},
|
||||
"site": {"name": "Site1|fds"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test unique
|
||||
payload = {"client": "Company 1", "site": "Site 1"}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
payload = {
|
||||
"client": {"name": "Client1"},
|
||||
"site": {"name": "Site1"},
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = ClientSerializer(data=payload["client"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "client with this name already exists."
|
||||
):
|
||||
@@ -69,66 +84,124 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test long site name
|
||||
payload = {"client": "Company 2394", "site": "Site123" * 100}
|
||||
serializer = ClientSerializer(data={"name": payload["client"]}, context=payload)
|
||||
with self.assertRaisesMessage(ValidationError, "Site name too long"):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test initial setup
|
||||
payload = {
|
||||
"client": {"client": "Company 4", "site": "HQ"},
|
||||
"initialsetup": True,
|
||||
"client": {"name": "Setup Client"},
|
||||
"site": {"name": "Setup Site"},
|
||||
"timezone": "America/Los_Angeles",
|
||||
"initialsetup": True,
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
# test add with custom fields
|
||||
field = baker.make("core.CustomField", model="client", type="text")
|
||||
payload = {
|
||||
"client": {"name": "Custom Field Client"},
|
||||
"site": {"name": "Setup Site"},
|
||||
"custom_fields": [{"field": field.id, "string_value": "new Value"}], # type: ignore
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
client = Client.objects.get(name="Custom Field Client")
|
||||
self.assertTrue(
|
||||
ClientCustomField.objects.filter(client=client, field=field).exists()
|
||||
)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
|
||||
url = f"/clients/{client.id}/client/" # type: ignore
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientSerializer(client)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_edit_client(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
client = baker.make("clients.Client", name="OldClientName")
|
||||
|
||||
# test invalid id
|
||||
r = self.client.put("/clients/500/client/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
data = {"id": client.id, "name": "New Name"}
|
||||
|
||||
url = f"/clients/{client.id}/client/"
|
||||
# test successfull edit client
|
||||
data = {"client": {"name": "NewClientName"}, "custom_fields": []}
|
||||
url = f"/clients/{client.id}/client/" # type: ignore
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(Client.objects.filter(name="New Name").exists())
|
||||
self.assertTrue(Client.objects.filter(name="NewClientName").exists())
|
||||
self.assertFalse(Client.objects.filter(name="OldClientName").exists())
|
||||
|
||||
# test edit client with | in name
|
||||
data = {"client": {"name": "NewClie|ntName"}, "custom_fields": []}
|
||||
url = f"/clients/{client.id}/client/" # type: ignore
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test add with custom fields new value
|
||||
field = baker.make("core.CustomField", model="client", type="checkbox")
|
||||
payload = {
|
||||
"client": {
|
||||
"id": client.id, # type: ignore
|
||||
"name": "Custom Field Client",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "bool_value": True}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
client = Client.objects.get(name="Custom Field Client")
|
||||
self.assertTrue(
|
||||
ClientCustomField.objects.filter(client=client, field=field).exists()
|
||||
)
|
||||
|
||||
# edit custom field value
|
||||
payload = {
|
||||
"client": {
|
||||
"id": client.id, # type: ignore
|
||||
"name": "Custom Field Client",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "bool_value": False}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertFalse(
|
||||
ClientCustomField.objects.get(client=client, field=field).value
|
||||
)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_client(self):
|
||||
from agents.models import Agent
|
||||
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
client_to_delete = baker.make("clients.Client")
|
||||
client_to_move = baker.make("clients.Client")
|
||||
site_to_move = baker.make("clients.Site", client=client_to_move)
|
||||
agent = baker.make_recipe("agents.agent", site=site_to_move)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/500/client/", format="json")
|
||||
r = self.client.delete("/clients/334/953/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/clients/{client.id}/client/"
|
||||
|
||||
# test deleting with agents under client
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
url = f"/clients/{client_to_delete.id}/{site_to_move.id}/" # type: ignore
|
||||
|
||||
# test successful deletion
|
||||
agent.delete()
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Client.objects.filter(pk=client.id).exists())
|
||||
self.assertFalse(Site.objects.filter(pk=site.id).exists())
|
||||
agent_moved = Agent.objects.get(pk=agent.pk)
|
||||
self.assertEqual(agent_moved.site.id, site_to_move.id) # type: ignore
|
||||
self.assertFalse(Client.objects.filter(pk=client_to_delete.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_sites(self):
|
||||
# setup data
|
||||
@@ -139,29 +212,31 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = SiteSerializer(sites, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_site(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
|
||||
url = "/clients/sites/"
|
||||
|
||||
# test success add
|
||||
payload = {"client": site.client.id, "name": "LA Office"}
|
||||
payload = {
|
||||
"site": {"client": client.id, "name": "LA Office"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(
|
||||
Site.objects.filter(
|
||||
name="LA Office", client__name=site.client.name
|
||||
).exists()
|
||||
)
|
||||
|
||||
# test with | symbol
|
||||
payload = {"client": site.client.id, "name": "LA Off|ice |*&@#$"}
|
||||
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
|
||||
payload = {
|
||||
"site": {"client": client.id, "name": "LA Office |*&@#$"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = SiteSerializer(data=payload["site"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "Site name cannot contain the | character"
|
||||
):
|
||||
@@ -171,55 +246,135 @@ class TestClientViews(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test site already exists
|
||||
payload = {"client": site.client.id, "name": "LA Office"}
|
||||
serializer = SiteSerializer(data=payload, context={"clientpk": site.client.id})
|
||||
with self.assertRaisesMessage(ValidationError, "Site LA Office already exists"):
|
||||
payload = {
|
||||
"site": {"client": site.client.id, "name": "LA Office"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
serializer = SiteSerializer(data=payload["site"])
|
||||
with self.assertRaisesMessage(
|
||||
ValidationError, "The fields client, name must make a unique set."
|
||||
):
|
||||
self.assertFalse(serializer.is_valid(raise_exception=True))
|
||||
|
||||
# test add with custom fields
|
||||
field = baker.make(
|
||||
"core.CustomField",
|
||||
model="site",
|
||||
type="single",
|
||||
options=["one", "two", "three"],
|
||||
)
|
||||
payload = {
|
||||
"site": {"client": client.id, "name": "Custom Field Site"}, # type: ignore
|
||||
"custom_fields": [{"field": field.id, "string_value": "one"}], # type: ignore
|
||||
}
|
||||
r = self.client.post(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
site = Site.objects.get(name="Custom Field Site")
|
||||
self.assertTrue(SiteCustomField.objects.filter(site=site, field=field).exists())
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_edit_site(self):
|
||||
def test_get_site(self):
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
|
||||
url = f"/clients/sites/{site.id}/" # type: ignore
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = SiteSerializer(site)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_edit_site(self):
|
||||
# setup data
|
||||
client = baker.make("clients.Client")
|
||||
site = baker.make("clients.Site", client=client)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.put("/clients/500/site/", format="json")
|
||||
r = self.client.put("/clients/sites/688/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
data = {"id": site.id, "name": "New Name", "client": site.client.id}
|
||||
data = {
|
||||
"site": {"client": client.id, "name": "New Site Name"}, # type: ignore
|
||||
"custom_fields": [],
|
||||
}
|
||||
|
||||
url = f"/clients/{site.id}/site/"
|
||||
url = f"/clients/sites/{site.id}/" # type: ignore
|
||||
r = self.client.put(url, data, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(Site.objects.filter(name="New Name").exists())
|
||||
self.assertTrue(
|
||||
Site.objects.filter(client=client, name="New Site Name").exists()
|
||||
)
|
||||
|
||||
# test add with custom fields new value
|
||||
field = baker.make(
|
||||
"core.CustomField",
|
||||
model="site",
|
||||
type="multiple",
|
||||
options=["one", "two", "three"],
|
||||
)
|
||||
payload = {
|
||||
"site": {
|
||||
"id": site.id, # type: ignore
|
||||
"client": site.client.id, # type: ignore
|
||||
"name": "Custom Field Site",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "multiple_value": ["two", "three"]}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
site = Site.objects.get(name="Custom Field Site")
|
||||
self.assertTrue(SiteCustomField.objects.filter(site=site, field=field).exists())
|
||||
|
||||
# edit custom field value
|
||||
payload = {
|
||||
"site": {
|
||||
"id": site.id, # type: ignore
|
||||
"client": client.id, # type: ignore
|
||||
"name": "Custom Field Site",
|
||||
},
|
||||
"custom_fields": [{"field": field.id, "multiple_value": ["one"]}], # type: ignore
|
||||
}
|
||||
r = self.client.put(url, payload, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertTrue(
|
||||
SiteCustomField.objects.get(site=site, field=field).value,
|
||||
["one"],
|
||||
)
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_site(self):
|
||||
from agents.models import Agent
|
||||
|
||||
# setup data
|
||||
site = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site)
|
||||
client = baker.make("clients.Client")
|
||||
site_to_delete = baker.make("clients.Site", client=client)
|
||||
site_to_move = baker.make("clients.Site")
|
||||
agent = baker.make_recipe("agents.agent", site=site_to_delete)
|
||||
|
||||
# test invalid id
|
||||
r = self.client.delete("/clients/500/site/", format="json")
|
||||
r = self.client.delete("/clients/500/445/", format="json")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/clients/{site.id}/site/"
|
||||
url = f"/clients/sites/{site_to_delete.id}/{site_to_move.id}/" # type: ignore
|
||||
|
||||
# test deleting with last site under client
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
# test deletion when agents exist under site
|
||||
baker.make("clients.Site", client=site.client)
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.json(), "A client must have at least 1 site.")
|
||||
|
||||
# test successful deletion
|
||||
agent.delete()
|
||||
site_to_move.client = client # type: ignore
|
||||
site_to_move.save(update_fields=["client"]) # type: ignore
|
||||
r = self.client.delete(url, format="json")
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Site.objects.filter(pk=site.id).exists())
|
||||
agent_moved = Agent.objects.get(pk=agent.pk)
|
||||
self.assertEqual(agent_moved.site.id, site_to_move.id) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -233,7 +388,7 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url, format="json")
|
||||
serializer = ClientTreeSerializer(clients, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -245,7 +400,7 @@ class TestClientViews(TacticalTestCase):
|
||||
r = self.client.get(url)
|
||||
serializer = DeploymentSerializer(deployments, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -255,8 +410,8 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
url = "/clients/deployments/"
|
||||
payload = {
|
||||
"client": site.client.id,
|
||||
"site": site.id,
|
||||
"client": site.client.id, # type: ignore
|
||||
"site": site.id, # type: ignore
|
||||
"expires": "2037-11-23 18:53",
|
||||
"power": 1,
|
||||
"ping": 0,
|
||||
@@ -284,10 +439,10 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
url = "/clients/deployments/"
|
||||
|
||||
url = f"/clients/{deployment.id}/deployment/"
|
||||
url = f"/clients/{deployment.id}/deployment/" # type: ignore
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists())
|
||||
self.assertFalse(Deployment.objects.filter(pk=deployment.id).exists()) # type: ignore
|
||||
|
||||
url = "/clients/32348/deployment/"
|
||||
r = self.client.delete(url)
|
||||
@@ -301,7 +456,7 @@ class TestClientViews(TacticalTestCase):
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
self.assertEqual(r.data, "invalid")
|
||||
self.assertEqual(r.data, "invalid") # type: ignore
|
||||
|
||||
uid = uuid.uuid4()
|
||||
url = f"/clients/{uid}/deploy/"
|
||||
|
||||
@@ -4,10 +4,12 @@ from . import views

urlpatterns = [
    path("clients/", views.GetAddClients.as_view()),
    path("<int:pk>/client/", views.GetUpdateDeleteClient.as_view()),
    path("<int:pk>/client/", views.GetUpdateClient.as_view()),
    path("<int:pk>/<int:sitepk>/", views.DeleteClient.as_view()),
    path("tree/", views.GetClientTree.as_view()),
    path("sites/", views.GetAddSites.as_view()),
    path("<int:pk>/site/", views.GetUpdateDeleteSite.as_view()),
    path("sites/<int:pk>/", views.GetUpdateSite.as_view()),
    path("sites/<int:pk>/<int:sitepk>/", views.DeleteSite.as_view()),
    path("deployments/", views.AgentDeployment.as_view()),
    path("<int:pk>/deployment/", views.AgentDeployment.as_view()),
    path("<str:uid>/deploy/", views.GenerateAgent.as_view()),
@@ -6,22 +6,27 @@ import pytz
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from core.models import CoreSettings
|
||||
from tacticalrmm.utils import generate_installer_exe, notify_error
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import Client, Deployment, Site
|
||||
from .models import Client, ClientCustomField, Deployment, Site, SiteCustomField
|
||||
from .serializers import (
|
||||
ClientCustomFieldSerializer,
|
||||
ClientSerializer,
|
||||
ClientTreeSerializer,
|
||||
DeploymentSerializer,
|
||||
SiteCustomFieldSerializer,
|
||||
SiteSerializer,
|
||||
)
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class GetAddClients(APIView):
|
||||
def get(self, request):
|
||||
@@ -29,45 +34,98 @@ class GetAddClients(APIView):
|
||||
return Response(ClientSerializer(clients, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
# create client
|
||||
client_serializer = ClientSerializer(data=request.data["client"])
|
||||
client_serializer.is_valid(raise_exception=True)
|
||||
client = client_serializer.save()
|
||||
|
||||
if "initialsetup" in request.data:
|
||||
client = {"name": request.data["client"]["client"].strip()}
|
||||
site = {"name": request.data["client"]["site"].strip()}
|
||||
serializer = ClientSerializer(data=client, context=request.data["client"])
|
||||
serializer.is_valid(raise_exception=True)
|
||||
# create site
|
||||
site_serializer = SiteSerializer(
|
||||
data={"client": client.id, "name": request.data["site"]["name"]}
|
||||
)
|
||||
|
||||
# make sure site serializer doesn't return errors and save
|
||||
if site_serializer.is_valid():
|
||||
site_serializer.save()
|
||||
else:
|
||||
# delete client since site serializer was invalid
|
||||
client.delete()
|
||||
site_serializer.is_valid(raise_exception=True)
|
||||
|
||||
if "initialsetup" in request.data.keys():
|
||||
core = CoreSettings.objects.first()
|
||||
core.default_time_zone = request.data["timezone"]
|
||||
core.save(update_fields=["default_time_zone"])
|
||||
else:
|
||||
client = {"name": request.data["client"].strip()}
|
||||
site = {"name": request.data["site"].strip()}
|
||||
serializer = ClientSerializer(data=client, context=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
obj = serializer.save()
|
||||
Site(client=obj, name=site["name"]).save()
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
return Response(f"{obj} was added!")
|
||||
custom_field = field
|
||||
custom_field["client"] = client.id
|
||||
|
||||
serializer = ClientCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response(f"{client} was added!")
|
||||
|
||||
|
||||
class GetUpdateDeleteClient(APIView):
|
||||
class GetUpdateClient(APIView):
|
||||
def get(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
return Response(ClientSerializer(client).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
|
||||
serializer = ClientSerializer(data=request.data, instance=client, partial=True)
|
||||
serializer = ClientSerializer(
|
||||
data=request.data["client"], instance=client, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("The Client was renamed")
|
||||
# update custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["client"] = pk
|
||||
|
||||
if ClientCustomField.objects.filter(field=field["field"], client=pk):
|
||||
value = ClientCustomField.objects.get(
|
||||
field=field["field"], client=pk
|
||||
)
|
||||
serializer = ClientCustomFieldSerializer(
|
||||
instance=value, data=custom_field
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
else:
|
||||
serializer = ClientCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("The Client was updated")
|
||||
|
||||
|
||||
class DeleteClient(APIView):
|
||||
def delete(self, request, pk, sitepk):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
def delete(self, request, pk):
|
||||
client = get_object_or_404(Client, pk=pk)
|
||||
agent_count = Agent.objects.filter(site__client=client).count()
|
||||
if agent_count > 0:
|
||||
agents = Agent.objects.filter(site__client=client)
|
||||
|
||||
if not sitepk:
|
||||
return notify_error(
|
||||
f"Cannot delete {client} while {agent_count} agents exist in it. Move the agents to another client first."
|
||||
"There needs to be a site specified to move existing agents to"
|
||||
)
|
||||
|
||||
site = get_object_or_404(Site, pk=sitepk)
|
||||
agents.update(site=site)
|
||||
|
||||
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||
|
||||
client.delete()
|
||||
return Response(f"{client.name} was deleted!")
|
||||
|
||||
@@ -84,39 +142,89 @@ class GetAddSites(APIView):
|
||||
return Response(SiteSerializer(sites, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
name = request.data["name"].strip()
|
||||
serializer = SiteSerializer(data=request.data["site"])
|
||||
serializer.is_valid(raise_exception=True)
|
||||
site = serializer.save()
|
||||
|
||||
# save custom fields
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["site"] = site.id
|
||||
|
||||
serializer = SiteCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response(f"Site {site.name} was added!")
|
||||
|
||||
|
||||
class GetUpdateSite(APIView):
|
||||
def get(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
return Response(SiteSerializer(site).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
|
||||
if "client" in request.data["site"].keys() and (
|
||||
site.client.id != request.data["site"]["client"]
|
||||
and site.client.sites.count() == 1
|
||||
):
|
||||
return notify_error("A client must have at least one site")
|
||||
|
||||
serializer = SiteSerializer(
|
||||
data={"name": name, "client": request.data["client"]},
|
||||
context={"clientpk": request.data["client"]},
|
||||
instance=site, data=request.data["site"], partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
# update custom field
|
||||
if "custom_fields" in request.data.keys():
|
||||
|
||||
for field in request.data["custom_fields"]:
|
||||
|
||||
custom_field = field
|
||||
custom_field["site"] = pk
|
||||
|
||||
if SiteCustomField.objects.filter(field=field["field"], site=pk):
|
||||
value = SiteCustomField.objects.get(field=field["field"], site=pk)
|
||||
serializer = SiteCustomFieldSerializer(
|
||||
instance=value, data=custom_field, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
else:
|
||||
serializer = SiteCustomFieldSerializer(data=custom_field)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("Site was edited!")
|
||||
|
||||
|
||||
class GetUpdateDeleteSite(APIView):
|
||||
def put(self, request, pk):
|
||||
class DeleteSite(APIView):
|
||||
def delete(self, request, pk, sitepk):
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
serializer = SiteSerializer(instance=site, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
site = get_object_or_404(Site, pk=pk)
|
||||
if site.client.sites.count() == 1:
|
||||
return notify_error(f"A client must have at least 1 site.")
|
||||
return notify_error("A client must have at least 1 site.")
|
||||
|
||||
agent_count = Agent.objects.filter(site=site).count()
|
||||
agents = Agent.objects.filter(site=site)
|
||||
|
||||
if agent_count > 0:
|
||||
if not sitepk:
|
||||
return notify_error(
|
||||
f"Cannot delete {site.name} while {agent_count} agents exist in it. Move the agents to another site first."
|
||||
"There needs to be a site specified to move the agents to"
|
||||
)
|
||||
|
||||
agent_site = get_object_or_404(Site, pk=sitepk)
|
||||
|
||||
agents.update(site=agent_site)
|
||||
|
||||
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||
|
||||
site.delete()
|
||||
return Response(f"{site.name} was deleted!")
|
||||
|
||||
@@ -173,6 +281,8 @@ class GenerateAgent(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def get(self, request, uid):
|
||||
from tacticalrmm.utils import generate_winagent_exe
|
||||
|
||||
try:
|
||||
_ = uuid.UUID(uid, version=4)
|
||||
except ValueError:
|
||||
@@ -180,28 +290,22 @@ class GenerateAgent(APIView):
|
||||
|
||||
d = get_object_or_404(Deployment, uid=uid)
|
||||
|
||||
inno = (
|
||||
f"winagent-v{settings.LATEST_AGENT_VER}.exe"
|
||||
if d.arch == "64"
|
||||
else f"winagent-v{settings.LATEST_AGENT_VER}-x86.exe"
|
||||
)
|
||||
client = d.client.name.replace(" ", "").lower()
|
||||
site = d.site.name.replace(" ", "").lower()
|
||||
client = re.sub(r"([^a-zA-Z0-9]+)", "", client)
|
||||
site = re.sub(r"([^a-zA-Z0-9]+)", "", site)
|
||||
ext = ".exe" if d.arch == "64" else "-x86.exe"
|
||||
file_name = f"rmm-{client}-{site}-{d.mon_type}{ext}"
|
||||
|
||||
return generate_installer_exe(
|
||||
file_name=f"rmm-{client}-{site}-{d.mon_type}{ext}",
|
||||
goarch="amd64" if d.arch == "64" else "386",
|
||||
inno=inno,
|
||||
api=f"https://{request.get_host()}",
|
||||
client_id=d.client.pk,
|
||||
site_id=d.site.pk,
|
||||
atype=d.mon_type,
|
||||
return generate_winagent_exe(
|
||||
client=d.client.pk,
|
||||
site=d.site.pk,
|
||||
agent_type=d.mon_type,
|
||||
rdp=d.install_flags["rdp"],
|
||||
ping=d.install_flags["ping"],
|
||||
power=d.install_flags["power"],
|
||||
download_url=settings.DL_64 if d.arch == "64" else settings.DL_32,
|
||||
arch=d.arch,
|
||||
token=d.token_key,
|
||||
api=f"https://{request.get_host()}",
|
||||
file_name=file_name,
|
||||
)
|
||||
|
||||
@@ -1,5 +1,7 @@
from django.contrib import admin

from .models import CoreSettings
from .models import CodeSignToken, CoreSettings, CustomField

admin.site.register(CoreSettings)
admin.site.register(CustomField)
admin.site.register(CodeSignToken)
79
api/tacticalrmm/core/consumers.py
Normal file
@@ -0,0 +1,79 @@
|
||||
import asyncio
|
||||
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
|
||||
from agents.models import Agent
|
||||
|
||||
|
||||
class DashInfo(AsyncJsonWebsocketConsumer):
|
||||
async def connect(self):
|
||||
|
||||
self.user = self.scope["user"]
|
||||
|
||||
if isinstance(self.user, AnonymousUser):
|
||||
await self.close()
|
||||
|
||||
await self.accept()
|
||||
self.connected = True
|
||||
self.dash_info = asyncio.create_task(self.send_dash_info())
|
||||
|
||||
async def disconnect(self, close_code):
|
||||
|
||||
try:
|
||||
self.dash_info.cancel()
|
||||
except:
|
||||
pass
|
||||
|
||||
self.connected = False
|
||||
await self.close()
|
||||
|
||||
async def receive(self, json_data=None):
|
||||
pass
|
||||
|
||||
@database_sync_to_async
|
||||
def get_dashboard_info(self):
|
||||
server_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="server").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
workstation_offline_count = len(
|
||||
[
|
||||
agent
|
||||
for agent in Agent.objects.filter(monitoring_type="workstation").only(
|
||||
"pk",
|
||||
"last_seen",
|
||||
"overdue_time",
|
||||
"offline_time",
|
||||
)
|
||||
if not agent.status == "online"
|
||||
]
|
||||
)
|
||||
|
||||
ret = {
|
||||
"total_server_offline_count": server_offline_count,
|
||||
"total_workstation_offline_count": workstation_offline_count,
|
||||
"total_server_count": Agent.objects.filter(
|
||||
monitoring_type="server"
|
||||
).count(),
|
||||
"total_workstation_count": Agent.objects.filter(
|
||||
monitoring_type="workstation"
|
||||
).count(),
|
||||
}
|
||||
return ret
|
||||
|
||||
async def send_dash_info(self):
|
||||
while self.connected:
|
||||
c = await self.get_dashboard_info()
|
||||
await self.send_json(c)
|
||||
await asyncio.sleep(30)
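
For reference, a minimal client-side sketch of consuming the DashInfo feed above. The /ws/dashinfo/ path and the access_token query parameter are assumptions taken from TestConsumers.test_dash_info later in this diff; the host, token, and the third-party websockets package are placeholders, not part of this change.

import asyncio
import json

import websockets  # pip install websockets


async def watch_dash(host: str, token: str) -> None:
    # Path and auth parameter assumed from the websocket test below.
    uri = f"wss://{host}/ws/dashinfo/?access_token={token}"
    async with websockets.connect(uri) as ws:
        while True:
            # The consumer pushes a JSON payload roughly every 30 seconds.
            info = json.loads(await ws.recv())
            print(info["total_server_offline_count"], info["total_workstation_offline_count"])


asyncio.run(watch_dash("rmm.example.com", "<api token>"))
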
|
||||
Binary file not shown.
@@ -1,5 +0,0 @@
|
||||
module github.com/wh1te909/goinstaller
|
||||
|
||||
go 1.16
|
||||
|
||||
require github.com/josephspurrier/goversioninfo v1.2.0 // indirect
|
||||
@@ -1,10 +0,0 @@
|
||||
github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
|
||||
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/josephspurrier/goversioninfo v1.2.0 h1:tpLHXAxLHKHg/dCU2AAYx08A4m+v9/CWg6+WUvTF4uQ=
|
||||
github.com/josephspurrier/goversioninfo v1.2.0/go.mod h1:AGP2a+Y/OVJZ+s6XM4IwFUpkETwvn0orYurY8qpw1+0=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
@@ -1,17 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity
|
||||
type="win32"
|
||||
name="TacticalRMMInstaller"
|
||||
version="1.0.0.0"
|
||||
processorArchitecture="*"/>
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel
|
||||
level="requireAdministrator"
|
||||
uiAccess="false"/>
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
</assembly>
|
||||
@@ -1,186 +0,0 @@
|
||||
//go:generate goversioninfo -icon=onit.ico -manifest=goversioninfo.exe.manifest -gofile=versioninfo.go
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
Inno string
|
||||
Api string
|
||||
Client string
|
||||
Site string
|
||||
Atype string
|
||||
Power string
|
||||
Rdp string
|
||||
Ping string
|
||||
Token string
|
||||
DownloadUrl string
|
||||
)
|
||||
|
||||
var netTransport = &http.Transport{
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 5 * time.Second,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 5 * time.Second,
|
||||
}
|
||||
|
||||
var netClient = &http.Client{
|
||||
Timeout: time.Second * 900,
|
||||
Transport: netTransport,
|
||||
}
|
||||
|
||||
func downloadAgent(filepath string) (err error) {
|
||||
|
||||
out, err := os.Create(filepath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
resp, err := netClient.Get(DownloadUrl)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("Bad response: %s", resp.Status)
|
||||
}
|
||||
|
||||
_, err = io.Copy(out, resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
|
||||
debugLog := flag.String("log", "", "Verbose output")
|
||||
localMesh := flag.String("local-mesh", "", "Use local mesh agent")
|
||||
silent := flag.Bool("silent", false, "Do not popup any message boxes during installation")
|
||||
cert := flag.String("cert", "", "Path to ca.pem")
|
||||
flag.Parse()
|
||||
|
||||
var debug bool = false
|
||||
|
||||
if strings.TrimSpace(strings.ToLower(*debugLog)) == "debug" {
|
||||
debug = true
|
||||
}
|
||||
|
||||
agentBinary := filepath.Join(os.Getenv("windir"), "Temp", Inno)
|
||||
tacrmm := filepath.Join(os.Getenv("PROGRAMFILES"), "TacticalAgent", "tacticalrmm.exe")
|
||||
|
||||
cmdArgs := []string{
|
||||
"-m", "install", "--api", Api, "--client-id",
|
||||
Client, "--site-id", Site, "--agent-type", Atype,
|
||||
"--auth", Token,
|
||||
}
|
||||
|
||||
if debug {
|
||||
cmdArgs = append(cmdArgs, "-log", "debug")
|
||||
}
|
||||
|
||||
if *silent {
|
||||
cmdArgs = append(cmdArgs, "-silent")
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*localMesh)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "-local-mesh", *localMesh)
|
||||
}
|
||||
|
||||
if len(strings.TrimSpace(*cert)) != 0 {
|
||||
cmdArgs = append(cmdArgs, "-cert", *cert)
|
||||
}
|
||||
|
||||
if Rdp == "1" {
|
||||
cmdArgs = append(cmdArgs, "-rdp")
|
||||
}
|
||||
|
||||
if Ping == "1" {
|
||||
cmdArgs = append(cmdArgs, "-ping")
|
||||
}
|
||||
|
||||
if Power == "1" {
|
||||
cmdArgs = append(cmdArgs, "-power")
|
||||
}
|
||||
|
||||
if debug {
|
||||
fmt.Println("Installer:", agentBinary)
|
||||
fmt.Println("Tactical Agent:", tacrmm)
|
||||
fmt.Println("Download URL:", DownloadUrl)
|
||||
fmt.Println("Install command:", tacrmm, strings.Join(cmdArgs, " "))
|
||||
}
|
||||
|
||||
fmt.Println("Downloading agent...")
|
||||
dl := downloadAgent(agentBinary)
|
||||
if dl != nil {
|
||||
fmt.Println("ERROR: unable to download agent from", DownloadUrl)
|
||||
fmt.Println(dl)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer os.Remove(agentBinary)
|
||||
|
||||
fmt.Println("Extracting files...")
|
||||
winagentCmd := exec.Command(agentBinary, "/VERYSILENT", "/SUPPRESSMSGBOXES")
|
||||
err := winagentCmd.Run()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
time.Sleep(5 * time.Second)
|
||||
|
||||
fmt.Println("Installation starting.")
|
||||
cmd := exec.Command(tacrmm, cmdArgs...)
|
||||
|
||||
cmdReader, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
return
|
||||
}
|
||||
|
||||
cmdErrReader, oerr := cmd.StderrPipe()
|
||||
if oerr != nil {
|
||||
fmt.Fprintln(os.Stderr, oerr)
|
||||
return
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(cmdReader)
|
||||
escanner := bufio.NewScanner(cmdErrReader)
|
||||
go func() {
|
||||
for scanner.Scan() {
|
||||
fmt.Println(scanner.Text())
|
||||
}
|
||||
}()
|
||||
|
||||
go func() {
|
||||
for escanner.Scan() {
|
||||
fmt.Println(escanner.Text())
|
||||
}
|
||||
}()
|
||||
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = cmd.Wait()
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
return
|
||||
}
|
||||
}
|
||||
Binary file not shown.
|
@@ -1,43 +0,0 @@
|
||||
{
|
||||
"FixedFileInfo": {
|
||||
"FileVersion": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 0,
|
||||
"Build": 0
|
||||
},
|
||||
"ProductVersion": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 0,
|
||||
"Build": 0
|
||||
},
|
||||
"FileFlagsMask": "3f",
|
||||
"FileFlags ": "00",
|
||||
"FileOS": "040004",
|
||||
"FileType": "01",
|
||||
"FileSubType": "00"
|
||||
},
|
||||
"StringFileInfo": {
|
||||
"Comments": "",
|
||||
"CompanyName": "Tactical Techs",
|
||||
"FileDescription": "Tactical RMM Installer",
|
||||
"FileVersion": "v1.0.0.0",
|
||||
"InternalName": "rmm.exe",
|
||||
"LegalCopyright": "Copyright (c) 2020 Tactical Techs",
|
||||
"LegalTrademarks": "",
|
||||
"OriginalFilename": "installer.go",
|
||||
"PrivateBuild": "",
|
||||
"ProductName": "Tactical RMM Installer",
|
||||
"ProductVersion": "v1.0.0.0",
|
||||
"SpecialBuild": ""
|
||||
},
|
||||
"VarFileInfo": {
|
||||
"Translation": {
|
||||
"LangID": "0409",
|
||||
"CharsetID": "04B0"
|
||||
}
|
||||
},
|
||||
"IconPath": "",
|
||||
"ManifestPath": ""
|
||||
}
|
||||
@@ -9,6 +9,9 @@ $rdp = rdpchange
|
||||
$ping = pingchange
|
||||
$auth = '"tokenchange"'
|
||||
$downloadlink = 'downloadchange'
|
||||
$apilink = $downloadlink.split('/')
|
||||
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
|
||||
$serviceName = 'tacticalagent'
|
||||
If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
@@ -45,24 +48,35 @@ If (Get-Service $serviceName -ErrorAction SilentlyContinue) {
|
||||
# pass
|
||||
}
|
||||
|
||||
Try
|
||||
{
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
|
||||
write-host ('Extracting...')
|
||||
Start-Sleep -s 5
|
||||
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
|
||||
exit 0
|
||||
}
|
||||
Catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
$FailedItem = $_.Exception.ItemName
|
||||
Write-Error -Message "$ErrorMessage $FailedItem"
|
||||
exit 1
|
||||
}
|
||||
Finally
|
||||
{
|
||||
Remove-Item -Path $OutPath\$output
|
||||
$X = 0
|
||||
do {
|
||||
Write-Output "Waiting for network"
|
||||
Start-Sleep -s 5
|
||||
$X += 1
|
||||
} until(($connectResult = Test-NetConnection $apilink[2] -Port 443 | ? { $_.TcpTestSucceeded }) -or $X -eq 3)
|
||||
|
||||
if ($connectResult.TcpTestSucceeded -eq $true){
|
||||
Try
|
||||
{
|
||||
Invoke-WebRequest -Uri $downloadlink -OutFile $OutPath\$output
|
||||
Start-Process -FilePath $OutPath\$output -ArgumentList ('/VERYSILENT /SUPPRESSMSGBOXES') -Wait
|
||||
write-host ('Extracting...')
|
||||
Start-Sleep -s 5
|
||||
Start-Process -FilePath "C:\Program Files\TacticalAgent\tacticalrmm.exe" -ArgumentList $installArgs -Wait
|
||||
exit 0
|
||||
}
|
||||
Catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
$FailedItem = $_.Exception.ItemName
|
||||
Write-Error -Message "$ErrorMessage $FailedItem"
|
||||
exit 1
|
||||
}
|
||||
Finally
|
||||
{
|
||||
Remove-Item -Path $OutPath\$output
|
||||
}
|
||||
} else {
|
||||
Write-Output "Unable to connect to server"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from agents.models import Agent
|
||||
from scripts.models import Script
|
||||
from logs.models import PendingAction
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -29,5 +25,8 @@ class Command(BaseCommand):
|
||||
self.style.SUCCESS(f"Migrated disks on {agent.hostname}")
|
||||
)
|
||||
|
||||
# remove task pending actions. deprecated 4/20/2021
|
||||
PendingAction.objects.filter(action_type="taskaction").delete()
|
||||
|
||||
# load community scripts into the db
|
||||
Script.load_community_scripts()
|
||||
|
||||
27
api/tacticalrmm/core/migrations/0014_customfield.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-17 14:45
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0013_coresettings_alert_template'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CustomField',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('order', models.PositiveIntegerField()),
|
||||
('model', models.CharField(choices=[('client', 'Client'), ('site', 'Site'), ('agent', 'Agent')], max_length=25)),
|
||||
('type', models.CharField(choices=[('text', 'Text'), ('number', 'Number'), ('single', 'Single'), ('multiple', 'Multiple'), ('checkbox', 'Checkbox'), ('datetime', 'DateTime')], default='text', max_length=25)),
|
||||
('options', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None)),
|
||||
('name', models.TextField(blank=True, null=True)),
|
||||
('default_value', models.TextField(blank=True, null=True)),
|
||||
('required', models.BooleanField(blank=True, default=False)),
|
||||
],
|
||||
),
|
||||
]
|
||||
18
api/tacticalrmm/core/migrations/0015_auto_20210318_2034.py
Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-18 20:34
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0014_customfield'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='customfield',
|
||||
name='order',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
]
|
||||
17
api/tacticalrmm/core/migrations/0016_auto_20210319_1536.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-19 15:36
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0015_auto_20210318_2034'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='customfield',
|
||||
unique_together={('model', 'name')},
|
||||
),
|
||||
]
|
||||
24
api/tacticalrmm/core/migrations/0017_auto_20210329_1050.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 10:50
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0016_auto_20210319_1536'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='checkbox_value',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='default_values_multiple',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(blank=True, max_length=255, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
23
api/tacticalrmm/core/migrations/0018_auto_20210329_1709.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-29 17:09
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0017_auto_20210329_1050'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='customfield',
|
||||
old_name='checkbox_value',
|
||||
new_name='default_value_bool',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='customfield',
|
||||
old_name='default_value',
|
||||
new_name='default_value_string',
|
||||
),
|
||||
]
|
||||
20
api/tacticalrmm/core/migrations/0019_codesigntoken.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# Generated by Django 3.2 on 2021-04-13 05:41
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0018_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CodeSignToken',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('token', models.CharField(blank=True, max_length=255, null=True)),
|
||||
],
|
||||
),
|
||||
]
|
||||
21
api/tacticalrmm/core/migrations/0019_globalkvstore.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-04 00:32
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0018_auto_20210329_1709'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='GlobalKVStore',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(max_length=25)),
|
||||
('value', models.TextField()),
|
||||
],
|
||||
),
|
||||
]
|
||||
14
api/tacticalrmm/core/migrations/0020_merge_20210415_0132.py
Normal file
@@ -0,0 +1,14 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-15 01:32
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0019_codesigntoken'),
|
||||
('core', '0019_globalkvstore'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-24 23:56
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0020_merge_20210415_0132'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='customfield',
|
||||
name='hide_in_ui',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -79,7 +79,7 @@ class CoreSettings(BaseAuditModel):
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
from alerts.tasks import cache_agents_alert_template
|
||||
from automation.tasks import generate_all_agent_checks_task
|
||||
from automation.tasks import generate_agent_checks_task
|
||||
|
||||
if not self.pk and CoreSettings.objects.exists():
|
||||
raise ValidationError("There can only be one CoreSettings instance")
|
||||
@@ -97,14 +97,10 @@ class CoreSettings(BaseAuditModel):
|
||||
super(BaseAuditModel, self).save(*args, **kwargs)
|
||||
|
||||
# check if server policies have changed and initiate task to reapply policies if so
|
||||
if old_settings and old_settings.server_policy != self.server_policy:
|
||||
generate_all_agent_checks_task.delay(mon_type="server", create_tasks=True)
|
||||
|
||||
# check if workstation policies have changed and initiate task to reapply policies if so
|
||||
if old_settings and old_settings.workstation_policy != self.workstation_policy:
|
||||
generate_all_agent_checks_task.delay(
|
||||
mon_type="workstation", create_tasks=True
|
||||
)
|
||||
if (old_settings and old_settings.server_policy != self.server_policy) or (
|
||||
old_settings and old_settings.workstation_policy != self.workstation_policy
|
||||
):
|
||||
generate_agent_checks_task.delay(all=True, create_tasks=True)
|
||||
|
||||
if old_settings and old_settings.alert_template != self.alert_template:
|
||||
cache_agents_alert_template.delay()
|
||||
@@ -216,3 +212,120 @@ class CoreSettings(BaseAuditModel):
|
||||
from .serializers import CoreSerializer
|
||||
|
||||
return CoreSerializer(core).data
|
||||
|
||||
|
||||
FIELD_TYPE_CHOICES = (
|
||||
("text", "Text"),
|
||||
("number", "Number"),
|
||||
("single", "Single"),
|
||||
("multiple", "Multiple"),
|
||||
("checkbox", "Checkbox"),
|
||||
("datetime", "DateTime"),
|
||||
)
|
||||
|
||||
MODEL_CHOICES = (("client", "Client"), ("site", "Site"), ("agent", "Agent"))
|
||||
|
||||
|
||||
class CustomField(models.Model):
|
||||
|
||||
order = models.PositiveIntegerField(default=0)
|
||||
model = models.CharField(max_length=25, choices=MODEL_CHOICES)
|
||||
type = models.CharField(max_length=25, choices=FIELD_TYPE_CHOICES, default="text")
|
||||
options = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
name = models.TextField(null=True, blank=True)
|
||||
required = models.BooleanField(blank=True, default=False)
|
||||
default_value_string = models.TextField(null=True, blank=True)
|
||||
default_value_bool = models.BooleanField(default=False)
|
||||
default_values_multiple = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
hide_in_ui = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
unique_together = (("model", "name"),)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def default_value(self):
|
||||
if self.type == "multiple":
|
||||
return self.default_values_multiple
|
||||
elif self.type == "checkbox":
|
||||
return self.default_value_bool
|
||||
else:
|
||||
return self.default_value_string
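
A quick sketch of how the default_value property above dispatches on type; the field names are made up for illustration and this would be run from the Django shell:

from core.models import CustomField

# "checkbox" fields fall back to the boolean default...
cb = CustomField(model="agent", name="Patching Enabled", type="checkbox", default_value_bool=True)
assert cb.default_value is True

# ..."multiple" fields fall back to the array default...
tags = CustomField(model="site", name="Tags", type="multiple", default_values_multiple=["vip", "managed"])
assert tags.default_value == ["vip", "managed"]

# ...and every other type falls back to the string default.
notes = CustomField(model="client", name="Notes", type="text", default_value_string="n/a")
assert notes.default_value == "n/a"
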
|
||||
|
||||
|
||||
class CodeSignToken(models.Model):
|
||||
token = models.CharField(max_length=255, null=True, blank=True)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.pk and CodeSignToken.objects.exists():
|
||||
raise ValidationError("There can only be one CodeSignToken instance")
|
||||
|
||||
super(CodeSignToken, self).save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return "Code signing token"
|
||||
|
||||
|
||||
class GlobalKVStore(models.Model):
|
||||
name = models.CharField(max_length=25)
|
||||
value = models.TextField()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
RUN_ON_CHOICES = (
|
||||
("client", "Client"),
|
||||
("site", "Site"),
|
||||
("agent", "Agent"),
|
||||
("once", "Once"),
|
||||
)
|
||||
|
||||
SCHEDULE_CHOICES = (("daily", "Daily"), ("weekly", "Weekly"), ("monthly", "Monthly"))
|
||||
|
||||
|
||||
""" class GlobalTask(models.Model):
|
||||
script = models.ForeignKey(
|
||||
"scripts.Script",
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="script",
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
script_args = ArrayField(
|
||||
models.CharField(max_length=255, null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
custom_field = models.OneToOneField(
|
||||
"core.CustomField",
|
||||
related_name="globaltask",
|
||||
null=True,
|
||||
blank=True,
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
timeout = models.PositiveIntegerField(default=120)
|
||||
retcode = models.IntegerField(null=True, blank=True)
|
||||
retvalue = models.TextField(null=True, blank=True)
|
||||
stdout = models.TextField(null=True, blank=True)
|
||||
stderr = models.TextField(null=True, blank=True)
|
||||
execution_time = models.CharField(max_length=100, default="0.0000")
|
||||
run_schedule = models.CharField(
|
||||
max_length=25, choices=SCHEDULE_CHOICES, default="once"
|
||||
)
|
||||
run_on = models.CharField(
|
||||
max_length=25, choices=RUN_ON_CHOICES, default="once"
|
||||
) """
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import pytz
|
||||
from rest_framework import serializers
|
||||
|
||||
from .models import CoreSettings
|
||||
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore
|
||||
|
||||
|
||||
class CoreSettingsSerializer(serializers.ModelSerializer):
|
||||
@@ -21,3 +21,21 @@ class CoreSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CoreSettings
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class CustomFieldSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CustomField
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class CodeSignTokenSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CodeSignToken
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class KeyStoreSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = GlobalKVStore
|
||||
fields = "__all__"
|
||||
|
||||
@@ -1,11 +1,62 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from model_bakery import baker, seq
|
||||
import requests
|
||||
from channels.db import database_sync_to_async
|
||||
from channels.testing import WebsocketCommunicator
|
||||
from model_bakery import baker
|
||||
|
||||
from core.models import CoreSettings
|
||||
from core.tasks import core_maintenance_tasks
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
from .consumers import DashInfo
|
||||
from .models import CoreSettings, CustomField, GlobalKVStore
|
||||
from .serializers import CustomFieldSerializer, KeyStoreSerializer
|
||||
from .tasks import core_maintenance_tasks
|
||||
|
||||
|
||||
class TestCodeSign(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
self.url = "/core/codesign/"
|
||||
|
||||
def test_get_codesign(self):
|
||||
r = self.client.get(self.url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("get", self.url)
|
||||
|
||||
@patch("requests.post")
|
||||
def test_edit_codesign_timeout(self, mock_post):
|
||||
mock_post.side_effect = requests.exceptions.ConnectionError()
|
||||
data = {"token": "token123"}
|
||||
r = self.client.patch(self.url, data, format="json")
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
self.check_not_authenticated("patch", self.url)
|
||||
|
||||
|
||||
class TestConsumers(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.setup_coresettings()
|
||||
self.authenticate()
|
||||
|
||||
@database_sync_to_async
|
||||
def get_token(self):
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
token = Token.objects.create(user=self.john)
|
||||
return token.key
|
||||
|
||||
async def test_dash_info(self):
|
||||
key = await self.get_token()
|
||||
communicator = WebsocketCommunicator(
|
||||
DashInfo.as_asgi(), f"/ws/dashinfo/?access_token={key}"
|
||||
)
|
||||
communicator.scope["user"] = self.john
|
||||
connected, _ = await communicator.connect()
|
||||
assert connected
|
||||
await communicator.disconnect()
|
||||
|
||||
|
||||
class TestCoreTasks(TacticalTestCase):
|
||||
def setUp(self):
|
||||
@@ -37,12 +88,12 @@ class TestCoreTasks(TacticalTestCase):
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@patch("automation.tasks.generate_all_agent_checks_task.delay")
|
||||
def test_edit_coresettings(self, generate_all_agent_checks_task):
|
||||
@patch("automation.tasks.generate_agent_checks_task.delay")
|
||||
def test_edit_coresettings(self, generate_agent_checks_task):
|
||||
url = "/core/editsettings/"
|
||||
|
||||
# setup
|
||||
policies = baker.make("Policy", _quantity=2)
|
||||
policies = baker.make("automation.Policy", _quantity=2)
|
||||
# test normal request
|
||||
data = {
|
||||
"smtp_from_email": "newexample@example.com",
|
||||
@@ -55,23 +106,23 @@ class TestCoreTasks(TacticalTestCase):
|
||||
)
|
||||
self.assertEqual(CoreSettings.objects.first().mesh_token, data["mesh_token"])
|
||||
|
||||
generate_all_agent_checks_task.assert_not_called()
|
||||
generate_agent_checks_task.assert_not_called()
|
||||
|
||||
# test adding policy
|
||||
data = {
|
||||
"workstation_policy": policies[0].id,
|
||||
"server_policy": policies[1].id,
|
||||
"workstation_policy": policies[0].id, # type: ignore
|
||||
"server_policy": policies[1].id, # type: ignore
|
||||
}
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id)
|
||||
self.assertEqual(CoreSettings.objects.first().server_policy.id, policies[1].id) # type: ignore
|
||||
self.assertEqual(
|
||||
CoreSettings.objects.first().workstation_policy.id, policies[0].id
|
||||
CoreSettings.objects.first().workstation_policy.id, policies[0].id # type: ignore
|
||||
)
|
||||
|
||||
self.assertEqual(generate_all_agent_checks_task.call_count, 2)
|
||||
generate_agent_checks_task.assert_called_once()
|
||||
|
||||
generate_all_agent_checks_task.reset_mock()
|
||||
generate_agent_checks_task.reset_mock()
|
||||
|
||||
# test remove policy
|
||||
data = {
|
||||
@@ -81,7 +132,7 @@ class TestCoreTasks(TacticalTestCase):
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(CoreSettings.objects.first().workstation_policy, None)
|
||||
|
||||
self.assertEqual(generate_all_agent_checks_task.call_count, 1)
|
||||
self.assertEqual(generate_agent_checks_task.call_count, 1)
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
@@ -128,3 +179,155 @@ class TestCoreTasks(TacticalTestCase):
|
||||
remove_orphaned_win_tasks.assert_called()
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_custom_fields(self):
|
||||
url = "/core/customfields/"
|
||||
|
||||
# setup
|
||||
custom_fields = baker.make("core.CustomField", _quantity=2)
|
||||
|
||||
r = self.client.get(url)
|
||||
serializer = CustomFieldSerializer(custom_fields, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.data), 2) # type: ignore
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_get_custom_fields_by_model(self):
|
||||
url = "/core/customfields/"
|
||||
|
||||
# setup
|
||||
custom_fields = baker.make("core.CustomField", model="agent", _quantity=5)
|
||||
baker.make("core.CustomField", model="client", _quantity=5)
|
||||
|
||||
# will error if request invalid
|
||||
r = self.client.patch(url, {"invalid": ""})
|
||||
self.assertEqual(r.status_code, 400)
|
||||
|
||||
data = {"model": "agent"}
|
||||
r = self.client.patch(url, data)
|
||||
serializer = CustomFieldSerializer(custom_fields, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.data), 5) # type: ignore
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("patch", url)
|
||||
|
||||
def test_add_custom_field(self):
|
||||
url = "/core/customfields/"
|
||||
|
||||
data = {"model": "client", "type": "text", "name": "Field"}
|
||||
r = self.client.patch(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_get_custom_field(self):
|
||||
# setup
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test not found
|
||||
r = self.client.get("/core/customfields/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/customfields/{custom_field.id}/" # type: ignore
|
||||
r = self.client.get(url)
|
||||
serializer = CustomFieldSerializer(custom_field)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_update_custom_field(self):
|
||||
# setup
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test not found
|
||||
r = self.client.put("/core/customfields/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/customfields/{custom_field.id}/" # type: ignore
|
||||
data = {"type": "single", "options": ["ione", "two", "three"]}
|
||||
r = self.client.put(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
new_field = CustomField.objects.get(pk=custom_field.id) # type: ignore
|
||||
self.assertEqual(new_field.type, data["type"])
|
||||
self.assertEqual(new_field.options, data["options"])
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_custom_field(self):
|
||||
# setup
|
||||
custom_field = baker.make("core.CustomField")
|
||||
|
||||
# test not found
|
||||
r = self.client.delete("/core/customfields/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/customfields/{custom_field.id}/" # type: ignore
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertFalse(CustomField.objects.filter(pk=custom_field.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
def test_get_keystore(self):
|
||||
url = "/core/keystore/"
|
||||
|
||||
# setup
|
||||
keys = baker.make("core.GlobalKVStore", _quantity=2)
|
||||
|
||||
r = self.client.get(url)
|
||||
serializer = KeyStoreSerializer(keys, many=True)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.data), 2) # type: ignore
|
||||
self.assertEqual(r.data, serializer.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
def test_add_keystore(self):
|
||||
url = "/core/keystore/"
|
||||
|
||||
data = {"name": "test", "value": "text"}
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.check_not_authenticated("post", url)
|
||||
|
||||
def test_update_keystore(self):
|
||||
# setup
|
||||
key = baker.make("core.GlobalKVStore")
|
||||
|
||||
# test not found
|
||||
r = self.client.put("/core/keystore/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/keystore/{key.id}/" # type: ignore
|
||||
data = {"name": "test", "value": "text"}
|
||||
r = self.client.put(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
new_key = GlobalKVStore.objects.get(pk=key.id) # type: ignore
|
||||
self.assertEqual(new_key.name, data["name"])
|
||||
self.assertEqual(new_key.value, data["value"])
|
||||
|
||||
self.check_not_authenticated("put", url)
|
||||
|
||||
def test_delete_keystore(self):
|
||||
# setup
|
||||
key = baker.make("core.GlobalKVStore")
|
||||
|
||||
# test not found
|
||||
r = self.client.delete("/core/keystore/500/")
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
url = f"/core/keystore/{key.id}/" # type: ignore
|
||||
r = self.client.delete(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
self.assertFalse(GlobalKVStore.objects.filter(pk=key.id).exists()) # type: ignore
|
||||
|
||||
self.check_not_authenticated("delete", url)
|
||||
|
||||
@@ -10,4 +10,9 @@ urlpatterns = [
|
||||
path("emailtest/", views.email_test),
|
||||
path("dashinfo/", views.dashboard_info),
|
||||
path("servermaintenance/", views.server_maintenance),
|
||||
path("customfields/", views.GetAddCustomFields.as_view()),
|
||||
path("customfields/<int:pk>/", views.GetUpdateDeleteCustomFields.as_view()),
|
||||
path("codesign/", views.CodeSign.as_view()),
|
||||
path("keystore/", views.GetAddKeyStore.as_view()),
|
||||
path("keystore/<int:pk>/", views.UpdateDeleteKeyStore.as_view()),
|
||||
]
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.exceptions import ParseError
|
||||
@@ -10,8 +11,13 @@ from rest_framework.views import APIView
|
||||
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
from .models import CoreSettings
|
||||
from .serializers import CoreSettingsSerializer
|
||||
from .models import CodeSignToken, CoreSettings, CustomField, GlobalKVStore
|
||||
from .serializers import (
|
||||
CodeSignTokenSerializer,
|
||||
CoreSettingsSerializer,
|
||||
CustomFieldSerializer,
|
||||
KeyStoreSerializer,
|
||||
)
|
||||
|
||||
|
||||
class UploadMeshAgent(APIView):
|
||||
@@ -56,14 +62,20 @@ def version(request):
|
||||
|
||||
@api_view()
|
||||
def dashboard_info(request):
|
||||
from tacticalrmm.utils import get_latest_trmm_ver
|
||||
|
||||
return Response(
|
||||
{
|
||||
"trmm_version": settings.TRMM_VERSION,
|
||||
"latest_trmm_ver": get_latest_trmm_ver(),
|
||||
"dark_mode": request.user.dark_mode,
|
||||
"show_community_scripts": request.user.show_community_scripts,
|
||||
"dbl_click_action": request.user.agent_dblclick_action,
|
||||
"default_agent_tbl_tab": request.user.default_agent_tbl_tab,
|
||||
"client_tree_sort": request.user.client_tree_sort,
|
||||
"client_tree_splitter": request.user.client_tree_splitter,
|
||||
"loading_bar_color": request.user.loading_bar_color,
|
||||
"no_code_sign": hasattr(settings, "NOCODESIGN") and settings.NOCODESIGN,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -133,3 +145,120 @@ def server_maintenance(request):
|
||||
return Response(f"{records_count} records were pruned from the database")
|
||||
|
||||
return notify_error("The data is incorrect")
|
||||
|
||||
|
||||
class GetAddCustomFields(APIView):
|
||||
def get(self, request):
|
||||
fields = CustomField.objects.all()
|
||||
return Response(CustomFieldSerializer(fields, many=True).data)
|
||||
|
||||
def patch(self, request):
|
||||
if "model" in request.data.keys():
|
||||
fields = CustomField.objects.filter(model=request.data["model"])
|
||||
return Response(CustomFieldSerializer(fields, many=True).data)
|
||||
else:
|
||||
return notify_error("The request was invalid")
|
||||
|
||||
def post(self, request):
|
||||
serializer = CustomFieldSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class GetUpdateDeleteCustomFields(APIView):
|
||||
def get(self, request, pk):
|
||||
custom_field = get_object_or_404(CustomField, pk=pk)
|
||||
|
||||
return Response(CustomFieldSerializer(custom_field).data)
|
||||
|
||||
def put(self, request, pk):
|
||||
custom_field = get_object_or_404(CustomField, pk=pk)
|
||||
|
||||
serializer = CustomFieldSerializer(
|
||||
instance=custom_field, data=request.data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(CustomField, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class CodeSign(APIView):
|
||||
def get(self, request):
|
||||
token = CodeSignToken.objects.first()
|
||||
return Response(CodeSignTokenSerializer(token).data)
|
||||
|
||||
def patch(self, request):
|
||||
import requests
|
||||
|
||||
errors = []
|
||||
for url in settings.EXE_GEN_URLS:
|
||||
try:
|
||||
r = requests.post(
|
||||
f"{url}/api/v1/checktoken",
|
||||
json={"token": request.data["token"]},
|
||||
headers={"Content-type": "application/json"},
|
||||
timeout=15,
|
||||
)
|
||||
except Exception as e:
|
||||
errors.append(str(e))
|
||||
else:
|
||||
errors = []
|
||||
break
|
||||
|
||||
if errors:
|
||||
return notify_error(", ".join(errors))
|
||||
|
||||
if r.status_code == 400 or r.status_code == 401: # type: ignore
|
||||
return notify_error(r.json()["ret"]) # type: ignore
|
||||
elif r.status_code == 200: # type: ignore
|
||||
t = CodeSignToken.objects.first()
|
||||
if t is None:
|
||||
CodeSignToken.objects.create(token=request.data["token"])
|
||||
else:
|
||||
serializer = CodeSignTokenSerializer(instance=t, data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
return Response("Token was saved")
|
||||
|
||||
try:
|
||||
ret = r.json()["ret"] # type: ignore
|
||||
except:
|
||||
ret = "Something went wrong"
|
||||
return notify_error(ret)
|
||||
|
||||
|
||||
class GetAddKeyStore(APIView):
|
||||
def get(self, request):
|
||||
keys = GlobalKVStore.objects.all()
|
||||
return Response(KeyStoreSerializer(keys, many=True).data)
|
||||
|
||||
def post(self, request):
|
||||
serializer = KeyStoreSerializer(data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
|
||||
class UpdateDeleteKeyStore(APIView):
|
||||
def put(self, request, pk):
|
||||
key = get_object_or_404(GlobalKVStore, pk=pk)
|
||||
|
||||
serializer = KeyStoreSerializer(instance=key, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
serializer.save()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
def delete(self, request, pk):
|
||||
get_object_or_404(GlobalKVStore, pk=pk).delete()
|
||||
|
||||
return Response("ok")
|
||||
|
||||
@@ -7,7 +7,7 @@ from tacticalrmm.middleware import get_debug_info, get_username
|
||||
|
||||
ACTION_TYPE_CHOICES = [
|
||||
("schedreboot", "Scheduled Reboot"),
|
||||
("taskaction", "Scheduled Task Action"),
|
||||
("taskaction", "Scheduled Task Action"), # deprecated
|
||||
("agentupdate", "Agent Update"),
|
||||
("chocoinstall", "Chocolatey Software Install"),
|
||||
]
|
||||
@@ -42,13 +42,6 @@ AUDIT_OBJECT_TYPE_CHOICES = [
|
||||
("bulk", "Bulk"),
|
||||
]
|
||||
|
||||
# taskaction details format
|
||||
# {
|
||||
# "action": "taskcreate" | "taskdelete" | "tasktoggle",
|
||||
# "value": "Enable" | "Disable" # only needed for task toggle,
|
||||
# "task_id": 1
|
||||
# }
|
||||
|
||||
STATUS_CHOICES = [
|
||||
("pending", "Pending"),
|
||||
("completed", "Completed"),
|
||||
@@ -250,8 +243,6 @@ class PendingAction(models.Model):
|
||||
if self.action_type == "schedreboot":
|
||||
obj = dt.datetime.strptime(self.details["time"], "%Y-%m-%d %H:%M:%S")
|
||||
return dt.datetime.strftime(obj, "%B %d, %Y at %I:%M %p")
|
||||
elif self.action_type == "taskaction":
|
||||
return "Next agent check-in"
|
||||
elif self.action_type == "agentupdate":
|
||||
return "Next update cycle"
|
||||
elif self.action_type == "chocoinstall":
|
||||
@@ -268,20 +259,6 @@ class PendingAction(models.Model):
|
||||
elif self.action_type == "chocoinstall":
|
||||
return f"{self.details['name']} software install"
|
||||
|
||||
elif self.action_type == "taskaction":
|
||||
if self.details["action"] == "taskdelete":
|
||||
return "Device pending task deletion"
|
||||
elif self.details["action"] == "taskcreate":
|
||||
return "Device pending task creation"
|
||||
elif self.details["action"] == "tasktoggle":
|
||||
# value is bool
|
||||
if self.details["value"]:
|
||||
action = "enable"
|
||||
else:
|
||||
action = "disable"
|
||||
|
||||
return f"Device pending task {action}"
|
||||
|
||||
|
||||
class BaseAuditModel(models.Model):
|
||||
# abstract base class for auditing models
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class NatsapiConfig(AppConfig):
|
||||
name = "natsapi"
|
||||
@@ -1,36 +0,0 @@
|
||||
from django.conf import settings
|
||||
from model_bakery import baker
|
||||
|
||||
from tacticalrmm.test import TacticalTestCase
|
||||
|
||||
|
||||
class TestNatsAPIViews(TacticalTestCase):
|
||||
def setUp(self):
|
||||
self.authenticate()
|
||||
self.setup_coresettings()
|
||||
|
||||
def test_nats_agents(self):
|
||||
baker.make_recipe(
|
||||
"agents.online_agent", version=settings.LATEST_AGENT_VER, _quantity=14
|
||||
)
|
||||
|
||||
baker.make_recipe(
|
||||
"agents.offline_agent", version=settings.LATEST_AGENT_VER, _quantity=6
|
||||
)
|
||||
baker.make_recipe(
|
||||
"agents.overdue_agent", version=settings.LATEST_AGENT_VER, _quantity=5
|
||||
)
|
||||
|
||||
url = "/natsapi/online/agents/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.json()["agent_ids"]), 14)
|
||||
|
||||
url = "/natsapi/offline/agents/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(len(r.json()["agent_ids"]), 11)
|
||||
|
||||
url = "/natsapi/asdjaksdasd/agents/"
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
@@ -1,9 +0,0 @@
|
||||
from django.urls import path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("natsinfo/", views.nats_info),
|
||||
path("<str:stat>/agents/", views.NatsAgents.as_view()),
|
||||
path("logcrash/", views.LogCrash.as_view()),
|
||||
]
|
||||
@@ -1,60 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone as djangotime
|
||||
from loguru import logger
|
||||
from rest_framework.decorators import (
|
||||
api_view,
|
||||
authentication_classes,
|
||||
permission_classes,
|
||||
)
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from agents.models import Agent
|
||||
from tacticalrmm.utils import notify_error
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
@api_view()
|
||||
@permission_classes([])
|
||||
@authentication_classes([])
|
||||
def nats_info(request):
|
||||
return Response({"user": "tacticalrmm", "password": settings.SECRET_KEY})
|
||||
|
||||
|
||||
class NatsAgents(APIView):
|
||||
authentication_classes = [] # type: ignore
|
||||
permission_classes = [] # type: ignore
|
||||
|
||||
def get(self, request, stat: str):
|
||||
if stat not in ["online", "offline"]:
|
||||
return notify_error("invalid request")
|
||||
|
||||
ret: list[str] = []
|
||||
agents = Agent.objects.only(
|
||||
"pk", "agent_id", "version", "last_seen", "overdue_time", "offline_time"
|
||||
)
|
||||
if stat == "online":
|
||||
ret = [i.agent_id for i in agents if i.status == "online"]
|
||||
else:
|
||||
ret = [i.agent_id for i in agents if i.status != "online"]
|
||||
|
||||
return Response({"agent_ids": ret})
|
||||
|
||||
|
||||
class LogCrash(APIView):
|
||||
authentication_classes = [] # type: ignore
|
||||
permission_classes = [] # type: ignore
|
||||
|
||||
def post(self, request):
|
||||
agent = get_object_or_404(Agent, agent_id=request.data["agentid"])
|
||||
agent.last_seen = djangotime.now()
|
||||
agent.save(update_fields=["last_seen"])
|
||||
|
||||
if hasattr(settings, "DEBUGTEST") and settings.DEBUGTEST:
|
||||
logger.info(
|
||||
f"Detected crashed tacticalagent service on {agent.hostname} v{agent.version}, attempting recovery"
|
||||
)
|
||||
|
||||
return Response("ok")
|
||||
@@ -1,17 +1,16 @@
|
||||
amqp==5.0.5
|
||||
asgiref==3.3.1
|
||||
asgiref==3.3.4
|
||||
asyncio-nats-client==0.11.4
|
||||
billiard==3.6.3.0
|
||||
celery==5.0.5
|
||||
certifi==2020.12.5
|
||||
cffi==1.14.5
|
||||
channels==3.0.3
|
||||
chardet==4.0.0
|
||||
cryptography==3.4.6
|
||||
decorator==4.4.2
|
||||
Django==3.1.7
|
||||
cryptography==3.4.7
|
||||
daphne==3.0.2
|
||||
Django==3.2.0
|
||||
django-cors-headers==3.7.0
|
||||
django-rest-knox==4.1.0
|
||||
djangorestframework==3.12.2
|
||||
djangorestframework==3.12.4
|
||||
future==0.18.2
|
||||
kombu==5.0.2
|
||||
loguru==0.5.3
|
||||
@@ -28,8 +27,8 @@ redis==3.5.3
|
||||
requests==2.25.1
|
||||
six==1.15.0
|
||||
sqlparse==0.4.1
|
||||
twilio==6.53.0
|
||||
urllib3==1.26.3
|
||||
twilio==6.56.0
|
||||
urllib3==1.26.4
|
||||
uWSGI==2.0.19.1
|
||||
validators==0.18.2
|
||||
vine==5.0.0
|
||||
|
||||
@@ -1,219 +1,623 @@
|
||||
[
|
||||
{
|
||||
"filename": "ClearFirefoxCache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Firefox Cache",
|
||||
"description": "This script will clean up Mozilla Firefox for all users.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "ClearGoogleChromeCache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Clear Google Chrome Cache",
|
||||
"description": "This script will clean up Google Chrome for all users.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "InstallAdobeReader.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Adobe Reader DC",
|
||||
"description": "Installs Adobe Reader DC.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "InstallDuplicati.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Install Duplicati",
|
||||
"description": "This script installs Duplicati 2.0.5.1 as a service.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Reset-WindowsUpdate.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Reset Windows Update",
|
||||
"description": "This script will reset all of the Windows Updates components to DEFAULT SETTINGS.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Start-Cleanup.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Cleanup C: drive",
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "WindowsDefenderFullScanBackground.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Full Scan",
|
||||
"description": "Runs a Windows Defender Full background scan.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "WindowsDefenderQuickScanBackground.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Defender Quick Scan",
|
||||
"description": "Runs a Quick Scan using Windows Defender in the Background.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "speedtest.py",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Speed Test",
|
||||
"description": "Runs a Speed Test",
|
||||
"shell": "python"
|
||||
},
|
||||
{
|
||||
"filename": "Rename-Installed-App.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Rename Tactical RMM Agent",
|
||||
"description": "Updates the DisplayName registry entry for the Tactical RMM windows agent to your desired name. This script takes 1 required argument: the name you wish to set.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_encrypted_drive_c.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check C Drive for Bitlocker Status",
|
||||
"description": "Runs a check on drive C for Bitlocker status.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_create_status_report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Create Bitlocker Status Report",
|
||||
"description": "Creates a Bitlocker status report.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bitlocker_retrieve_status_report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Retreive Bitlocker Status Report",
|
||||
"description": "Retreives a Bitlocker status report.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "bios_check.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Check BIOS Information",
|
||||
"description": "Retreives and reports on BIOS make, version, and date .",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "ResetHighPerformancePowerProfiletoDefaults.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Reset High Perf Power Profile",
|
||||
"description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "SetHighPerformancePowerProfile.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Set High Perf Power Profile",
|
||||
"description": "Sets the High Performance Power profile to the active power profile. Use this to keep machines from falling asleep.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Windows10Upgrade.ps1",
|
||||
"submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
|
||||
"name": "Windows 10 Upgrade",
|
||||
"description": "Forces an upgrade to the latest release of Windows 10.",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "DiskStatus.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Disks",
|
||||
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "DuplicatiStatus.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Check Duplicati",
|
||||
"description": "Checks Duplicati Backup is running properly over the last 24 hours",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "EnableDefender.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable Windows Defender",
|
||||
"description": "Enables Windows Defender and sets preferences",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "OpenSSHServerInstall.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Install SSH",
|
||||
"description": "Installs and enabled OpenSSH Server",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "RDP_enable.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Enable RDP",
|
||||
"description": "Enables RDP",
|
||||
"shell": "cmd"
|
||||
},
|
||||
{
|
||||
"filename": "Speedtest.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "PS Speed Test",
|
||||
"description": "Powershell speed test (win 10 or server2016+)",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "SyncTime.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Sync DC Time",
|
||||
"description": "Syncs time with domain controller",
|
||||
"shell": "cmd"
|
||||
},
|
||||
{
|
||||
"filename": "WinDefenderClearLogs.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Clear Defender Logs",
|
||||
"description": "Clears Windows Defender Logs",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "WinDefenderStatus.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender Status",
|
||||
"description": "This will check for Malware, Antispyware, that Windows Defender is Healthy, last scan etc within the last 24 hours",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "disable_FastStartup.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Disable Fast Startup",
|
||||
"description": "Disables Faststartup on Windows 10",
|
||||
"shell": "cmd"
|
||||
},
|
||||
{
|
||||
"filename": "updatetacticalexclusion.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "TRMM Defender Exclusions",
|
||||
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "Display_Message_To_User.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Display Message To User",
|
||||
"description": "Displays a popup message to the currently logged on user",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "VerifyAntivirus.ps1",
|
||||
"submittedBy": "https://github.com/beejayzed",
|
||||
"name": "Verify Antivirus Status",
|
||||
"description": "Verify and display status for all installed Antiviruses",
|
||||
"shell": "powershell"
|
||||
},
|
||||
{
|
||||
"filename": "CreateAllUserLogonScript.ps1",
|
||||
"submittedBy": "https://github.com/nr-plaxon",
|
||||
"name": "Create User Logon Script",
|
||||
"description": "Creates a powershell script that runs at logon of any user on the machine in the security context of the user.",
|
||||
"shell": "powershell"
|
||||
}
|
||||
{
|
||||
"guid": "6820cb5e-5a7f-4d9b-8c22-d54677e3cc04",
|
||||
"filename": "Win_Firefox_Clear_Cache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Firefox - Clean Cache",
|
||||
"description": "This script will clean up Mozilla Firefox for all users.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers"
|
||||
},
|
||||
{
|
||||
"guid": "3ff6a386-11d1-4f9d-8cca-1b0563bb6443",
|
||||
"filename": "Win_Google_Chrome_Clear_Cache.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Chrome - Clear Cache for All Users",
|
||||
"description": "This script will clean up Google Chrome for all users.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Browsers"
|
||||
},
|
||||
{
|
||||
"guid": "be1de837-f677-4ac5-aa0c-37a0fc9991fc",
|
||||
"filename": "Win_Install_Adobe_Reader.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Adobe Reader DC - Install",
|
||||
"description": "Installs Adobe Reader DC.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey"
|
||||
},
|
||||
{
|
||||
"guid": "2ee134d5-76aa-4160-b334-a1efbc62079f",
|
||||
"filename": "Win_Install_Duplicati.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Duplicati - Install",
|
||||
"description": "This script installs Duplicati 2.0.5.1 as a service.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "81cc5bcb-01bf-4b0c-89b9-0ac0f3fe0c04",
|
||||
"filename": "Win_Reset_Windows_Update.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Windows Update - Reset",
|
||||
"description": "This script will reset all of the Windows Updates components to DEFAULT SETTINGS.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "8db87ff0-a9b4-4d9d-bc55-377bbcb85b6d",
|
||||
"filename": "Win_Start_Cleanup.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Disk - Cleanup C: drive",
|
||||
"description": "Cleans the C: drive's Window Temperary files, Windows SoftwareDistribution folder, the local users Temperary folder, IIS logs (if applicable) and empties the recycling bin. All deleted files will go into a log transcript in $env:TEMP. By default this script leaves files that are newer than 7 days old however this variable can be edited.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Maintenance"
|
||||
},
|
||||
{
|
||||
"guid": "2f28e8c1-ae0f-4b46-a826-f513974526a3",
|
||||
"filename": "Win_Defender_FullScan_Background.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Defender - Full Scan",
|
||||
"description": "Runs a Windows Defender Full background scan.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "adf81ddb-3b77-415c-a89b-2ccc826b5aa7",
|
||||
"filename": "Win_Defender_QuickScan_Background.ps1",
|
||||
"submittedBy": "https://github.com/Omnicef",
|
||||
"name": "Defender - Quick Scan",
|
||||
"description": "Runs a Quick Scan using Windows Defender in the Background.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "3c46290b-85db-4cd2-93a2-943c8c93b3b1",
|
||||
"filename": "Speedtest.py",
|
||||
"submittedBy": "https://github.com/wh1te909",
|
||||
"name": "Speed Test - Python",
|
||||
"description": "Runs a Speed Test using Python",
|
||||
"shell": "python",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "9d34f482-1f0c-4b2f-b65f-a9cf3c13ef5f",
|
||||
"filename": "Win_TRMM_Rename_Installed_App.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "TacticalRMM Agent Rename",
|
||||
"description": "Updates the DisplayName registry entry for the Tactical RMM windows agent to your desired name. This script takes 1 required argument: the name you wish to set.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):TacticalRMM Related"
|
||||
},
|
||||
{
|
||||
"guid": "525ae965-1dcf-4c17-92b3-5da3cf6819f5",
|
||||
"filename": "Win_Bitlocker_Encrypted_Drive_c.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Bitlocker - Check C Drive for Status",
|
||||
"description": "Runs a check on drive C for Bitlocker status.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "2ea35fa2-c227-4d17-a40e-4d39f252e27a",
|
||||
"filename": "Win_Bitlocker_Create_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Bitlocker - Create Status Report",
|
||||
"description": "Creates a Bitlocker status report.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "9e5769c1-3873-4941-bf70-e851e0afbd6d",
|
||||
"filename": "Win_Bitlocker_Retrieve_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "Bitlocker - Retrieve Status Report",
|
||||
"description": "Retreives a Bitlocker status report.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "72b93487-0266-43f0-97cc-03d4c7ee0b44",
|
||||
"filename": "Win_Bitlocker_Get_Recovery_Keys.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Bitlocker - Get Recovery Keys",
|
||||
"description": "Retreives a Bitlocker Recovery Keys",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "cfa14c28-4dfc-4d4e-95ee-a380652e058d",
|
||||
"filename": "Win_Bios_Check.ps1",
|
||||
"submittedBy": "https://github.com/ThatsNASt",
|
||||
"name": "BIOS - Check Information",
|
||||
"description": "Retreives and reports on BIOS make, version, and date.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "e1c27982-b955-4766-85b6-d92527a177cf",
|
||||
"filename": "Win_Hardware_Monitor_Get_Info.ps1",
|
||||
"submittedBy": "https://github.com/MaxAnderson95/",
|
||||
"name": "Monitor - Get Info",
|
||||
"description": "Retreives and reports on Monitor info: Manufacturer, Model, Serial",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "ae231ac4-b01f-4a39-a9d2-3d817af75260",
|
||||
"filename": "Win_Hardware_RAM_Status.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "RAM - Check Information",
|
||||
"description": "Retreives and reports on RAM info: DIMM's, total memory, slots total and used",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "95a2ee6f-b89b-4551-856e-3081b041caa7",
|
||||
"filename": "Win_Power_Profile_Reset_High_Performance_to_Defaults.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Power Profile - Reset High Perf Power Profile to defaults",
|
||||
"description": "Resets monitor, disk, standby, and hibernate timers in the default High Performance power profile to their default values. It also re-indexes the AC and DC power profiles into their default order.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "2cbd30b0-84dd-4388-a36d-2e2e980f1a3e",
|
||||
"filename": "Win_Power_Profile_Set_High_Performance.ps1",
|
||||
"submittedBy": "https://github.com/azulskyknight",
|
||||
"name": "Power Profile - Set High Performance",
|
||||
"description": "Sets the High Performance Power profile to the active power profile. Use this to keep machines from falling asleep.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "553236d3-81bc-49f4-af8a-0cff925a7f6d",
|
||||
"filename": "Win_10_Upgrade.ps1",
|
||||
"submittedBy": "https://github.com/RVL-Solutions and https://github.com/darimm",
|
||||
"name": "Windows 10 Upgrade",
|
||||
"description": "Forces an upgrade to the latest release of Windows 10.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "375323e5-cac6-4f35-a304-bb7cef35902d",
|
||||
"filename": "Win_Disk_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Disk Hardware Health Check (using Event Viewer errors)",
|
||||
"description": "Checks local disks for errors reported in event viewer within the last 24 hours",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Hardware"
|
||||
},
|
||||
{
|
||||
"guid": "7c14beb4-d1c3-41aa-8e70-92a267d6e080",
|
||||
"filename": "Win_Duplicati_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Duplicati - Check Status",
|
||||
"description": "Checks Duplicati Backup is running properly over the last 24 hours",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "da51111c-aff6-4d87-9d76-0608e1f67fe5",
|
||||
"filename": "Win_Defender_Enable.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender - Enable",
|
||||
"description": "Enables Windows Defender and sets preferences",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "a223d03a-e22e-40e0-94f2-92dd8c481d14",
|
||||
"filename": "Win_Open_SSH_Server_Install.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "SSH - Install Feature and Enable",
|
||||
"description": "Installs and enabled OpenSSH Server Feature in Win10",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "2435297a-6263-4e90-8688-1847400d0e22",
|
||||
"filename": "Win_RDP_enable.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "RDP - Enable",
|
||||
"description": "Enables RDP",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Windows Features"
|
||||
},
|
||||
{
|
||||
"guid": "24f19ead-fdfe-46b4-9dcb-4cd0e12a3940",
|
||||
"filename": "Win_Speedtest.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Speed Test - Powershell",
|
||||
"description": "Speed Test with Powershell(win 10 or server2016+)",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "a821975c-60df-4d58-8990-6cf8a55b4ee0",
|
||||
"filename": "Win_Sync_Time.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "ADDC - Sync DC Time",
|
||||
"description": "Syncs time with domain controller",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "b6b9912f-4274-4162-99cc-9fd47fbcb292",
|
||||
"filename": "Win_ADDC_Sync_Start.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "ADDC - Sync AD",
|
||||
"description": "Trigger AD Sync on domain controller",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "b720e320-7755-4c89-9992-e1a6c43699ed",
|
||||
"filename": "Win_Defender_Clear_Logs.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender - Clear Logs",
|
||||
"description": "Clears Windows Defender Logs",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "d980fda3-a068-47eb-8495-1aab07a24e64",
|
||||
"filename": "Win_Defender_Status_Report.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Defender - Status Report",
|
||||
"description": "This will check for Malware and Antispyware within the last 24 hours and display, otherwise will report as Healthy. Command Parameter: (number) if provided will check that number of days back in the log.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "9956e936-6fdb-4488-a9d8-8b274658037f",
|
||||
"filename": "Win_Disable_Fast_Startup.bat",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Power - Fast Startup Disable",
|
||||
"description": "Disables Faststartup on Windows 10",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "f628a02b-16c3-4ab5-b788-dec5bc2af1d9",
|
||||
"filename": "Win_Power_Disable_Hibernation.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Power - Hibernate Disable",
|
||||
"description": "Disables Hibernation",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Power"
|
||||
},
|
||||
{
|
||||
"guid": "2472bbaf-1941-4722-8a58-d1dd0f528801",
|
||||
"filename": "Win_TRMM_AV_Update_Exclusion.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "TRMM Defender Exclusions",
|
||||
"description": "Windows Defender Exclusions for Tactical RMM",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "b253dc76-41a0-48ca-9cea-bee4277402c4",
|
||||
"filename": "Win_Display_Message_To_User.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "Message Popup To User",
|
||||
"description": "Displays a popup message to the currently logged on user",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "19224d21-bd39-44bc-b9cf-8f1ba3ca9c11",
|
||||
"filename": "Win_Antivirus_Verify.ps1",
|
||||
"submittedBy": "https://github.com/beejayzed",
|
||||
"name": "Antivirus - Verify Status",
|
||||
"description": "Verify and display status for all installed Antiviruses",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security>Antivirus"
|
||||
},
|
||||
{
|
||||
"guid": "f88c5c52-c6fe-44db-b727-b7912a4279ed",
|
||||
"filename": "Win_Create_All_User_Logon_Script.ps1",
|
||||
"submittedBy": "https://github.com/nr-plaxon",
|
||||
"name": "Template Example - Create User Logon Script",
|
||||
"description": "Creates a powershell script that runs at logon of any user on the machine in the security context of the user.",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5615aa90-0272-427b-8acf-0ca019612501",
|
||||
"filename": "Win_Chocolatey_Update_Installed.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Update Installed Apps",
|
||||
"description": "Update all apps that were installed using Chocolatey.",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):3rd Party Software>Chocolatey"
|
||||
},
|
||||
{
|
||||
"guid": "fff8024d-d72e-4457-84fa-6c780f69a16f",
|
||||
"filename": "Win_AD_Check_And_Enable_AD_Recycle_Bin.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "ADDC - Check and Enable AD Recycle Bin",
|
||||
"description": "Only run on Domain Controllers, checks for Active Directory Recycle Bin and enables if not already enabled",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Active Directory"
|
||||
},
|
||||
{
|
||||
"guid": "71090fc4-faa6-460b-adb0-95d7863544e1",
|
||||
"filename": "Win_Check_Events_for_Bluescreens.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - Bluescreen Notification",
|
||||
"description": "Event Viewer Monitor - Notify Bluescreen events on your system",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "8373846f-facc-49b9-9891-3a780a394c89",
|
||||
"filename": "Win_Local_User_Created_Monitor.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - New User Notification",
|
||||
"description": "Event Viewer Monitor - Notify when new Local user is created",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "65e5cef1-8338-4180-a0bc-cd54e62de690",
|
||||
"filename": "Win_Task_Scheduler_New_Items_Monitor.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Event Viewer - Task Scheduler New Item Notification",
|
||||
"description": "Event Viewer Monitor - Notify when new Task Scheduler item is created",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Monitoring"
|
||||
},
|
||||
{
|
||||
"guid": "08ca81f2-f044-4dfc-ad47-090b19b19d76",
|
||||
"filename": "Win_User_Logged_in_with_Temp_Profile.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "User Logged in with temp profile check",
|
||||
"description": "Check if users are logged in with a temp profile",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "5d905886-9eb1-4129-8b81-a013f842eb24",
|
||||
"filename": "Win_Rename_Computer.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Rename Computer",
|
||||
"description": "Rename computer. First parameter will be new PC name. 2nd parameter if yes will auto-reboot machine",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": 30
|
||||
},
|
||||
{
|
||||
"guid": "f396dae2-c768-45c5-bd6c-176e56ed3614",
|
||||
"filename": "Win_Power_RestartorShutdown.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Power - Restart or Shutdown PC",
|
||||
"description": "Restart PC. Add parameter: shutdown if you want to shutdown computer",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Updates"
|
||||
},
|
||||
{
|
||||
"guid": "e09895d5-ca13-44a2-a38c-6e77c740f0e8",
|
||||
"filename": "Win_ScreenConnectAIO.ps1",
|
||||
"submittedBy": "https://github.com/bradhawkins85",
|
||||
"name": "ScreenConnect AIO",
|
||||
"description": "Install, Uninstall, Start and Stop ScreenConnect Access Agent",
|
||||
"args": [
|
||||
"-serviceName {{client.ScreenConnectService}}",
|
||||
"-url {{client.ScreenConnectInstaller}}",
|
||||
"-action install"
|
||||
],
|
||||
"default_timeout": "90",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):3rd Party Software"
|
||||
},
|
||||
{
|
||||
"guid": "3abbb62a-3757-492c-8979-b4fc6174845d",
|
||||
"filename": "Win_AutoRun_Disable.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Autorun - Disable",
|
||||
"description": "Disable Autorun System Wide",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "30"
|
||||
},
|
||||
{
|
||||
"guid": "4a11877a-7555-494c-ac74-29d6df3c1989",
|
||||
"filename": "Win_Cortana_Disable.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Cortana - Disable",
|
||||
"description": "Disable Cortana System Wide",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "30"
|
||||
},
|
||||
{
|
||||
"guid": "28ef1387-dd4f-4bab-b042-26250914e370",
|
||||
"filename": "Win_WOL_Enable_Status.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "BROKEN Network WoL - Enable function",
|
||||
"description": "Wake on Lan enable on Dell, HP, Lenovo",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "685d5432-0b84-46d5-98e8-3ec2054150fe",
|
||||
"filename": "Win_WOL_Test_State.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "BROKEN Network WoL - Test State",
|
||||
"description": "Wake on Lan test status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "abe78170-7cf9-435b-9666-c5ef6c11a106",
|
||||
"filename": "Win_Network_IPv6_Disable.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network IPv6 - Disable",
|
||||
"description": "Disable IPv6 on all adapters",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "745bb7cd-b71a-4f2e-b6f2-c579b1828162",
|
||||
"filename": "Win_Network_DHCP_Set.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Set Primary NIC to DHCP",
|
||||
"description": "Enable DHCP on primary adapter",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83aa4d51-63ce-41e7-829f-3c16e6115bbf",
|
||||
"filename": "Win_Network_DNS_Set_to_1.1.1.2.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Network - Set all NICs to use DNS of 1.1.1.2",
|
||||
"description": "Domain computers skipped. Sets all NICs to have primary DNS server of 1.1.1.2, backup of 1.0.0.2 (Cloudflare malware blocking)",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "6ce5682a-49db-4c0b-9417-609cf905ac43",
|
||||
"filename": "Win_Win10_Change_Key_and_Activate.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "Product Key in Win10 Change and Activate",
|
||||
"description": "Insert new product key and Activate. Requires 1 parameter the product key you want to use",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other",
|
||||
"default_timeout": "90"
|
||||
},
|
||||
{
|
||||
"guid": "83f6c6ea-6120-4fd3-bec8-d3abc505dcdf",
|
||||
"filename": "Win_TRMM_Start_Menu_Delete_Shortcut.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "TacticalRMM Delete Start Menu Shortcut for App",
|
||||
"description": "Delete its application shortcut that's installed in the start menu by default",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):TacticalRMM Related",
|
||||
"default_timeout": "10"
|
||||
},
|
||||
{
|
||||
"guid": "60130fca-7636-446e-acd7-cc5d29d609c2",
|
||||
"filename": "Win_Firewall_Check_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows Firewall - Check Status",
|
||||
"description": "Windows Firewall - Check state, report status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "93379675-c01c-433f-87df-a11597c959f0",
|
||||
"filename": "Win_UAC_Check_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Windows UAC - Check Status",
|
||||
"description": "Windows UAC - Report status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Security"
|
||||
},
|
||||
{
|
||||
"guid": "7ea6a11a-05c0-4151-b5c1-cb8af029299f",
|
||||
"filename": "Win_AzureAD_Check_Connection_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Azure AD - Check Status",
|
||||
"description": "Azure AD - Check if joined or not",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Azure>AD"
|
||||
},
|
||||
{
|
||||
"guid": "7d81859a-1ba3-42b0-8664-29844f0dd765",
|
||||
"filename": "Win_Azure_Mars_Cloud_Backup_Status.ps1",
|
||||
"submittedBy": "https://github.com/dinger1986",
|
||||
"name": "Azure - Mars Cloud backup Status",
|
||||
"description": "Azure - Mars Cloud backup Check Status",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Azure>Backup"
|
||||
},
|
||||
{
|
||||
"guid": "e18c64d0-b783-4b52-b44b-9bb7592b439b",
|
||||
"filename": "Win_FileSystem_Enable_Long_Paths.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "File System - Enable Long Paths",
|
||||
"description": "Enables NTFS Long paths greater than 260 characters",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "c6252ca8-5172-42ea-9114-e447f80868f5",
|
||||
"filename": "Win_USB_Disable_Access.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "USB - Disable Access",
|
||||
"description": "USB - Disable Plugged in USB devices",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "3785952f-69fb-4bda-b2fe-5e3e8642738a",
|
||||
"filename": "Win_USB_Enable_Access.bat",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "USB - Enable Access",
|
||||
"description": "USB - Enable Plugged in USB devices",
|
||||
"shell": "cmd",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "c6014da2-b188-4e1b-b96a-e3440ade3a6a",
|
||||
"filename": "Win_RecycleBin_Empty.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "File System - Empty Recycle Bin",
|
||||
"description": "Empty the recycle bin",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Storage"
|
||||
},
|
||||
{
|
||||
"guid": "57997ec7-b293-4fd5-9f90-a25426d0eb90",
|
||||
"filename": "Win_Get_Computer_Users.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Get Computer Users",
|
||||
"description": "Get list of computer users and show which one is enabled",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "77da9c87-5a7a-4ba1-bdde-3eeb3b01d62d",
|
||||
"filename": "Win_Network_Set_To_Private.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Network Category - Set Network To Private",
|
||||
"description": "Sets current network type to Private",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Network"
|
||||
},
|
||||
{
|
||||
"guid": "768f42d5-7b45-45ed-8233-254ae537aaa2",
|
||||
"filename": "Win_TaskScheduler_Add_Task.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "Task Scheduler - Add a task",
|
||||
"description": "Add a task to Task Scheduler, needs editing",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Other"
|
||||
},
|
||||
{
|
||||
"guid": "17040742-184a-4251-8f7b-4a1b0a1f02d1",
|
||||
"filename": "Win_File_Copy_Misc.ps1",
|
||||
"submittedBy": "https://github.com/tremor021",
|
||||
"name": "EXAMPLE File Copying using powershell",
|
||||
"description": "Reference Script: Will need manual tweaking, for copying files/folders from paths/websites to local",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Misc>Reference",
|
||||
"default_timeout": "1"
|
||||
},
|
||||
{
|
||||
"guid": "168037d8-78e6-4a6a-a9a9-8ec2c1dbe949",
|
||||
"filename": "Win_MSI_Install.ps1",
|
||||
"submittedBy": "https://github.com/silversword411",
|
||||
"name": "EXAMPLE Function for running MSI install via powershell",
|
||||
"description": "Reference Script: Will need manual tweaking, for running MSI from powershell",
|
||||
"shell": "powershell",
|
||||
"category": "TRMM (Win):Misc>Reference",
|
||||
"default_timeout": "1"
|
||||
}
|
||||
]
|
||||
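The entries above all share a small schema: filename, submittedBy, name, description and shell, with the newer format adding a guid plus optional category, default_timeout and args. A rough standalone check of those rules, mirroring the tests added later in this diff (the file path and the shell/extension mapping are assumptions taken from those tests), might look like:

import json

# Sketch only; the real tests open this path relative to settings.BASE_DIR.
with open("scripts/community_scripts.json") as f:
    info = json.load(f)

guids = []
for script in info:
    fn = script["filename"]
    assert " " not in fn, f"{fn} must not contain spaces in filename"
    # shell must match the file extension
    if fn.endswith(".ps1"):
        assert script["shell"] == "powershell"
    elif fn.endswith(".bat"):
        assert script["shell"] == "cmd"
    elif fn.endswith(".py"):
        assert script["shell"] == "python"
    if "default_timeout" in script:
        int(script["default_timeout"])  # strings allowed as long as they cast to int
    if "args" in script:
        assert isinstance(script["args"], list)
    guids.append(script["guid"])

assert len(guids) == len(set(guids))  # guids must be unique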
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-03-31 01:08
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0005_auto_20201207_1606'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='script',
|
||||
name='default_timeout',
|
||||
field=models.PositiveIntegerField(default=90),
|
||||
),
|
||||
]
|
||||
19 api/tacticalrmm/scripts/migrations/0007_script_args.py Normal file
@@ -0,0 +1,19 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-01 14:52
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0006_script_default_timeout'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='script',
|
||||
name='args',
|
||||
field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(blank=True, null=True), blank=True, default=list, null=True, size=None),
|
||||
),
|
||||
]
|
||||
18 api/tacticalrmm/scripts/migrations/0008_script_guid.py Normal file
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.7 on 2021-04-15 02:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('scripts', '0007_script_args'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='script',
|
||||
name='guid',
|
||||
field=models.CharField(blank=True, max_length=64, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,6 +1,11 @@
|
||||
import base64
|
||||
import re
|
||||
from typing import Any, List, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from loguru import logger
|
||||
|
||||
from logs.models import BaseAuditModel
|
||||
|
||||
@@ -15,8 +20,11 @@ SCRIPT_TYPES = [
|
||||
("builtin", "Built In"),
|
||||
]
|
||||
|
||||
logger.configure(**settings.LOG_CONFIG)
|
||||
|
||||
|
||||
class Script(BaseAuditModel):
|
||||
guid = models.CharField(max_length=64, null=True, blank=True)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.TextField(null=True, blank=True)
|
||||
filename = models.CharField(max_length=255) # deprecated
|
||||
@@ -26,9 +34,16 @@ class Script(BaseAuditModel):
|
||||
script_type = models.CharField(
|
||||
max_length=100, choices=SCRIPT_TYPES, default="userdefined"
|
||||
)
|
||||
args = ArrayField(
|
||||
models.TextField(null=True, blank=True),
|
||||
null=True,
|
||||
blank=True,
|
||||
default=list,
|
||||
)
|
||||
favorite = models.BooleanField(default=False)
|
||||
category = models.CharField(max_length=100, null=True, blank=True)
|
||||
code_base64 = models.TextField(null=True, blank=True)
|
||||
default_timeout = models.PositiveIntegerField(default=90)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
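With the guid, args and default_timeout fields added above, a script row can be created along these lines. This is a minimal sketch with illustrative values only (run inside python manage.py shell so the Django app registry is loaded):

from scripts.models import Script

# Illustrative only: a user-defined script using the new fields.
Script(
    guid="00000000-0000-0000-0000-000000000000",  # placeholder GUID
    name="Example - Echo Hostname",
    description="Demonstrates the new args and default_timeout fields",
    filename="example_echo_hostname.ps1",  # deprecated field, still on the model
    shell="powershell",
    script_type="userdefined",
    category="TRMM (Win):Other",
    args=["-message {{agent.hostname}}"],  # placeholder resolved by parse_script_args below
    default_timeout=90,
).save()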
@@ -65,15 +80,28 @@ class Script(BaseAuditModel):
|
||||
|
||||
for script in info:
|
||||
if os.path.exists(os.path.join(scripts_dir, script["filename"])):
|
||||
s = cls.objects.filter(script_type="builtin").filter(
|
||||
name=script["name"]
|
||||
s = cls.objects.filter(script_type="builtin", guid=script["guid"])
|
||||
|
||||
category = (
|
||||
script["category"] if "category" in script.keys() else "Community"
|
||||
)
|
||||
|
||||
default_timeout = (
|
||||
int(script["default_timeout"])
|
||||
if "default_timeout" in script.keys()
|
||||
else 90
|
||||
)
|
||||
|
||||
args = script["args"] if "args" in script.keys() else []
|
||||
|
||||
if s.exists():
|
||||
i = s.first()
|
||||
i.name = script["name"]
|
||||
i.description = script["description"]
|
||||
i.category = "Community"
|
||||
i.category = category
|
||||
i.shell = script["shell"]
|
||||
i.default_timeout = default_timeout
|
||||
i.args = args
|
||||
|
||||
with open(os.path.join(scripts_dir, script["filename"]), "rb") as f:
|
||||
script_bytes = (
|
||||
@@ -86,10 +114,52 @@ class Script(BaseAuditModel):
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"default_timeout",
|
||||
"code_base64",
|
||||
"shell",
|
||||
"args",
|
||||
]
|
||||
)
|
||||
|
||||
# check if script was added without a guid
|
||||
elif cls.objects.filter(
|
||||
script_type="builtin", name=script["name"]
|
||||
).exists():
|
||||
s = cls.objects.get(script_type="builtin", name=script["name"])
|
||||
|
||||
if not s.guid:
|
||||
print(f"Updating GUID for: {script['name']}")
|
||||
s.guid = script["guid"]
|
||||
s.name = script["name"]
|
||||
s.description = script["description"]
|
||||
s.category = category
|
||||
s.shell = script["shell"]
|
||||
s.default_timeout = default_timeout
|
||||
s.args = args
|
||||
|
||||
with open(
|
||||
os.path.join(scripts_dir, script["filename"]), "rb"
|
||||
) as f:
|
||||
script_bytes = (
|
||||
f.read().decode("utf-8").encode("ascii", "ignore")
|
||||
)
|
||||
s.code_base64 = base64.b64encode(script_bytes).decode(
|
||||
"ascii"
|
||||
)
|
||||
|
||||
s.save(
|
||||
update_fields=[
|
||||
"guid",
|
||||
"name",
|
||||
"description",
|
||||
"category",
|
||||
"default_timeout",
|
||||
"code_base64",
|
||||
"shell",
|
||||
"args",
|
||||
]
|
||||
)
|
||||
|
||||
else:
|
||||
print(f"Adding new community script: {script['name']}")
|
||||
|
||||
@@ -101,17 +171,140 @@ class Script(BaseAuditModel):
|
||||
|
||||
cls(
|
||||
code_base64=code_base64,
|
||||
guid=script["guid"],
|
||||
name=script["name"],
|
||||
description=script["description"],
|
||||
filename=script["filename"],
|
||||
shell=script["shell"],
|
||||
script_type="builtin",
|
||||
category="Community",
|
||||
category=category,
|
||||
default_timeout=default_timeout,
|
||||
args=args,
|
||||
).save()
|
||||
|
||||
# delete community scripts that had their name changed
|
||||
cls.objects.filter(script_type="builtin", guid=None).delete()
|
||||
|
||||
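The update path above reduces to a three-way match; the following compact sketch only summarizes that order, it is not the actual method:

# 1) match an existing builtin script by guid and update it in place
# 2) else match by name (script imported before guids existed) and backfill its guid
# 3) else create a brand new builtin script from the json entry
# Builtin scripts still lacking a guid afterwards are treated as renamed and deleted.
def match_existing(cls, entry):
    by_guid = cls.objects.filter(script_type="builtin", guid=entry["guid"])
    if by_guid.exists():
        return by_guid.first()
    by_name = cls.objects.filter(script_type="builtin", name=entry["name"])
    if by_name.exists():
        return by_name.first()
    return None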
@staticmethod
|
||||
def serialize(script):
|
||||
# serializes the script and returns json
|
||||
from .serializers import ScriptSerializer
|
||||
|
||||
return ScriptSerializer(script).data
|
||||
|
||||
@classmethod
|
||||
def parse_script_args(
|
||||
cls, agent, shell: str, args: List[str] = list()
|
||||
) -> Union[List[str], None]:
|
||||
from core.models import CustomField, GlobalKVStore
|
||||
|
||||
if not args:
|
||||
return []
|
||||
|
||||
temp_args = list()
|
||||
|
||||
# pattern to match for injection
|
||||
pattern = re.compile(".*\\{\\{(.*)\\}\\}.*")
|
||||
|
||||
for arg in args:
|
||||
match = pattern.match(arg)
|
||||
if match:
|
||||
# only get the match between the () in regex
|
||||
string = match.group(1)
|
||||
|
||||
# split by period if exists. First should be model and second should be property
|
||||
temp = string.split(".")
|
||||
|
||||
# check for model and property
|
||||
if len(temp) != 2:
|
||||
# ignore arg since it is invalid
|
||||
continue
|
||||
|
||||
# value is in the global keystore and replace value
|
||||
if temp[0] == "global":
|
||||
if GlobalKVStore.objects.filter(name=temp[1]).exists():
|
||||
value = GlobalKVStore.objects.get(name=temp[1]).value
|
||||
temp_args.append(
|
||||
re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)
|
||||
)
|
||||
continue
|
||||
else:
|
||||
# ignore since value doesn't exist
|
||||
continue
|
||||
|
||||
if temp[0] == "client":
|
||||
model = "client"
|
||||
obj = agent.client
|
||||
elif temp[0] == "site":
|
||||
model = "site"
|
||||
obj = agent.site
|
||||
elif temp[0] == "agent":
|
||||
model = "agent"
|
||||
obj = agent
|
||||
else:
|
||||
# ignore arg since it is invalid
|
||||
continue
|
||||
|
||||
if hasattr(obj, temp[1]):
|
||||
value = getattr(obj, temp[1])
|
||||
|
||||
elif CustomField.objects.filter(model=model, name=temp[1]).exists():
|
||||
|
||||
field = CustomField.objects.get(model=model, name=temp[1])
|
||||
model_fields = getattr(field, f"{model}_fields")
|
||||
value = None
|
||||
if model_fields.filter(**{model: obj}).exists():
|
||||
value = model_fields.get(**{model: obj}).value
|
||||
|
||||
if not value and field.default_value:
|
||||
value = field.default_value
|
||||
|
||||
# check if value exists and if not use default
|
||||
if value and field.type == "multiple":
|
||||
value = format_shell_array(shell, value)
|
||||
elif value and field.type == "checkbox":
|
||||
value = format_shell_bool(shell, value)
|
||||
|
||||
if not value:
|
||||
continue
|
||||
|
||||
else:
|
||||
# ignore arg since property is invalid
|
||||
continue
|
||||
|
||||
# replace the value in the arg and push to array
|
||||
# log any unhashable type errors
|
||||
try:
|
||||
temp_args.append(re.sub("\\{\\{.*\\}\\}", "'" + value + "'", arg)) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
continue
|
||||
|
||||
else:
|
||||
temp_args.append(arg)
|
||||
|
||||
return temp_args
|
||||
|
||||
|
||||
def format_shell_array(shell: str, value: Any) -> str:
|
||||
if shell == "cmd":
|
||||
return "array args are not supported with batch"
|
||||
elif shell == "powershell":
|
||||
temp_string = ""
|
||||
for item in value:
|
||||
temp_string += item + ","
|
||||
return temp_string.strip(",")
|
||||
else: # python
|
||||
temp_string = ""
|
||||
for item in value:
|
||||
temp_string += item + ","
|
||||
return temp_string.strip(",")
|
||||
|
||||
|
||||
def format_shell_bool(shell: str, value: Any) -> str:
|
||||
if shell == "cmd":
|
||||
return "1" if value else "0"
|
||||
elif shell == "powershell":
|
||||
return "$True" if value else "$False"
|
||||
else: # python
|
||||
return "True" if value else "False"
|
||||
|
||||
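Putting the pieces above together: parse_script_args resolves {{model.property}} placeholders against the global key store, client/site/agent attributes, or custom fields, and format_shell_array / format_shell_bool normalise multi-value and boolean custom fields per shell. A small sketch of the substitution pattern plus a usage example of the two helpers (the lookup dict is hypothetical; run inside python manage.py shell so the imports resolve):

import re

from scripts.models import format_shell_array, format_shell_bool

# Same injection pattern used by Script.parse_script_args above.
pattern = re.compile(".*\\{\\{(.*)\\}\\}.*")

# Hypothetical stand-in for the global/client/site/agent lookups.
lookup = {"client.ScreenConnectService": "ScreenConnect Client (abc123)"}

args = ["-serviceName {{client.ScreenConnectService}}", "-action install"]
parsed = []
for arg in args:
    match = pattern.match(arg)
    if match and match.group(1) in lookup:
        # replace the whole {{...}} token with the quoted value, as the model does
        parsed.append(re.sub("\\{\\{.*\\}\\}", "'" + lookup[match.group(1)] + "'", arg))
    else:
        parsed.append(arg)
print(parsed)  # ["-serviceName 'ScreenConnect Client (abc123)'", "-action install"]

print(format_shell_array("powershell", ["a", "b", "c"]))  # a,b,c
print(format_shell_bool("powershell", True))              # $True
print(format_shell_bool("cmd", False))                    # 0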
@@ -12,8 +12,10 @@ class ScriptTableSerializer(ModelSerializer):
|
||||
"description",
|
||||
"script_type",
|
||||
"shell",
|
||||
"args",
|
||||
"category",
|
||||
"favorite",
|
||||
"default_timeout",
|
||||
]
|
||||
|
||||
|
||||
@@ -25,9 +27,11 @@ class ScriptSerializer(ModelSerializer):
|
||||
"name",
|
||||
"description",
|
||||
"shell",
|
||||
"args",
|
||||
"category",
|
||||
"favorite",
|
||||
"code_base64",
|
||||
"default_timeout",
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -7,8 +7,6 @@ from tacticalrmm.celery import app
|
||||
|
||||
@app.task
|
||||
def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
nats_data = {
|
||||
"func": "rawcmd",
|
||||
"timeout": timeout,
|
||||
@@ -17,15 +15,13 @@ def handle_bulk_command_task(agentpks, cmd, shell, timeout) -> None:
|
||||
"shell": shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
|
||||
@app.task
|
||||
def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
script = Script.objects.get(pk=scriptpk)
|
||||
agents = Agent.objects.filter(pk__in=agentpks)
|
||||
agents_nats = [agent for agent in agents if agent.has_nats]
|
||||
nats_data = {
|
||||
"func": "runscript",
|
||||
"timeout": timeout,
|
||||
@@ -35,5 +31,5 @@ def handle_bulk_script_task(scriptpk, agentpks, args, timeout) -> None:
|
||||
"shell": script.shell,
|
||||
},
|
||||
}
|
||||
for agent in agents_nats:
|
||||
for agent in Agent.objects.filter(pk__in=agentpks):
|
||||
asyncio.run(agent.nats_cmd(nats_data, wait=False))
|
||||
|
||||
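After this change both bulk tasks simply fan the NATS payload out to every selected agent instead of pre-filtering on has_nats. A hedged example of queueing them (the import path and all values are assumptions, not taken from this diff):

# Import path assumed; adjust to wherever these celery tasks actually live.
from scripts.tasks import handle_bulk_command_task, handle_bulk_script_task

# Illustrative only: run a raw command on agents 1-3 with a 30 second timeout.
handle_bulk_command_task.delay([1, 2, 3], "ipconfig /flushdns", "cmd", 30)

# Illustrative only: run script pk=5 with two args on the same agents.
handle_bulk_script_task.delay(5, [1, 2, 3], ["hello", "world"], 90)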
@@ -1,5 +1,6 @@
|
||||
import json
|
||||
import os
|
||||
from email.policy import default
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
@@ -23,7 +24,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
serializer = ScriptTableSerializer(scripts, many=True)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -36,10 +37,13 @@ class TestScriptViews(TacticalTestCase):
|
||||
"shell": "powershell",
|
||||
"category": "New",
|
||||
"code": "Some Test Code\nnew Line",
|
||||
"default_timeout": 99,
|
||||
"args": ["hello", "world", r"{{agent.public_ip}}"],
|
||||
"favorite": False,
|
||||
}
|
||||
|
||||
# test without file upload
|
||||
resp = self.client.post(url, data)
|
||||
resp = self.client.post(url, data, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertTrue(Script.objects.filter(name="Name").exists())
|
||||
self.assertEqual(Script.objects.get(name="Name").code, data["code"])
|
||||
@@ -55,6 +59,10 @@ class TestScriptViews(TacticalTestCase):
|
||||
"shell": "cmd",
|
||||
"category": "New",
|
||||
"filename": file,
|
||||
"default_timeout": 4455,
|
||||
"args": json.dumps(
|
||||
["hello", "world", r"{{agent.public_ip}}"]
|
||||
), # simulate javascript's JSON.stringify() for formData
|
||||
}
|
||||
|
||||
# test with file upload
|
||||
@@ -79,6 +87,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"description": "Description Change",
|
||||
"shell": script.shell,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"default_timeout": 13344556,
|
||||
}
|
||||
|
||||
# test edit a userdefined script
|
||||
@@ -104,6 +113,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
"shell": script.shell,
|
||||
"favorite": True,
|
||||
"code": "Test Code\nAnother Line",
|
||||
"default_timeout": 54345,
|
||||
}
|
||||
# test marking a builtin script as favorite
|
||||
resp = self.client.put(
|
||||
@@ -120,11 +130,11 @@ class TestScriptViews(TacticalTestCase):
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
|
||||
script = baker.make("scripts.Script")
|
||||
url = f"/scripts/{script.pk}/script/"
|
||||
url = f"/scripts/{script.pk}/script/" # type: ignore
|
||||
serializer = ScriptSerializer(script)
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(serializer.data, resp.data)
|
||||
self.assertEqual(serializer.data, resp.data) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -160,27 +170,27 @@ class TestScriptViews(TacticalTestCase):
|
||||
script = baker.make(
|
||||
"scripts.Script", code_base64="VGVzdA==", shell="powershell"
|
||||
)
|
||||
url = f"/scripts/{script.pk}/download/"
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"})
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.ps1", "code": "Test"}) # type: ignore
|
||||
|
||||
# test batch file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="cmd")
|
||||
url = f"/scripts/{script.pk}/download/"
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"})
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.bat", "code": "Test"}) # type: ignore
|
||||
|
||||
# test python file
|
||||
script = baker.make("scripts.Script", code_base64="VGVzdA==", shell="python")
|
||||
url = f"/scripts/{script.pk}/download/"
|
||||
url = f"/scripts/{script.pk}/download/" # type: ignore
|
||||
|
||||
resp = self.client.get(url, format="json")
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"})
|
||||
self.assertEqual(resp.data, {"filename": f"{script.name}.py", "code": "Test"}) # type: ignore
|
||||
|
||||
self.check_not_authenticated("get", url)
|
||||
|
||||
@@ -197,6 +207,7 @@ class TestScriptViews(TacticalTestCase):
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
|
||||
guids = []
|
||||
for script in info:
|
||||
fn: str = script["filename"]
|
||||
self.assertTrue(os.path.exists(os.path.join(scripts_dir, fn)))
|
||||
@@ -213,6 +224,19 @@ class TestScriptViews(TacticalTestCase):
|
||||
elif fn.endswith(".py"):
|
||||
self.assertEqual(script["shell"], "python")
|
||||
|
||||
if "args" in script.keys():
|
||||
self.assertIsInstance(script["args"], list)
|
||||
|
||||
# allows strings as long as they can be type casted to int
|
||||
if "default_timeout" in script.keys():
|
||||
self.assertIsInstance(int(script["default_timeout"]), int)
|
||||
|
||||
self.assertIn("guid", script.keys())
|
||||
guids.append(script["guid"])
|
||||
|
||||
# check guids are unique
|
||||
self.assertEqual(len(guids), len(set(guids)))
|
||||
|
||||
def test_load_community_scripts(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
@@ -221,9 +245,46 @@ class TestScriptViews(TacticalTestCase):
|
||||
|
||||
Script.load_community_scripts()
|
||||
|
||||
community_scripts = Script.objects.filter(script_type="builtin").count()
|
||||
self.assertEqual(len(info), community_scripts)
|
||||
community_scripts_count = Script.objects.filter(script_type="builtin").count()
|
||||
if len(info) != community_scripts_count:
|
||||
raise Exception(
|
||||
f"There are {len(info)} scripts in json file but only {community_scripts_count} in database"
|
||||
)
|
||||
|
||||
# test updating already added community scripts
|
||||
Script.load_community_scripts()
|
||||
self.assertEqual(len(info), community_scripts)
|
||||
community_scripts_count2 = Script.objects.filter(script_type="builtin").count()
|
||||
self.assertEqual(len(info), community_scripts_count2)
|
||||
|
||||
def test_community_script_has_jsonfile_entry(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
|
||||
filenames = [i["filename"] for i in info]
|
||||
|
||||
# normal
|
||||
if not settings.DOCKER_BUILD:
|
||||
scripts_dir = os.path.join(Path(settings.BASE_DIR).parents[1], "scripts")
|
||||
# docker
|
||||
else:
|
||||
scripts_dir = settings.SCRIPTS_DIR
|
||||
|
||||
with os.scandir(scripts_dir) as it:
|
||||
for f in it:
|
||||
if not f.name.startswith(".") and f.is_file():
|
||||
if f.name not in filenames:
|
||||
raise Exception(
|
||||
f"{f.name} is missing an entry in community_scripts.json"
|
||||
)
|
||||
|
||||
def test_script_filenames_do_not_contain_spaces(self):
|
||||
with open(
|
||||
os.path.join(settings.BASE_DIR, "scripts/community_scripts.json")
|
||||
) as f:
|
||||
info = json.load(f)
|
||||
for script in info:
|
||||
fn: str = script["filename"]
|
||||
if " " in fn:
|
||||
raise Exception(f"{fn} must not contain spaces in filename")
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import base64
|
||||
import json
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -24,25 +25,33 @@ class GetAddScripts(APIView):
|
||||
return Response(ScriptTableSerializer(scripts, many=True).data)
|
||||
|
||||
def post(self, request, format=None):
|
||||
|
||||
data = {
|
||||
"name": request.data["name"],
|
||||
"category": request.data["category"],
|
||||
"description": request.data["description"],
|
||||
"shell": request.data["shell"],
|
||||
"default_timeout": request.data["default_timeout"],
|
||||
"script_type": "userdefined", # force all uploads to be userdefined. built in scripts cannot be edited by user
|
||||
}
|
||||
|
||||
if "favorite" in request.data:
|
||||
# code editor upload
|
||||
if "args" in request.data.keys() and isinstance(request.data["args"], list):
|
||||
data["args"] = request.data["args"]
|
||||
|
||||
# file upload, have to json load it cuz it's formData
|
||||
if "args" in request.data.keys() and "file_upload" in request.data.keys():
|
||||
data["args"] = json.loads(request.data["args"])
|
||||
|
||||
if "favorite" in request.data.keys():
|
||||
data["favorite"] = request.data["favorite"]
|
||||
|
||||
if "filename" in request.data:
|
||||
if "filename" in request.data.keys():
|
||||
message_bytes = request.data["filename"].read()
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode(
|
||||
"ascii", "ignore"
|
||||
)
|
||||
|
||||
elif "code" in request.data:
|
||||
elif "code" in request.data.keys():
|
||||
message_bytes = request.data["code"].encode("ascii", "ignore")
|
||||
data["code_base64"] = base64.b64encode(message_bytes).decode("ascii")
|
||||
|
||||
|
||||
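The two branches above accept args either as a real JSON list (code editor upload) or as a JSON-encoded string inside formData (file upload). A hedged client-side sketch of both shapes; the host, endpoint path and auth header are assumptions, not taken from this diff:

import json
import requests

BASE = "https://rmm.example.com"                  # hypothetical server
HEADERS = {"Authorization": "Token <api-token>"}  # hypothetical auth header
URL = f"{BASE}/scripts/"                          # assumed scripts list endpoint

# 1) Code editor upload: args is a real list in the JSON body.
requests.post(URL, headers=HEADERS, json={
    "name": "Name",
    "description": "Description",
    "shell": "powershell",
    "category": "New",
    "code": "Some Test Code\nnew Line",
    "default_timeout": 99,
    "args": ["hello", "world", "{{agent.public_ip}}"],
})

# 2) File upload: formData, so args arrives as a string and is json.loads()'d server side.
requests.post(URL, headers=HEADERS,
    data={
        "name": "Name",
        "description": "Description",
        "shell": "cmd",
        "category": "New",
        "default_timeout": 4455,
        "file_upload": "true",
        "args": json.dumps(["hello", "world", "{{agent.public_ip}}"]),
    },
    files={"filename": ("test.bat", b"echo hello")},
)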
Some files were not shown because too many files have changed in this diff.